mapFolding 0.11.1__py3-none-any.whl → 0.11.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. mapFolding/__init__.py +18 -58
  2. mapFolding/basecamp.py +13 -10
  3. mapFolding/beDRY.py +113 -2
  4. mapFolding/dataBaskets.py +24 -2
  5. mapFolding/{toolboxFilesystem.py → filesystemToolkit.py} +3 -3
  6. mapFolding/infoBooth.py +96 -0
  7. mapFolding/oeis.py +3 -2
  8. mapFolding/someAssemblyRequired/RecipeJob.py +3 -4
  9. mapFolding/someAssemblyRequired/Z0Z_makeSomeModules.py +182 -17
  10. mapFolding/someAssemblyRequired/__init__.py +3 -3
  11. mapFolding/someAssemblyRequired/_toolIfThis.py +5 -5
  12. mapFolding/someAssemblyRequired/{_toolboxContainers.py → _toolkitContainers.py} +6 -7
  13. mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +8 -7
  14. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +3 -2
  15. mapFolding/someAssemblyRequired/transformationTools.py +11 -10
  16. mapFolding/syntheticModules/countParallel.py +98 -0
  17. mapFolding/syntheticModules/dataPacking.py +1 -1
  18. mapFolding/syntheticModules/numbaCount.py +189 -188
  19. mapFolding/theDao.py +1 -1
  20. mapFolding/theSSOT.py +4 -243
  21. {mapfolding-0.11.1.dist-info → mapfolding-0.11.2.dist-info}/METADATA +16 -8
  22. mapfolding-0.11.2.dist-info/RECORD +56 -0
  23. {mapfolding-0.11.1.dist-info → mapfolding-0.11.2.dist-info}/WHEEL +1 -1
  24. tests/conftest.py +7 -9
  25. tests/test_computations.py +1 -1
  26. tests/test_filesystem.py +1 -2
  27. tests/test_other.py +1 -1
  28. tests/test_tasks.py +1 -3
  29. mapfolding-0.11.1.dist-info/RECORD +0 -54
  30. /mapFolding/someAssemblyRequired/{toolboxNumba.py → toolkitNumba.py} +0 -0
  31. {mapfolding-0.11.1.dist-info → mapfolding-0.11.2.dist-info}/entry_points.txt +0 -0
  32. {mapfolding-0.11.1.dist-info → mapfolding-0.11.2.dist-info}/licenses/LICENSE +0 -0
  33. {mapfolding-0.11.1.dist-info → mapfolding-0.11.2.dist-info}/top_level.txt +0 -0

mapFolding/someAssemblyRequired/Z0Z_makeSomeModules.py
@@ -1,3 +1,5 @@
+ from collections.abc import Sequence
+ from astToolkit import ClassIsAndAttribute, extractClassDef
  from mapFolding import raiseIfNoneGitHubIssueNumber3, The
  from mapFolding.someAssemblyRequired import (
  ast_Identifier,
@@ -20,20 +22,25 @@ from mapFolding.someAssemblyRequired import (
  str_nameDOTname,
  Then,
  write_astModule,
+ DeReConstructField2ast,
+ ShatteredDataclass,
  )
- from mapFolding.someAssemblyRequired.toolboxNumba import decorateCallableWithNumba, parametersNumbaLight
+ from mapFolding.someAssemblyRequired.toolkitNumba import decorateCallableWithNumba, parametersNumbaLight
  from mapFolding.someAssemblyRequired.transformationTools import (
  removeDataclassFromFunction,
  shatter_dataclassesDOTdataclass,
  unpackDataclassCallFunctionRepackDataclass,
  )
  from pathlib import PurePath
+ from Z0Z_tools import importLogicalPath2Callable
  import ast
+ import dataclasses

  algorithmSourceModuleHARDCODED = 'daoOfMapFolding'
  sourceCallableIdentifierHARDCODED = 'count'
  logicalPathInfixHARDCODED: ast_Identifier = 'syntheticModules'
  theCountingIdentifierHARDCODED: ast_Identifier = 'groupsOfFolds'
+ dataPackingModuleIdentifierHARDCODED: ast_Identifier = 'dataPacking'

  def makeInitializeGroupsOfFolds() -> None:
  callableIdentifierHARDCODED = 'initializeGroupsOfFolds'
@@ -118,11 +125,12 @@ def makeDaoOfMapFolding() -> PurePath:
  doTheNeedful.imports.update(shatteredDataclass.imports)
  targetCallableIdentifier = daoOfMapFolding.astFunctionDef.name
  doTheNeedful = unpackDataclassCallFunctionRepackDataclass(doTheNeedful, targetCallableIdentifier, shatteredDataclass)
- astTuple: ast.Tuple | None = NodeTourist(Be.Return, Then.extractIt(DOT.value)).captureLastMatch(daoOfMapFolding.astFunctionDef)
+ astTuple: ast.Tuple | None = NodeTourist(Be.Return, Then.extractIt(DOT.value)).captureLastMatch(daoOfMapFolding.astFunctionDef) # type: ignore
  if astTuple is None: raise raiseIfNoneGitHubIssueNumber3
  astTuple.ctx = ast.Store()
+ ast.Return()

- findThis = IfThis.isAssignAndValueIs(IfThis.isCall_Identifier(targetCallableIdentifier))
+ findThis = ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier))
  doThat = Then.replaceWith(Make.Assign([astTuple], value=Make.Call(Make.Name(targetCallableIdentifier), astTuple.elts)))
  changeAssignCallToTarget = NodeChanger(findThis, doThat)
  changeAssignCallToTarget.visit(doTheNeedful.astFunctionDef)
@@ -136,6 +144,160 @@ def makeDaoOfMapFolding() -> PurePath:

  return pathFilename

+ def makeDaoOfMapFoldingParallel() -> PurePath:
+ moduleIdentifierHARDCODED: ast_Identifier = 'countParallel'
+
+ algorithmSourceModule = algorithmSourceModuleHARDCODED
+ sourceCallableIdentifier = sourceCallableIdentifierHARDCODED
+ logicalPathSourceModule = '.'.join([The.packageName, algorithmSourceModule])
+
+ logicalPathInfix = logicalPathInfixHARDCODED
+ moduleIdentifier = moduleIdentifierHARDCODED
+
+ astModule = parseLogicalPath2astModule(logicalPathSourceModule)
+ ingredientsFunction = IngredientsFunction(inlineFunctionDef(sourceCallableIdentifier, astModule), LedgerOfImports(astModule))
+
+ dataclassName: ast.expr | None = NodeTourist(Be.arg, Then.extractIt(DOT.annotation)).captureLastMatch(ingredientsFunction.astFunctionDef)
+ if dataclassName is None: raise raiseIfNoneGitHubIssueNumber3
+ dataclass_Identifier: ast_Identifier | None = NodeTourist(Be.Name, Then.extractIt(DOT.id)).captureLastMatch(dataclassName)
+ if dataclass_Identifier is None: raise raiseIfNoneGitHubIssueNumber3
+
+ dataclassLogicalPathModule = None
+ for moduleWithLogicalPath, listNameTuples in ingredientsFunction.imports.dictionaryImportFrom.items():
+ for nameTuple in listNameTuples:
+ if nameTuple[0] == dataclass_Identifier:
+ dataclassLogicalPathModule = moduleWithLogicalPath
+ break
+ if dataclassLogicalPathModule:
+ break
+ if dataclassLogicalPathModule is None: raise raiseIfNoneGitHubIssueNumber3
+ dataclassInstanceIdentifier = NodeTourist(Be.arg, Then.extractIt(DOT.arg)).captureLastMatch(ingredientsFunction.astFunctionDef)
+ if dataclassInstanceIdentifier is None: raise raiseIfNoneGitHubIssueNumber3
+ shatteredDataclass = shatter_dataclassesDOTdataclass(dataclassLogicalPathModule, dataclass_Identifier, dataclassInstanceIdentifier)
+
+ # Start add the parallel state fields to the count function ================================================
+ dataclassBaseFields = dataclasses.fields(importLogicalPath2Callable(dataclassLogicalPathModule, dataclass_Identifier)) # pyright: ignore [reportArgumentType]
+ dataclass_IdentifierParallel = 'Parallel' + dataclass_Identifier
+ dataclassFieldsParallel = dataclasses.fields(importLogicalPath2Callable(dataclassLogicalPathModule, dataclass_IdentifierParallel)) # pyright: ignore [reportArgumentType]
+ onlyParallelFields = [field for field in dataclassFieldsParallel if field.name not in [fieldBase.name for fieldBase in dataclassBaseFields]]
+
+ Official_fieldOrder: list[ast_Identifier] = []
+ dictionaryDeReConstruction: dict[ast_Identifier, DeReConstructField2ast] = {}
+
+ dataclassClassDef = extractClassDef(parseLogicalPath2astModule(dataclassLogicalPathModule), dataclass_IdentifierParallel)
+ if not isinstance(dataclassClassDef, ast.ClassDef): raise ValueError(f"I could not find `{dataclass_IdentifierParallel = }` in `{dataclassLogicalPathModule = }`.")
+
+ for aField in onlyParallelFields:
+ Official_fieldOrder.append(aField.name)
+ dictionaryDeReConstruction[aField.name] = DeReConstructField2ast(dataclassLogicalPathModule, dataclassClassDef, dataclassInstanceIdentifier, aField)
+
+ shatteredDataclassParallel = ShatteredDataclass(
+ countingVariableAnnotation=shatteredDataclass.countingVariableAnnotation,
+ countingVariableName=shatteredDataclass.countingVariableName,
+ field2AnnAssign={**shatteredDataclass.field2AnnAssign, **{dictionaryDeReConstruction[field].name: dictionaryDeReConstruction[field].astAnnAssignConstructor for field in Official_fieldOrder}},
+ Z0Z_field2AnnAssign={**shatteredDataclass.Z0Z_field2AnnAssign, **{dictionaryDeReConstruction[field].name: dictionaryDeReConstruction[field].Z0Z_hack for field in Official_fieldOrder}},
+ list_argAnnotated4ArgumentsSpecification=shatteredDataclass.list_argAnnotated4ArgumentsSpecification + [dictionaryDeReConstruction[field].ast_argAnnotated for field in Official_fieldOrder],
+ list_keyword_field__field4init=shatteredDataclass.list_keyword_field__field4init + [dictionaryDeReConstruction[field].ast_keyword_field__field for field in Official_fieldOrder if dictionaryDeReConstruction[field].init],
+ listAnnotations=shatteredDataclass.listAnnotations + [dictionaryDeReConstruction[field].astAnnotation for field in Official_fieldOrder],
+ listName4Parameters=shatteredDataclass.listName4Parameters + [dictionaryDeReConstruction[field].astName for field in Official_fieldOrder],
+ listUnpack=shatteredDataclass.listUnpack + [Make.AnnAssign(dictionaryDeReConstruction[field].astName, dictionaryDeReConstruction[field].astAnnotation, dictionaryDeReConstruction[field].ast_nameDOTname) for field in Official_fieldOrder],
+ map_stateDOTfield2Name={**shatteredDataclass.map_stateDOTfield2Name, **{dictionaryDeReConstruction[field].ast_nameDOTname: dictionaryDeReConstruction[field].astName for field in Official_fieldOrder}},
+ )
+ shatteredDataclassParallel.fragments4AssignmentOrParameters = Make.Tuple(shatteredDataclassParallel.listName4Parameters, ast.Store())
+ shatteredDataclassParallel.repack = Make.Assign([Make.Name(dataclassInstanceIdentifier)], value=Make.Call(Make.Name(dataclass_IdentifierParallel), list_keyword=shatteredDataclassParallel.list_keyword_field__field4init))
+ shatteredDataclassParallel.signatureReturnAnnotation = Make.Subscript(Make.Name('tuple'), Make.Tuple(shatteredDataclassParallel.listAnnotations))
+
+ shatteredDataclassParallel.imports.update(*(dictionaryDeReConstruction[field].ledger for field in Official_fieldOrder))
+ shatteredDataclassParallel.imports.addImportFrom_asStr(dataclassLogicalPathModule, dataclass_IdentifierParallel)
+ shatteredDataclassParallel.imports.update(shatteredDataclass.imports)
+ shatteredDataclassParallel.imports.removeImportFrom(dataclassLogicalPathModule, dataclass_Identifier)
+
+ # End add the parallel state fields to the count function ================================================
+
+ ingredientsFunction.imports.update(shatteredDataclassParallel.imports)
+ ingredientsFunction = removeDataclassFromFunction(ingredientsFunction, shatteredDataclassParallel)
+
+ # Start add the parallel logic to the count function ================================================
+
+ findThis = ClassIsAndAttribute.testIs(ast.While, ClassIsAndAttribute.leftIs(ast.Compare, IfThis.isName_Identifier('leafConnectee')))
+ doThat = Then.extractIt(DOT.body)
+ captureCountGapsCodeBlock: NodeTourist[ast.While, Sequence[ast.stmt]] = NodeTourist(findThis, doThat)
+ countGapsCodeBlock = captureCountGapsCodeBlock.captureLastMatch(ingredientsFunction.astFunctionDef)
+ if countGapsCodeBlock is None: raise raiseIfNoneGitHubIssueNumber3
+
+ thisIsMyTaskIndexCodeBlock = ast.If(ast.BoolOp(ast.Or()
+ , values=[ast.Compare(ast.Name('leaf1ndex'), ops=[ast.NotEq()], comparators=[ast.Name('taskDivisions')])
+ , ast.Compare(ast.BinOp(ast.Name('leafConnectee'), op=ast.Mod(), right=ast.Name('taskDivisions')), ops=[ast.Eq()], comparators=[ast.Name('taskIndex')])])
+ , body=list(countGapsCodeBlock[0:-1]))
+
+ countGapsCodeBlockNew: list[ast.stmt] = [thisIsMyTaskIndexCodeBlock, countGapsCodeBlock[-1]]
+
+ doThat = Grab.bodyAttribute(Then.replaceWith(countGapsCodeBlockNew))
+ NodeChanger(findThis, doThat).visit(ingredientsFunction.astFunctionDef)
+
+ # End add the parallel logic to the count function ================================================
+
+ ingredientsFunction = removeUnusedParameters(ingredientsFunction)
+
+ ingredientsFunction = decorateCallableWithNumba(ingredientsFunction, parametersNumbaLight)
+
+ # Start unpack/repack the dataclass function ================================================
+ sourceCallableIdentifier = The.sourceCallableDispatcher
+
+ unRepackDataclass: IngredientsFunction = astModuleToIngredientsFunction(astModule, sourceCallableIdentifier)
+ unRepackDataclass.astFunctionDef.name = 'unRepack' + dataclass_IdentifierParallel
+ unRepackDataclass.imports.update(shatteredDataclassParallel.imports)
+ findThis = ClassIsAndAttribute.annotationIs(ast.arg, IfThis.isName_Identifier(dataclass_Identifier)) # type: ignore
+ doThat = Grab.annotationAttribute(Grab.idAttribute(Then.replaceWith(dataclass_IdentifierParallel))) # type: ignore
+ NodeChanger(findThis, doThat).visit(unRepackDataclass.astFunctionDef) # type: ignore
+ unRepackDataclass.astFunctionDef.returns = Make.Name(dataclass_IdentifierParallel)
+ targetCallableIdentifier = ingredientsFunction.astFunctionDef.name
+ unRepackDataclass = unpackDataclassCallFunctionRepackDataclass(unRepackDataclass, targetCallableIdentifier, shatteredDataclassParallel)
+
+ astTuple: ast.Tuple | None = NodeTourist(Be.Return, Then.extractIt(DOT.value)).captureLastMatch(ingredientsFunction.astFunctionDef) # type: ignore
+ if astTuple is None: raise raiseIfNoneGitHubIssueNumber3
+ astTuple.ctx = ast.Store()
+ findThis = ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier))
+ doThat = Then.replaceWith(Make.Assign([astTuple], value=Make.Call(Make.Name(targetCallableIdentifier), astTuple.elts)))
+ changeAssignCallToTarget = NodeChanger(findThis, doThat)
+ changeAssignCallToTarget.visit(unRepackDataclass.astFunctionDef)
+
+ ingredientsDoTheNeedful: IngredientsFunction = IngredientsFunction(
+ astFunctionDef = ast.FunctionDef(name='doTheNeedful'
+ , args=ast.arguments(args=[ast.arg('state', annotation=ast.Name(dataclass_IdentifierParallel)), ast.arg('concurrencyLimit', annotation=ast.Name('int'))])
+ , body=[ast.Assign(targets=[ast.Name('stateParallel', ctx=ast.Store())], value=ast.Call(func=ast.Name('deepcopy'), args=[ast.Name('state')]))
+ , ast.AnnAssign(target=ast.Name('listStatesParallel', ctx=ast.Store()), annotation=ast.Subscript(value=ast.Name('list'), slice=ast.Name(dataclass_IdentifierParallel)), value=ast.BinOp(left=ast.List(elts=[ast.Name('stateParallel')]), op=ast.Mult(), right=ast.Attribute(value=ast.Name('stateParallel'), attr='taskDivisions')), simple=1)
+ , ast.AnnAssign(target=ast.Name('groupsOfFoldsTotal', ctx=ast.Store()), annotation=ast.Name('int'), value=ast.Constant(value=0), simple=1)
+
+ , ast.AnnAssign(target=ast.Name('dictionaryConcurrency', ctx=ast.Store()), annotation=ast.Subscript(value=ast.Name('dict'), slice=ast.Tuple(elts=[ast.Name('int'), ast.Subscript(value=ast.Name('ConcurrentFuture'), slice=ast.Name(dataclass_IdentifierParallel))])), value=ast.Dict(), simple=1)
+ , ast.With(items=[ast.withitem(context_expr=ast.Call(func=ast.Name('ProcessPoolExecutor'), args=[ast.Name('concurrencyLimit')]), optional_vars=ast.Name('concurrencyManager', ctx=ast.Store()))]
+ , body=[ast.For(target=ast.Name('indexSherpa', ctx=ast.Store()), iter=ast.Call(func=ast.Name('range'), args=[ast.Attribute(value=ast.Name('stateParallel'), attr='taskDivisions')])
+ , body=[ast.Assign(targets=[ast.Name('state', ctx=ast.Store())], value=ast.Call(func=ast.Name('deepcopy'), args=[ast.Name('stateParallel')]))
+ , ast.Assign(targets=[ast.Attribute(value=ast.Name('state'), attr='taskIndex', ctx=ast.Store())], value=ast.Name('indexSherpa'))
+ , ast.Assign(targets=[ast.Subscript(value=ast.Name('dictionaryConcurrency'), slice=ast.Name('indexSherpa'), ctx=ast.Store())], value=ast.Call(func=ast.Attribute(value=ast.Name('concurrencyManager'), attr='submit'), args=[ast.Name(unRepackDataclass.astFunctionDef.name), ast.Name('state')]))])
+ , ast.For(target=ast.Name('indexSherpa', ctx=ast.Store()), iter=ast.Call(func=ast.Name('range'), args=[ast.Attribute(value=ast.Name('stateParallel'), attr='taskDivisions')])
+ , body=[ast.Assign(targets=[ast.Subscript(value=ast.Name('listStatesParallel'), slice=ast.Name('indexSherpa'), ctx=ast.Store())], value=ast.Call(func=ast.Attribute(value=ast.Subscript(value=ast.Name('dictionaryConcurrency'), slice=ast.Name('indexSherpa')), attr='result')))
+ , ast.AugAssign(target=ast.Name('groupsOfFoldsTotal', ctx=ast.Store()), op=ast.Add(), value=ast.Attribute(value=ast.Subscript(value=ast.Name('listStatesParallel'), slice=ast.Name('indexSherpa')), attr='groupsOfFolds'))])])

+ , ast.AnnAssign(target=ast.Name('foldsTotal', ctx=ast.Store()), annotation=ast.Name('int'), value=ast.BinOp(left=ast.Name('groupsOfFoldsTotal'), op=ast.Mult(), right=ast.Attribute(value=ast.Name('stateParallel'), attr='leavesTotal')), simple=1)
+ , ast.Return(value=ast.Tuple(elts=[ast.Name('foldsTotal'), ast.Name('listStatesParallel')]))]
+ , returns=ast.Subscript(value=ast.Name('tuple'), slice=ast.Tuple(elts=[ast.Name('int'), ast.Subscript(value=ast.Name('list'), slice=ast.Name(dataclass_IdentifierParallel))])))
+ , imports = LedgerOfImports(Make.Module([ast.ImportFrom(module='concurrent.futures', names=[ast.alias(name='Future', asname='ConcurrentFuture'), ast.alias(name='ProcessPoolExecutor')], level=0),
+ ast.ImportFrom(module='copy', names=[ast.alias(name='deepcopy')], level=0),
+ ast.ImportFrom(module='multiprocessing', names=[ast.alias(name='set_start_method', asname='multiprocessing_set_start_method')], level=0),])
+ )
+ )
+
+ ingredientsModule = IngredientsModule([ingredientsFunction, unRepackDataclass, ingredientsDoTheNeedful]
+ , prologue = Make.Module([ast.If(test=ast.Compare(left=ast.Name('__name__'), ops=[ast.Eq()], comparators=[ast.Constant(value='__main__')]), body=[ast.Expr(value=ast.Call(func=ast.Name('multiprocessing_set_start_method'), args=[ast.Constant(value='spawn')]))])])
+ )
+ ingredientsModule.removeImportFromModule('numpy')
+
+ pathFilename = PurePath(The.pathPackage, logicalPathInfix, moduleIdentifier + The.fileExtension)
+
+ write_astModule(ingredientsModule, pathFilename, The.packageName)
+ return pathFilename
+
  def makeTheorem2() -> PurePath:
  moduleIdentifierHARDCODED: ast_Identifier = 'theorem2'

@@ -266,23 +428,25 @@ def numbaOnTheorem2(pathFilenameSource: PurePath) -> ast.ImportFrom:
  return astImportFrom

  def makeUnRePackDataclass(astImportFrom: ast.ImportFrom) -> None:
- moduleIdentifierHARDCODED: ast_Identifier = 'dataPacking'
+ callableIdentifierHARDCODED: ast_Identifier = 'sequential'

  algorithmSourceModule = algorithmSourceModuleHARDCODED
  sourceCallableIdentifier = The.sourceCallableDispatcher
  logicalPathSourceModule = '.'.join([The.packageName, algorithmSourceModule])

  logicalPathInfix = logicalPathInfixHARDCODED
- moduleIdentifier = moduleIdentifierHARDCODED
+ moduleIdentifier = dataPackingModuleIdentifierHARDCODED
+ callableIdentifier = callableIdentifierHARDCODED

- doTheNeedful: IngredientsFunction = astModuleToIngredientsFunction(parseLogicalPath2astModule(logicalPathSourceModule), sourceCallableIdentifier)
- dataclassName: ast.expr | None = NodeTourist(Be.arg, Then.extractIt(DOT.annotation)).captureLastMatch(doTheNeedful.astFunctionDef)
+ ingredientsFunction: IngredientsFunction = astModuleToIngredientsFunction(parseLogicalPath2astModule(logicalPathSourceModule), sourceCallableIdentifier)
+ ingredientsFunction.astFunctionDef.name = callableIdentifier
+ dataclassName: ast.expr | None = NodeTourist(Be.arg, Then.extractIt(DOT.annotation)).captureLastMatch(ingredientsFunction.astFunctionDef)
  if dataclassName is None: raise raiseIfNoneGitHubIssueNumber3
  dataclass_Identifier: ast_Identifier | None = NodeTourist(Be.Name, Then.extractIt(DOT.id)).captureLastMatch(dataclassName)
  if dataclass_Identifier is None: raise raiseIfNoneGitHubIssueNumber3

  dataclassLogicalPathModule = None
- for moduleWithLogicalPath, listNameTuples in doTheNeedful.imports.dictionaryImportFrom.items():
+ for moduleWithLogicalPath, listNameTuples in ingredientsFunction.imports.dictionaryImportFrom.items():
  for nameTuple in listNameTuples:
  if nameTuple[0] == dataclass_Identifier:
  dataclassLogicalPathModule = moduleWithLogicalPath
@@ -290,26 +454,26 @@ def makeUnRePackDataclass(astImportFrom: ast.ImportFrom) -> None:
  if dataclassLogicalPathModule:
  break
  if dataclassLogicalPathModule is None: raise raiseIfNoneGitHubIssueNumber3
- dataclassInstanceIdentifier = NodeTourist(Be.arg, Then.extractIt(DOT.arg)).captureLastMatch(doTheNeedful.astFunctionDef)
+ dataclassInstanceIdentifier = NodeTourist(Be.arg, Then.extractIt(DOT.arg)).captureLastMatch(ingredientsFunction.astFunctionDef)
  if dataclassInstanceIdentifier is None: raise raiseIfNoneGitHubIssueNumber3
  shatteredDataclass = shatter_dataclassesDOTdataclass(dataclassLogicalPathModule, dataclass_Identifier, dataclassInstanceIdentifier)

- doTheNeedful.imports.update(shatteredDataclass.imports)
- doTheNeedful.imports.addAst(astImportFrom)
+ ingredientsFunction.imports.update(shatteredDataclass.imports)
+ ingredientsFunction.imports.addAst(astImportFrom)
  targetCallableIdentifier = astImportFrom.names[0].name
- doTheNeedful = unpackDataclassCallFunctionRepackDataclass(doTheNeedful, targetCallableIdentifier, shatteredDataclass)
+ ingredientsFunction = unpackDataclassCallFunctionRepackDataclass(ingredientsFunction, targetCallableIdentifier, shatteredDataclass)
  if astImportFrom.module is None: raise raiseIfNoneGitHubIssueNumber3
  targetFunctionDef = extractFunctionDef(parseLogicalPath2astModule(astImportFrom.module), targetCallableIdentifier)
  if targetFunctionDef is None: raise raiseIfNoneGitHubIssueNumber3
- astTuple: ast.Tuple | None = NodeTourist(Be.Return, Then.extractIt(DOT.value)).captureLastMatch(targetFunctionDef)
+ astTuple: ast.Tuple | None = NodeTourist(Be.Return, Then.extractIt(DOT.value)).captureLastMatch(targetFunctionDef) # type: ignore
  if astTuple is None: raise raiseIfNoneGitHubIssueNumber3
  astTuple.ctx = ast.Store()

- findThis = IfThis.isAssignAndValueIs(IfThis.isCall_Identifier(targetCallableIdentifier))
+ findThis = ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier))
  doThat = Then.replaceWith(Make.Assign([astTuple], value=Make.Call(Make.Name(targetCallableIdentifier), astTuple.elts)))
- NodeChanger(findThis, doThat).visit(doTheNeedful.astFunctionDef)
+ NodeChanger(findThis, doThat).visit(ingredientsFunction.astFunctionDef)

- ingredientsModule = IngredientsModule(doTheNeedful)
+ ingredientsModule = IngredientsModule(ingredientsFunction)
  ingredientsModule.removeImportFromModule('numpy')

  pathFilename = PurePath(The.pathPackage, logicalPathInfix, moduleIdentifier + The.fileExtension)
@@ -322,4 +486,5 @@ if __name__ == '__main__':
  pathFilename = trimTheorem2(pathFilename)
  astImportFrom = numbaOnTheorem2(pathFilename)
  makeUnRePackDataclass(astImportFrom)
- makeDaoOfMapFolding()
+ pathFilename = makeDaoOfMapFolding()
+ makeDaoOfMapFoldingParallel()
@@ -53,8 +53,6 @@ from astToolkit import (
53
53
  extractClassDef as extractClassDef,
54
54
  extractFunctionDef as extractFunctionDef,
55
55
  Grab as Grab,
56
- importLogicalPath2Callable as importLogicalPath2Callable,
57
- importPathFilename2Callable as importPathFilename2Callable,
58
56
  IngredientsFunction as IngredientsFunction,
59
57
  IngredientsModule as IngredientsModule,
60
58
  LedgerOfImports as LedgerOfImports,
@@ -68,6 +66,8 @@ from astToolkit import (
68
66
  Then as Then,
69
67
  )
70
68
 
69
+ from Z0Z_tools import importLogicalPath2Callable as importLogicalPath2Callable, importPathFilename2Callable as importPathFilename2Callable
70
+
71
71
  from astToolkit.transformationTools import (
72
72
  inlineFunctionDef as inlineFunctionDef,
73
73
  removeUnusedParameters as removeUnusedParameters,
@@ -77,7 +77,7 @@ from astToolkit.transformationTools import (
77
77
 
78
78
  from mapFolding.someAssemblyRequired._toolIfThis import IfThis as IfThis
79
79
 
80
- from mapFolding.someAssemblyRequired._toolboxContainers import (
80
+ from mapFolding.someAssemblyRequired._toolkitContainers import (
81
81
  DeReConstructField2ast as DeReConstructField2ast,
82
82
  RecipeSynthesizeFlow as RecipeSynthesizeFlow,
83
83
  ShatteredDataclass as ShatteredDataclass,

mapFolding/someAssemblyRequired/_toolIfThis.py
@@ -21,7 +21,7 @@ they implement a declarative approach to AST manipulation that separates node id

  from astToolkit import IfThis as astToolkit_IfThis
  from collections.abc import Callable
- from mapFolding.someAssemblyRequired import ast_Identifier, Be
+ from mapFolding.someAssemblyRequired import ast_Identifier, Be, DOT
  from typing import TypeGuard
  import ast

@@ -39,21 +39,21 @@ class IfThis(astToolkit_IfThis):
  @staticmethod
  def isAttributeNamespace_IdentifierGreaterThan0(namespace: ast_Identifier, identifier: ast_Identifier) -> Callable[[ast.AST], TypeGuard[ast.Compare] | bool]:
  return lambda node: (Be.Compare(node)
- and IfThis.isAttributeNamespace_Identifier(namespace, identifier)(node.left)
+ and IfThis.isAttributeNamespace_Identifier(namespace, identifier)(DOT.left(node))
  and Be.Gt(node.ops[0])
  and IfThis.isConstant_value(0)(node.comparators[0]))
  @staticmethod
  def isIfAttributeNamespace_IdentifierGreaterThan0(namespace: ast_Identifier, identifier: ast_Identifier) -> Callable[[ast.AST], TypeGuard[ast.If] | bool]:
  return lambda node: (Be.If(node)
- and IfThis.isAttributeNamespace_IdentifierGreaterThan0(namespace, identifier)(node.test))
+ and IfThis.isAttributeNamespace_IdentifierGreaterThan0(namespace, identifier)(DOT.test(node)))

  @staticmethod
  def isWhileAttributeNamespace_IdentifierGreaterThan0(namespace: ast_Identifier, identifier: ast_Identifier) -> Callable[[ast.AST], TypeGuard[ast.While] | bool]:
  return lambda node: (Be.While(node)
- and IfThis.isAttributeNamespace_IdentifierGreaterThan0(namespace, identifier)(node.test))
+ and IfThis.isAttributeNamespace_IdentifierGreaterThan0(namespace, identifier)(DOT.test(node)))

  @staticmethod
  def isAttributeNamespace_IdentifierLessThanOrEqual0(namespace: ast_Identifier, identifier: ast_Identifier) -> Callable[[ast.AST], TypeGuard[ast.Compare] | bool]:
  return lambda node: (Be.Compare(node)
- and IfThis.isAttributeNamespace_Identifier(namespace, identifier)(node.left)
+ and IfThis.isAttributeNamespace_Identifier(namespace, identifier)(DOT.left(node))
  and Be.LtE(node.ops[0]))

mapFolding/someAssemblyRequired/{_toolboxContainers.py → _toolkitContainers.py}
@@ -19,9 +19,10 @@ specific optimizations and transformations.
  """

  from collections.abc import Callable
+ from astToolkit import ClassIsAndAttribute
  from copy import deepcopy
  from mapFolding.someAssemblyRequired import ast_Identifier, DOT, IfThis, Make, NodeTourist, parseLogicalPath2astModule, str_nameDOTname, Then, LedgerOfImports
- from mapFolding.theSSOT import raiseIfNoneGitHubIssueNumber3, The
+ from mapFolding import raiseIfNoneGitHubIssueNumber3, The
  from pathlib import Path, PurePosixPath
  from typing import Any, cast
  import ast
@@ -252,14 +253,12 @@ class DeReConstructField2ast:
  self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
  self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstance_Identifier), self.name)

- findThis = IfThis.isAnnAssign_targetIs(IfThis.isName_Identifier(self.name))
-
- sherpa = NodeTourist(
- findThis=findThis
- , doThat=Then.extractIt(DOT.annotation)
+ sherpa: ast.expr = NodeTourist( # type: ignore
+ findThis=ClassIsAndAttribute.targetIs(ast.AnnAssign, IfThis.isName_Identifier(self.name))
+ , doThat=Then.extractIt(DOT.annotation) # type: ignore
  ).captureLastMatch(dataclassClassDef)

- if sherpa is None: raise raiseIfNoneGitHubIssueNumber3
+ if sherpa is None: raise raiseIfNoneGitHubIssueNumber3 # type: ignore
  else: self.astAnnotation = sherpa

  self.ast_argAnnotated = Make.arg(self.name, self.astAnnotation)

mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py
@@ -1,4 +1,5 @@
- from mapFolding import getPathFilenameFoldsTotal, raiseIfNoneGitHubIssueNumber3, The
+ from astToolkit import ClassIsAndAttribute
+ from mapFolding import getPathFilenameFoldsTotal, raiseIfNoneGitHubIssueNumber3, The, MapFoldingState
  from mapFolding.someAssemblyRequired import (
  ast_Identifier,
  Be,
@@ -15,9 +16,8 @@ from mapFolding.someAssemblyRequired import (
  write_astModule,
  )
  from mapFolding.someAssemblyRequired.RecipeJob import RecipeJobTheorem2Numba
- from mapFolding.someAssemblyRequired.toolboxNumba import parametersNumbaLight, SpicesJobNumba, decorateCallableWithNumba
+ from mapFolding.someAssemblyRequired.toolkitNumba import parametersNumbaLight, SpicesJobNumba, decorateCallableWithNumba
  from mapFolding.syntheticModules.initializeCount import initializeGroupsOfFolds
- from mapFolding.dataBaskets import MapFoldingState
  from pathlib import PurePosixPath
  from typing import cast, NamedTuple
  from Z0Z_tools import autoDecodingRLE
@@ -77,7 +77,7 @@ if __name__ == '__main__':
  ast_argNumbaProgress = ast.arg(arg=spices.numbaProgressBarIdentifier, annotation=ast.Name(id=numba_progressPythonClass, ctx=ast.Load()))
  ingredientsFunction.astFunctionDef.args.args.append(ast_argNumbaProgress)

- findThis = IfThis.isAugAssignAndTargetIs(IfThis.isName_Identifier(job.shatteredDataclass.countingVariableName.id))
+ findThis = ClassIsAndAttribute.targetIs(ast.AugAssign, IfThis.isName_Identifier(job.shatteredDataclass.countingVariableName.id)) # type: ignore
  doThat = Then.replaceWith(Make.Expr(Make.Call(Make.Attribute(Make.Name(spices.numbaProgressBarIdentifier),'update'),[Make.Constant(1)])))
  countWithProgressBar = NodeChanger(findThis, doThat)
  countWithProgressBar.visit(ingredientsFunction.astFunctionDef)
@@ -161,10 +161,11 @@ def makeJobNumba(job: RecipeJobTheorem2Numba, spices: SpicesJobNumba) -> None:
  ingredientsCount: IngredientsFunction = IngredientsFunction(astFunctionDef, LedgerOfImports())

  # Remove `foldGroups` and any other unused statements, so you can dynamically determine which variables are not used
- findThis = IfThis.isAssignAndTargets0Is(IfThis.isSubscript_Identifier('foldGroups'))
+ findThis = ClassIsAndAttribute.targetsIs(ast.Assign, lambda list_expr: any([IfThis.isSubscript_Identifier('foldGroups')(node) for node in list_expr ]))
+ # findThis = IfThis.isAssignAndTargets0Is(IfThis.isSubscript_Identifier('foldGroups'))
  doThat = Then.removeIt
  remove_foldGroups = NodeChanger(findThis, doThat)
- remove_foldGroups.visit(ingredientsCount.astFunctionDef)
+ # remove_foldGroups.visit(ingredientsCount.astFunctionDef)

  # replace identifiers with static values with their values, so you can dynamically determine which variables are not used
  list_IdentifiersStaticValues = list_IdentifiersStaticValuesHARDCODED
@@ -260,7 +261,7 @@ if __name__ == '__main__':
  """

  if __name__ == '__main__':
- mapShape = (1,46)
+ mapShape = (2,4)
  state = MapFoldingState(mapShape)
  state = initializeGroupsOfFolds(state)
  # foldsTotalEstimated = getFoldsTotalKnown(state.mapShape) // state.leavesTotal

mapFolding/someAssemblyRequired/synthesizeNumbaJob.py
@@ -18,6 +18,7 @@ This creates extremely fast, specialized implementations that can be run directl
  as Python scripts or further compiled into standalone executables.
  """

+ from astToolkit import ClassIsAndAttribute
  from mapFolding import getPathFilenameFoldsTotal, raiseIfNoneGitHubIssueNumber3, The
  from mapFolding.someAssemblyRequired import (
  ast_Identifier,
@@ -35,7 +36,7 @@ from mapFolding.someAssemblyRequired import (
  write_astModule,
  )
  from mapFolding.someAssemblyRequired.RecipeJob import RecipeJob
- from mapFolding.someAssemblyRequired.toolboxNumba import parametersNumbaLight, SpicesJobNumba, decorateCallableWithNumba
+ from mapFolding.someAssemblyRequired.toolkitNumba import parametersNumbaLight, SpicesJobNumba, decorateCallableWithNumba
  from mapFolding.someAssemblyRequired.transformationTools import dictionaryEstimates, makeInitializedComputationState
  from pathlib import PurePosixPath
  from typing import cast, NamedTuple
@@ -96,7 +97,7 @@ if __name__ == '__main__':
  ast_argNumbaProgress = ast.arg(arg=spices.numbaProgressBarIdentifier, annotation=ast.Name(id=numba_progressPythonClass, ctx=ast.Load()))
  ingredientsFunction.astFunctionDef.args.args.append(ast_argNumbaProgress)

- findThis = IfThis.isAugAssignAndTargetIs(IfThis.isName_Identifier(job.shatteredDataclass.countingVariableName.id))
+ findThis = ClassIsAndAttribute.targetIs(ast.AugAssign, IfThis.isName_Identifier(job.shatteredDataclass.countingVariableName.id)) # type: ignore
  doThat = Then.replaceWith(Make.Expr(Make.Call(Make.Attribute(Make.Name(spices.numbaProgressBarIdentifier),'update'),[Make.Constant(1)])))
  countWithProgressBar = NodeChanger(findThis, doThat)
  countWithProgressBar.visit(ingredientsFunction.astFunctionDef)

mapFolding/someAssemblyRequired/transformationTools.py
@@ -19,7 +19,8 @@ logical structure and correctness.
  """

  from collections.abc import Callable
- from mapFolding.beDRY import outfitCountFolds
+ from astToolkit import ClassIsAndAttribute
+ from mapFolding import outfitCountFolds, ComputationState, The, getPathFilenameFoldsTotal
  from mapFolding.someAssemblyRequired import (
  ast_Identifier,
  astModuleToIngredientsFunction,
@@ -44,8 +45,6 @@ from mapFolding.someAssemblyRequired import (
  Then,
  unparseFindReplace,
  )
- from mapFolding.theSSOT import ComputationState, The
- from mapFolding.toolboxFilesystem import getPathFilenameFoldsTotal
  from os import PathLike
  from pathlib import Path, PurePath
  from typing import Any, Literal, overload
@@ -116,8 +115,8 @@ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclas
  instance_Identifier: The variable name to use for the dataclass instance in generated code.

  Returns:
- A ShatteredDataclass containing AST representations of all dataclass components,
- with imports, field definitions, annotations, and repackaging code.
+ shatteredDataclass: A ShatteredDataclass containing AST representations of all dataclass components,
+ with imports, field definitions, annotations, and repackaging code.

  Raises:
  ValueError: If the dataclass cannot be found in the specified module or if no counting variable is identified in the dataclass.
@@ -136,7 +135,9 @@ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclas
  countingVariable = dictionaryDeReConstruction[aField.name].name

  if countingVariable is None:
- raise ValueError(f"I could not find the counting variable in `{dataclass_Identifier = }` in `{logicalPathModule = }`.")
+ import warnings
+ warnings.warn(message=f"I could not find the counting variable in `{dataclass_Identifier = }` in `{logicalPathModule = }`.", category=UserWarning)
+ raise Exception

  shatteredDataclass = ShatteredDataclass(
  countingVariableAnnotation=dictionaryDeReConstruction[countingVariable].astAnnotation,
@@ -257,7 +258,7 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
  changeReturnParallelCallable = NodeChanger(Be.Return, Then.replaceWith(Make.Return(shatteredDataclass.countingVariableName)))
  ingredientsParallel.astFunctionDef.returns = shatteredDataclass.countingVariableAnnotation

- unpack4parallelCallable = NodeChanger(IfThis.isAssignAndValueIs(IfThis.isCallAttributeNamespace_Identifier(recipeFlow.concurrencyManagerNamespace, recipeFlow.concurrencyManagerIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))
+ unpack4parallelCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCallAttributeNamespace_Identifier(recipeFlow.concurrencyManagerNamespace, recipeFlow.concurrencyManagerIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))

  unpack4parallelCallable.visit(ingredientsDispatcher.astFunctionDef)
  replaceCall2concurrencyManager.visit(ingredientsDispatcher.astFunctionDef)
@@ -279,9 +280,9 @@ def removeDataclassFromFunction(ingredientsTarget: IngredientsFunction, shattere

  def unpackDataclassCallFunctionRepackDataclass(ingredientsCaller: IngredientsFunction, targetCallableIdentifier: ast_Identifier, shatteredDataclass: ShatteredDataclass) -> IngredientsFunction:
  astCallTargetCallable = Make.Call(Make.Name(targetCallableIdentifier), shatteredDataclass.listName4Parameters)
- replaceAssignTargetCallable = NodeChanger(IfThis.isAssignAndValueIs(IfThis.isCall_Identifier(targetCallableIdentifier)), Then.replaceWith(Make.Assign([shatteredDataclass.fragments4AssignmentOrParameters], value=astCallTargetCallable)))
- unpack4targetCallable = NodeChanger(IfThis.isAssignAndValueIs(IfThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))
- repack4targetCallable = NodeChanger(IfThis.isAssignAndValueIs(IfThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisBelow([shatteredDataclass.repack]))
+ replaceAssignTargetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier)), Then.replaceWith(Make.Assign([shatteredDataclass.fragments4AssignmentOrParameters], value=astCallTargetCallable)))
+ unpack4targetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))
+ repack4targetCallable = NodeChanger(ClassIsAndAttribute.valueIs(ast.Assign, IfThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisBelow([shatteredDataclass.repack]))
  replaceAssignTargetCallable.visit(ingredientsCaller.astFunctionDef)
  unpack4targetCallable.visit(ingredientsCaller.astFunctionDef)
  repack4targetCallable.visit(ingredientsCaller.astFunctionDef)

mapFolding/syntheticModules/countParallel.py (new file)
@@ -0,0 +1,98 @@
+ from concurrent.futures import Future as ConcurrentFuture, ProcessPoolExecutor
+ from copy import deepcopy
+ from mapFolding.dataBaskets import Array1DElephino, Array1DLeavesTotal, Array3D, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal, ParallelMapFoldingState
+ from multiprocessing import set_start_method as multiprocessing_set_start_method
+ from numba import jit
+ if __name__ == '__main__':
+ multiprocessing_set_start_method('spawn')
+
+ @jit(cache=True, error_model='numpy', fastmath=True, forceinline=True)
+ def count(groupsOfFolds: DatatypeFoldsTotal, gap1ndex: DatatypeElephino, gap1ndexCeiling: DatatypeElephino, indexDimension: DatatypeLeavesTotal, indexLeaf: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, leavesTotal: DatatypeLeavesTotal, taskDivisions: DatatypeLeavesTotal, taskIndex: DatatypeLeavesTotal) -> tuple[DatatypeFoldsTotal, DatatypeElephino, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal, Array1DLeavesTotal, Array1DElephino, Array1DLeavesTotal, Array1DLeavesTotal, Array1DLeavesTotal, Array3D, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal]:
+ while leaf1ndex > 0:
+ if leaf1ndex <= 1 or leafBelow[0] == 1:
+ if leaf1ndex > leavesTotal:
+ groupsOfFolds += 1
+ else:
+ dimensionsUnconstrained = dimensionsTotal
+ gap1ndexCeiling = gapRangeStart[leaf1ndex - 1]
+ indexDimension = 0
+ while indexDimension < dimensionsTotal:
+ leafConnectee = connectionGraph[indexDimension, leaf1ndex, leaf1ndex]
+ if leafConnectee == leaf1ndex:
+ dimensionsUnconstrained -= 1
+ else:
+ while leafConnectee != leaf1ndex:
+ if leaf1ndex != taskDivisions or leafConnectee % taskDivisions == taskIndex:
+ gapsWhere[gap1ndexCeiling] = leafConnectee
+ if countDimensionsGapped[leafConnectee] == 0:
+ gap1ndexCeiling += 1
+ countDimensionsGapped[leafConnectee] += 1
+ leafConnectee = connectionGraph[indexDimension, leaf1ndex, leafBelow[leafConnectee]]
+ indexDimension += 1
+ if not dimensionsUnconstrained:
+ indexLeaf = 0
+ while indexLeaf < leaf1ndex:
+ gapsWhere[gap1ndexCeiling] = indexLeaf
+ gap1ndexCeiling += 1
+ indexLeaf += 1
+ indexMiniGap = gap1ndex
+ while indexMiniGap < gap1ndexCeiling:
+ gapsWhere[gap1ndex] = gapsWhere[indexMiniGap]
+ if countDimensionsGapped[gapsWhere[indexMiniGap]] == dimensionsUnconstrained:
+ gap1ndex += 1
+ countDimensionsGapped[gapsWhere[indexMiniGap]] = 0
+ indexMiniGap += 1
+ while leaf1ndex > 0 and gap1ndex == gapRangeStart[leaf1ndex - 1]:
+ leaf1ndex -= 1
+ leafBelow[leafAbove[leaf1ndex]] = leafBelow[leaf1ndex]
+ leafAbove[leafBelow[leaf1ndex]] = leafAbove[leaf1ndex]
+ if leaf1ndex > 0:
+ gap1ndex -= 1
+ leafAbove[leaf1ndex] = gapsWhere[gap1ndex]
+ leafBelow[leaf1ndex] = leafBelow[leafAbove[leaf1ndex]]
+ leafBelow[leafAbove[leaf1ndex]] = leaf1ndex
+ leafAbove[leafBelow[leaf1ndex]] = leaf1ndex
+ gapRangeStart[leaf1ndex] = gap1ndex
+ leaf1ndex += 1
+ return (groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal, taskDivisions, taskIndex)
+
+ def unRepackParallelMapFoldingState(state: ParallelMapFoldingState) -> ParallelMapFoldingState:
+ mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
+ groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
+ gap1ndex: DatatypeElephino = state.gap1ndex
+ gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
+ indexDimension: DatatypeLeavesTotal = state.indexDimension
+ indexLeaf: DatatypeLeavesTotal = state.indexLeaf
+ indexMiniGap: DatatypeElephino = state.indexMiniGap
+ leaf1ndex: DatatypeLeavesTotal = state.leaf1ndex
+ leafConnectee: DatatypeLeavesTotal = state.leafConnectee
+ dimensionsUnconstrained: DatatypeLeavesTotal = state.dimensionsUnconstrained
+ countDimensionsGapped: Array1DLeavesTotal = state.countDimensionsGapped
+ gapRangeStart: Array1DElephino = state.gapRangeStart
+ gapsWhere: Array1DLeavesTotal = state.gapsWhere
+ leafAbove: Array1DLeavesTotal = state.leafAbove
+ leafBelow: Array1DLeavesTotal = state.leafBelow
+ connectionGraph: Array3D = state.connectionGraph
+ dimensionsTotal: DatatypeLeavesTotal = state.dimensionsTotal
+ leavesTotal: DatatypeLeavesTotal = state.leavesTotal
+ taskDivisions: DatatypeLeavesTotal = state.taskDivisions
+ taskIndex: DatatypeLeavesTotal = state.taskIndex
+ groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal, taskDivisions, taskIndex = count(groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal, taskDivisions, taskIndex)
+ state = ParallelMapFoldingState(mapShape=mapShape, groupsOfFolds=groupsOfFolds, gap1ndex=gap1ndex, gap1ndexCeiling=gap1ndexCeiling, indexDimension=indexDimension, indexLeaf=indexLeaf, indexMiniGap=indexMiniGap, leaf1ndex=leaf1ndex, leafConnectee=leafConnectee, dimensionsUnconstrained=dimensionsUnconstrained, countDimensionsGapped=countDimensionsGapped, gapRangeStart=gapRangeStart, gapsWhere=gapsWhere, leafAbove=leafAbove, leafBelow=leafBelow, taskDivisions=taskDivisions, taskIndex=taskIndex)
+ return state
+
+ def doTheNeedful(state: ParallelMapFoldingState, concurrencyLimit: int) -> tuple[int, list[ParallelMapFoldingState]]:
+ stateParallel = deepcopy(state)
+ listStatesParallel: list[ParallelMapFoldingState] = [stateParallel] * stateParallel.taskDivisions
+ groupsOfFoldsTotal: int = 0
+ dictionaryConcurrency: dict[int, ConcurrentFuture[ParallelMapFoldingState]] = {}
+ with ProcessPoolExecutor(concurrencyLimit) as concurrencyManager:
+ for indexSherpa in range(stateParallel.taskDivisions):
+ state = deepcopy(stateParallel)
+ state.taskIndex = indexSherpa
+ dictionaryConcurrency[indexSherpa] = concurrencyManager.submit(unRepackParallelMapFoldingState, state)
+ for indexSherpa in range(stateParallel.taskDivisions):
+ listStatesParallel[indexSherpa] = dictionaryConcurrency[indexSherpa].result()
+ groupsOfFoldsTotal += listStatesParallel[indexSherpa].groupsOfFolds
+ foldsTotal: int = groupsOfFoldsTotal * stateParallel.leavesTotal
+ return (foldsTotal, listStatesParallel)
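
The module above is the generated artifact; what follows is a minimal, hypothetical driver for it, not code from the package. The constructor arguments for ParallelMapFoldingState (a map shape plus taskDivisions) are an assumption, since this excerpt does not show dataBaskets.py in full.

    # Hypothetical driver for mapFolding/syntheticModules/countParallel.py (sketch only).
    from multiprocessing import set_start_method
    from mapFolding.dataBaskets import ParallelMapFoldingState
    from mapFolding.syntheticModules.countParallel import doTheNeedful

    if __name__ == '__main__':
        set_start_method('spawn')  # mirrors the prologue the generated module runs under __main__
        # Assumed constructor: a map shape plus taskDivisions; remaining fields use their defaults.
        state = ParallelMapFoldingState((2, 4), taskDivisions=4)
        foldsTotal, listStatesParallel = doTheNeedful(state, concurrencyLimit=4)
        print(foldsTotal)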

mapFolding/syntheticModules/dataPacking.py
@@ -1,7 +1,7 @@
  from mapFolding.dataBaskets import Array1DElephino, Array1DLeavesTotal, Array3D, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal, MapFoldingState
  from mapFolding.syntheticModules.theorem2Numba import count

- def doTheNeedful(state: MapFoldingState) -> MapFoldingState:
+ def sequential(state: MapFoldingState) -> MapFoldingState:
  mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
  groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
  gap1ndex: DatatypeElephino = state.gap1ndex
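
dataPacking.py keeps its unpack-call-repack structure but renames the entry point from doTheNeedful to sequential. Below is a minimal sketch of calling it directly, assembled from the MapFoldingState and initializeGroupsOfFolds usage visible in makeJobTheorem2Numba.py above; whether basecamp.py routes through this function is not shown in this excerpt.

    # Sketch only: drive the renamed sequential() entry point by hand.
    from mapFolding.dataBaskets import MapFoldingState
    from mapFolding.syntheticModules.dataPacking import sequential
    from mapFolding.syntheticModules.initializeCount import initializeGroupsOfFolds

    state = MapFoldingState((2, 4))         # construct the state from a map shape, as in makeJobTheorem2Numba.py
    state = initializeGroupsOfFolds(state)  # prime the computation state
    state = sequential(state)               # unpack fields, call the numba count, repack the dataclass
    print(state.groupsOfFolds * state.leavesTotal)  # foldsTotal = groupsOfFolds * leavesTotal, per countParallel.py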