mapFolding 0.9.4-py3-none-any.whl → 0.10.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. mapFolding/__init__.py +41 -7
  2. mapFolding/basecamp.py +100 -9
  3. mapFolding/beDRY.py +7 -15
  4. mapFolding/dataBaskets.py +12 -0
  5. mapFolding/datatypes.py +4 -4
  6. mapFolding/oeis.py +2 -7
  7. mapFolding/someAssemblyRequired/RecipeJob.py +97 -3
  8. mapFolding/someAssemblyRequired/Z0Z_makeSomeModules.py +143 -42
  9. mapFolding/someAssemblyRequired/__init__.py +38 -49
  10. mapFolding/someAssemblyRequired/_astTypes.py +117 -0
  11. mapFolding/someAssemblyRequired/_theTypes.py +12 -41
  12. mapFolding/someAssemblyRequired/_toolBe.py +524 -0
  13. mapFolding/someAssemblyRequired/_toolDOT.py +493 -0
  14. mapFolding/someAssemblyRequired/_toolGrab.py +653 -0
  15. mapFolding/someAssemblyRequired/_toolIfThis.py +193 -0
  16. mapFolding/someAssemblyRequired/_toolMake.py +339 -0
  17. mapFolding/someAssemblyRequired/_toolThen.py +63 -0
  18. mapFolding/someAssemblyRequired/_toolboxAST.py +3 -3
  19. mapFolding/someAssemblyRequired/_toolboxContainers.py +124 -29
  20. mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +274 -0
  21. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +12 -11
  22. mapFolding/someAssemblyRequired/toolboxNumba.py +4 -28
  23. mapFolding/someAssemblyRequired/transformationTools.py +46 -155
  24. mapFolding/syntheticModules/daoOfMapFolding.py +74 -0
  25. mapFolding/syntheticModules/dataPacking.py +1 -1
  26. mapFolding/syntheticModules/theorem2Numba.py +2 -8
  27. mapFolding/syntheticModules/theorem2Trimmed.py +43 -0
  28. mapFolding/toolFactory/astFactory.py +493 -0
  29. mapFolding/toolFactory/astFactory_annex.py +63 -0
  30. mapFolding/toolFactory/astFactory_docstrings.py +63 -0
  31. {mapfolding-0.9.4.dist-info → mapfolding-0.10.0.dist-info}/METADATA +2 -1
  32. mapfolding-0.10.0.dist-info/RECORD +66 -0
  33. {mapfolding-0.9.4.dist-info → mapfolding-0.10.0.dist-info}/WHEEL +1 -1
  34. tests/test_computations.py +1 -1
  35. mapFolding/Z0Z_flowControl.py +0 -117
  36. mapFolding/someAssemblyRequired/_tool_Make.py +0 -134
  37. mapFolding/someAssemblyRequired/_tool_Then.py +0 -157
  38. mapFolding/someAssemblyRequired/_toolboxAntecedents.py +0 -387
  39. mapfolding-0.9.4.dist-info/RECORD +0 -57
  40. {mapfolding-0.9.4.dist-info → mapfolding-0.10.0.dist-info}/entry_points.txt +0 -0
  41. {mapfolding-0.9.4.dist-info → mapfolding-0.10.0.dist-info}/licenses/LICENSE +0 -0
  42. {mapfolding-0.9.4.dist-info → mapfolding-0.10.0.dist-info}/top_level.txt +0 -0
@@ -25,11 +25,12 @@ from mapFolding.beDRY import outfitCountFolds
  from mapFolding.someAssemblyRequired import (
  ast_Identifier,
  astModuleToIngredientsFunction,
- be,
+ Be,
+ DeReConstructField2ast,
  DOT,
  extractClassDef,
- grab,
- ifThis,
+ Grab,
+ IfThis,
  importLogicalPath2Callable,
  IngredientsFunction,
  IngredientsModule,
@@ -68,7 +69,7 @@ def makeDictionaryFunctionDef(module: ast.Module) -> dict[ast_Identifier, ast.Fu
  A dictionary mapping function identifiers to their AST function definition nodes.
  """
  dictionaryIdentifier2FunctionDef: dict[ast_Identifier, ast.FunctionDef] = {}
- NodeTourist(be.FunctionDef, Then.updateKeyValueIn(DOT.name, Then.extractIt, dictionaryIdentifier2FunctionDef)).visit(module)
+ NodeTourist(Be.FunctionDef, Then.updateKeyValueIn(DOT.name, Then.extractIt, dictionaryIdentifier2FunctionDef)).visit(module)
  return dictionaryIdentifier2FunctionDef

  def inlineFunctionDef(identifierToInline: ast_Identifier, module: ast.Module) -> ast.FunctionDef:
@@ -97,12 +98,12 @@ def inlineFunctionDef(identifierToInline: ast_Identifier, module: ast.Module) ->
  raise ValueError(f"FunctionDefToInline not found in dictionaryIdentifier2FunctionDef: {identifierToInline = }") from ERRORmessage

  listIdentifiersCalledFunctions: list[ast_Identifier] = []
- findIdentifiersToInline = NodeTourist(findThis = ifThis.isCallToName, doThat = grab.funcDOTidAttribute(Then.appendTo(listIdentifiersCalledFunctions)))
+ findIdentifiersToInline = NodeTourist(findThis = IfThis.isCallToName, doThat = Grab.funcDOTidAttribute(Then.appendTo(listIdentifiersCalledFunctions)))
  findIdentifiersToInline.visit(FunctionDefToInline)

  dictionary4Inlining: dict[ast_Identifier, ast.FunctionDef] = {}
  for identifier in sorted(set(listIdentifiersCalledFunctions).intersection(dictionaryFunctionDef.keys())):
- if NodeTourist(ifThis.matchesMeButNotAnyDescendant(ifThis.isCall_Identifier(identifier)), Then.extractIt).captureLastMatch(module) is not None:
+ if NodeTourist(IfThis.matchesMeButNotAnyDescendant(IfThis.isCall_Identifier(identifier)), Then.extractIt).captureLastMatch(module) is not None:
  dictionary4Inlining[identifier] = dictionaryFunctionDef[identifier]

  keepGoing = True
@@ -115,25 +116,28 @@ def inlineFunctionDef(identifierToInline: ast_Identifier, module: ast.Module) ->
  if len(listIdentifiersCalledFunctions) > 0:
  keepGoing = True
  for identifier in listIdentifiersCalledFunctions:
- if NodeTourist(ifThis.matchesMeButNotAnyDescendant(ifThis.isCall_Identifier(identifier)), Then.extractIt).captureLastMatch(module) is not None:
+ if NodeTourist(IfThis.matchesMeButNotAnyDescendant(IfThis.isCall_Identifier(identifier)), Then.extractIt).captureLastMatch(module) is not None:
  FunctionDefTarget = dictionaryFunctionDef[identifier]
  if len(FunctionDefTarget.body) == 1:
- replacement = NodeTourist(be.Return, Then.extractIt(DOT.value)).captureLastMatch(FunctionDefTarget)
- inliner = NodeChanger(ifThis.isCall_Identifier(identifier), Then.replaceWith(replacement))
+ replacement = NodeTourist(Be.Return, Then.extractIt(DOT.value)).captureLastMatch(FunctionDefTarget)
+
+ findThis = IfThis.isCall_Identifier(identifier)
+ doThat = Then.replaceWith(replacement)
+ inliner = NodeChanger(findThis, doThat)
  for astFunctionDef in dictionary4Inlining.values():
  inliner.visit(astFunctionDef)
  else:
- inliner = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(identifier)),Then.replaceWith(FunctionDefTarget.body[0:-1]))
+ inliner = NodeChanger(IfThis.isAssignAndValueIs(IfThis.isCall_Identifier(identifier)),Then.replaceWith(FunctionDefTarget.body[0:-1]))
  for astFunctionDef in dictionary4Inlining.values():
  inliner.visit(astFunctionDef)

  for identifier, FunctionDefTarget in dictionary4Inlining.items():
  if len(FunctionDefTarget.body) == 1:
- replacement = NodeTourist(be.Return, Then.extractIt(DOT.value)).captureLastMatch(FunctionDefTarget)
- inliner = NodeChanger(ifThis.isCall_Identifier(identifier), Then.replaceWith(replacement))
+ replacement = NodeTourist(Be.Return, Then.extractIt(DOT.value)).captureLastMatch(FunctionDefTarget)
+ inliner = NodeChanger(IfThis.isCall_Identifier(identifier), Then.replaceWith(replacement))
  inliner.visit(FunctionDefToInline)
  else:
- inliner = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(identifier)),Then.replaceWith(FunctionDefTarget.body[0:-1]))
+ inliner = NodeChanger(IfThis.isAssignAndValueIs(IfThis.isCall_Identifier(identifier)),Then.replaceWith(FunctionDefTarget.body[0:-1]))
  inliner.visit(FunctionDefToInline)
  ast.fix_missing_locations(FunctionDefToInline)
  return FunctionDefToInline
@@ -177,120 +181,6 @@ def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: bool =
  pathFilenameJob.write_bytes(pickle.dumps(stateUniversal))
  return pathFilenameJob

- @dataclasses.dataclass
- class DeReConstructField2ast:
- """
- Transform a dataclass field into AST node representations for code generation.
-
- This class extracts and transforms a dataclass Field object into various AST node
- representations needed for code generation. It handles the conversion of field
- attributes, type annotations, and metadata into AST constructs that can be used
- to reconstruct the field in generated code.
-
- The class is particularly important for decomposing dataclass fields (like those in
- ComputationState) to enable their use in specialized contexts like Numba-optimized
- functions, where the full dataclass cannot be directly used but its contents need
- to be accessible.
-
- Each field is processed according to its type and metadata to create appropriate
- variable declarations, type annotations, and initialization code as AST nodes.
- """
- dataclassesDOTdataclassLogicalPathModule: dataclasses.InitVar[str_nameDOTname]
- dataclassClassDef: dataclasses.InitVar[ast.ClassDef]
- dataclassesDOTdataclassInstance_Identifier: dataclasses.InitVar[ast_Identifier]
- field: dataclasses.InitVar[dataclasses.Field[Any]]
-
- ledger: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
-
- name: ast_Identifier = dataclasses.field(init=False)
- typeBuffalo: type[Any] | str | Any = dataclasses.field(init=False)
- default: Any | None = dataclasses.field(init=False)
- default_factory: Callable[..., Any] | None = dataclasses.field(init=False)
- repr: bool = dataclasses.field(init=False)
- hash: bool | None = dataclasses.field(init=False)
- init: bool = dataclasses.field(init=False)
- compare: bool = dataclasses.field(init=False)
- metadata: dict[Any, Any] = dataclasses.field(init=False)
- kw_only: bool = dataclasses.field(init=False)
-
- astName: ast.Name = dataclasses.field(init=False)
- ast_keyword_field__field: ast.keyword = dataclasses.field(init=False)
- ast_nameDOTname: ast.Attribute = dataclasses.field(init=False)
- astAnnotation: ast.expr = dataclasses.field(init=False)
- ast_argAnnotated: ast.arg = dataclasses.field(init=False)
- astAnnAssignConstructor: ast.AnnAssign|ast.Assign = dataclasses.field(init=False)
- Z0Z_hack: tuple[ast.AnnAssign|ast.Assign, str] = dataclasses.field(init=False)
-
- def __post_init__(self, dataclassesDOTdataclassLogicalPathModule: str_nameDOTname, dataclassClassDef: ast.ClassDef, dataclassesDOTdataclassInstance_Identifier: ast_Identifier, field: dataclasses.Field[Any]) -> None:
- self.compare = field.compare
- self.default = field.default if field.default is not dataclasses.MISSING else None
- self.default_factory = field.default_factory if field.default_factory is not dataclasses.MISSING else None
- self.hash = field.hash
- self.init = field.init
- self.kw_only = field.kw_only if field.kw_only is not dataclasses.MISSING else False
- self.metadata = dict(field.metadata)
- self.name = field.name
- self.repr = field.repr
- self.typeBuffalo = field.type
-
- self.astName = Make.Name(self.name)
- self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
- self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstance_Identifier), self.name)
-
- sherpa = NodeTourist(ifThis.isAnnAssign_targetIs(ifThis.isName_Identifier(self.name)), Then.extractIt(DOT.annotation)).captureLastMatch(dataclassClassDef)
- if sherpa is None: raise raiseIfNoneGitHubIssueNumber3
- else: self.astAnnotation = sherpa
-
- self.ast_argAnnotated = Make.arg(self.name, self.astAnnotation)
- """
- from ast import Module, Expr, Subscript, Name, Tuple, Load
- Subscript(
- value=Name(id='ndarray', ctx=Load()),
- slice=Tuple(
- elts=[
- Subscript(
- value=Name(id='tuple', ctx=Load()),
- slice=Name(id='int', ctx=Load()),
- ctx=Load()),
- Subscript(
- value=Name(id='dtype', ctx=Load()),
- slice=Name(id='NumPyLeavesTotal', ctx=Load()),
- ctx=Load())],
- ctx=Load()),
- ctx=Load()
- )
-
- """
- dtype = self.metadata.get('dtype', None)
- if dtype:
- moduleWithLogicalPath: str_nameDOTname = 'numpy'
- annotationType = 'ndarray'
- self.ledger.addImportFrom_asStr(moduleWithLogicalPath, annotationType)
- self.ledger.addImportFrom_asStr(moduleWithLogicalPath, 'dtype')
- axesSubscript = Make.Subscript(Make.Name('tuple'), Make.Name('uint8'))
- dtype_asnameName: ast.Name = self.astAnnotation # type: ignore
- if dtype_asnameName.id == 'Array3D':
- axesSubscript = Make.Subscript(Make.Name('tuple'), Make.Tuple([Make.Name('uint8'), Make.Name('uint8'), Make.Name('uint8')]))
- ast_expr = Make.Subscript(Make.Name(annotationType), Make.Tuple([axesSubscript, Make.Subscript(Make.Name('dtype'), dtype_asnameName)]))
- constructor = 'array'
- self.ledger.addImportFrom_asStr(moduleWithLogicalPath, constructor)
- dtypeIdentifier: ast_Identifier = dtype.__name__
- self.ledger.addImportFrom_asStr(moduleWithLogicalPath, dtypeIdentifier, dtype_asnameName.id)
- self.astAnnAssignConstructor = Make.AnnAssign(self.astName, ast_expr, Make.Call(Make.Name(constructor), list_astKeywords=[Make.keyword('dtype', dtype_asnameName)]))
- self.astAnnAssignConstructor = Make.Assign([self.astName], Make.Call(Make.Name(constructor), list_astKeywords=[Make.keyword('dtype', dtype_asnameName)]))
- self.Z0Z_hack = (self.astAnnAssignConstructor, 'array')
- elif isinstance(self.astAnnotation, ast.Name):
- self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, Make.Call(self.astAnnotation, [Make.Constant(-1)]))
- self.Z0Z_hack = (self.astAnnAssignConstructor, 'scalar')
- elif isinstance(self.astAnnotation, ast.Subscript):
- elementConstructor: ast_Identifier = self.metadata['elementConstructor']
- self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, elementConstructor)
- takeTheTuple: ast.Tuple = deepcopy(self.astAnnotation.slice) # type: ignore
- self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, takeTheTuple)
- self.Z0Z_hack = (self.astAnnAssignConstructor, elementConstructor)
- if isinstance(self.astAnnotation, ast.Name):
- self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id) # pyright: ignore [reportUnknownArgumentType, reportUnknownMemberType, reportIJustCalledATypeGuardMethod_WTF]
-
  def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclass_Identifier: ast_Identifier, instance_Identifier: ast_Identifier) -> ShatteredDataclass:
  """
  Decompose a dataclass definition into AST components for manipulation and code generation.
@@ -350,7 +240,7 @@ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclas
  map_stateDOTfield2Name={dictionaryDeReConstruction[field].ast_nameDOTname: dictionaryDeReConstruction[field].astName for field in Official_fieldOrder},
  )
  shatteredDataclass.fragments4AssignmentOrParameters = Make.Tuple(shatteredDataclass.listName4Parameters, ast.Store())
- shatteredDataclass.repack = Make.Assign(listTargets=[Make.Name(instance_Identifier)], value=Make.Call(Make.Name(dataclass_Identifier), list_astKeywords=shatteredDataclass.list_keyword_field__field4init))
+ shatteredDataclass.repack = Make.Assign([Make.Name(instance_Identifier)], value=Make.Call(Make.Name(dataclass_Identifier), list_keyword=shatteredDataclass.list_keyword_field__field4init))
  shatteredDataclass.signatureReturnAnnotation = Make.Subscript(Make.Name('tuple'), Make.Tuple(shatteredDataclass.listAnnotations))

  shatteredDataclass.imports.update(*(dictionaryDeReConstruction[field].ledger for field in Official_fieldOrder))
@@ -422,18 +312,18 @@ def removeUnusedParameters(ingredientsFunction: IngredientsFunction) -> Ingredie
  list_arg_arg: list[ast_Identifier] = [ast_arg.arg for ast_arg in list_argCuzMyBrainRefusesToThink]
  listName: list[ast.Name] = []
  fauxFunctionDef = deepcopy(ingredientsFunction.astFunctionDef)
- NodeChanger(be.Return, Then.removeIt).visit(fauxFunctionDef)
- NodeTourist(be.Name, Then.appendTo(listName)).visit(fauxFunctionDef)
+ NodeChanger(Be.Return, Then.removeIt).visit(fauxFunctionDef)
+ NodeTourist(Be.Name, Then.appendTo(listName)).visit(fauxFunctionDef)
  list_Identifiers: list[ast_Identifier] = [astName.id for astName in listName]
  list_IdentifiersNotUsed: list[ast_Identifier] = list(set(list_arg_arg) - set(list_Identifiers))
  for arg_Identifier in list_IdentifiersNotUsed:
- remove_arg = NodeChanger(ifThis.is_arg_Identifier(arg_Identifier), Then.removeIt)
+ remove_arg = NodeChanger(IfThis.is_arg_Identifier(arg_Identifier), Then.removeIt)
  remove_arg.visit(ingredientsFunction.astFunctionDef)

  list_argCuzMyBrainRefusesToThink = ingredientsFunction.astFunctionDef.args.args + ingredientsFunction.astFunctionDef.args.posonlyargs + ingredientsFunction.astFunctionDef.args.kwonlyargs

  listName: list[ast.Name] = [Make.Name(ast_arg.arg) for ast_arg in list_argCuzMyBrainRefusesToThink]
- replaceReturn = NodeChanger(be.Return, Then.replaceWith(Make.Return(Make.Tuple(listName))))
+ replaceReturn = NodeChanger(Be.Return, Then.replaceWith(Make.Return(Make.Tuple(listName))))
  replaceReturn.visit(ingredientsFunction.astFunctionDef)

  list_annotation: list[ast.expr] = [ast_arg.annotation for ast_arg in list_argCuzMyBrainRefusesToThink if ast_arg.annotation is not None]
@@ -469,7 +359,7 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
  (recipeFlow.sourceCallableSequential, recipeFlow.callableSequential),]
  for ingredients in listAllIngredientsFunctions:
  for source_Identifier, recipe_Identifier in listFindReplace:
- updateCallName = NodeChanger(ifThis.isCall_Identifier(source_Identifier), grab.funcAttribute(Then.replaceWith(Make.Name(recipe_Identifier))))
+ updateCallName = NodeChanger(IfThis.isCall_Identifier(source_Identifier), Grab.funcAttribute(Then.replaceWith(Make.Name(recipe_Identifier))))
  updateCallName.visit(ingredients.astFunctionDef)

  ingredientsDispatcher.astFunctionDef.name = recipeFlow.callableDispatcher
@@ -483,13 +373,13 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
  (recipeFlow.sourceConcurrencyManagerNamespace, recipeFlow.concurrencyManagerNamespace),]
  for ingredients in listAllIngredientsFunctions:
  for source_Identifier, recipe_Identifier in listFindReplace:
- updateName = NodeChanger(ifThis.isName_Identifier(source_Identifier) , grab.idAttribute(Then.replaceWith(recipe_Identifier)))
- update_arg = NodeChanger(ifThis.isArgument_Identifier(source_Identifier), grab.argAttribute(Then.replaceWith(recipe_Identifier))) # type: ignore
+ updateName = NodeChanger(IfThis.isName_Identifier(source_Identifier) , Grab.idAttribute(Then.replaceWith(recipe_Identifier)))
+ update_arg = NodeChanger(IfThis.isArgument_Identifier(source_Identifier), Grab.argAttribute(Then.replaceWith(recipe_Identifier))) # type: ignore
  updateName.visit(ingredients.astFunctionDef)
  update_arg.visit(ingredients.astFunctionDef)

- updateConcurrencyManager = NodeChanger(ifThis.isCallAttributeNamespace_Identifier(recipeFlow.sourceConcurrencyManagerNamespace, recipeFlow.sourceConcurrencyManagerIdentifier)
- , grab.funcAttribute(Then.replaceWith(Make.Attribute(Make.Name(recipeFlow.concurrencyManagerNamespace), recipeFlow.concurrencyManagerIdentifier))))
+ updateConcurrencyManager = NodeChanger(IfThis.isCallAttributeNamespace_Identifier(recipeFlow.sourceConcurrencyManagerNamespace, recipeFlow.sourceConcurrencyManagerIdentifier)
+ , Grab.funcAttribute(Then.replaceWith(Make.Attribute(Make.Name(recipeFlow.concurrencyManagerNamespace), recipeFlow.concurrencyManagerIdentifier))))
  updateConcurrencyManager.visit(ingredientsDispatcher.astFunctionDef)

  # shatter Dataclass =======================================================
@@ -511,7 +401,7 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:

  # parallelCallable =========================================================
  if recipeFlow.removeDataclassParallel:
- ingredientsParallel.astFunctionDef.args = Make.argumentsSpecification(args=shatteredDataclass.list_argAnnotated4ArgumentsSpecification)
+ ingredientsParallel.astFunctionDef.args = Make.arguments(args=shatteredDataclass.list_argAnnotated4ArgumentsSpecification)

  ingredientsParallel.astFunctionDef = Z0Z_lameFindReplace(ingredientsParallel.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)

@@ -522,18 +412,25 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:

  listParameters = [parameter for parameter in shatteredDataclass.listName4Parameters if parameter.id in list_arg_arg]

- replaceCall2concurrencyManager = NodeChanger(ifThis.isCallAttributeNamespace_Identifier(recipeFlow.concurrencyManagerNamespace, recipeFlow.concurrencyManagerIdentifier), Then.replaceWith(Make.Call(Make.Attribute(Make.Name(recipeFlow.concurrencyManagerNamespace), recipeFlow.concurrencyManagerIdentifier), listArguments=[Make.Name(recipeFlow.callableParallel)] + listParameters)))
+ replaceCall2concurrencyManager = NodeChanger(IfThis.isCallAttributeNamespace_Identifier(recipeFlow.concurrencyManagerNamespace, recipeFlow.concurrencyManagerIdentifier), Then.replaceWith(Make.Call(Make.Attribute(Make.Name(recipeFlow.concurrencyManagerNamespace), recipeFlow.concurrencyManagerIdentifier), [Make.Name(recipeFlow.callableParallel)] + listParameters)))
+
+ def getIt(astCallConcurrencyResult: list[ast.Call]) -> Callable[[ast.AST], ast.AST]:
+ # TODO I cannot remember why I made this function. It doesn't fit with how I normally do things.
+ def workhorse(node: ast.AST) -> ast.AST:
+ NodeTourist(Be.Call, Then.appendTo(astCallConcurrencyResult)).visit(node)
+ return node
+ return workhorse

  # NOTE I am dissatisfied with this logic for many reasons, including that it requires separate NodeCollector and NodeReplacer instances.
  astCallConcurrencyResult: list[ast.Call] = []
- get_astCallConcurrencyResult = NodeTourist(ifThis.isAssignAndTargets0Is(ifThis.isSubscript_Identifier(getTheOtherRecord_damn)), getIt(astCallConcurrencyResult))
+ get_astCallConcurrencyResult = NodeTourist(IfThis.isAssignAndTargets0Is(IfThis.isSubscript_Identifier(getTheOtherRecord_damn)), getIt(astCallConcurrencyResult))
  get_astCallConcurrencyResult.visit(ingredientsDispatcher.astFunctionDef)
- replaceAssignParallelCallable = NodeChanger(ifThis.isAssignAndTargets0Is(ifThis.isSubscript_Identifier(getTheOtherRecord_damn)), grab.valueAttribute(Then.replaceWith(astCallConcurrencyResult[0])))
+ replaceAssignParallelCallable = NodeChanger(IfThis.isAssignAndTargets0Is(IfThis.isSubscript_Identifier(getTheOtherRecord_damn)), Grab.valueAttribute(Then.replaceWith(astCallConcurrencyResult[0])))
  replaceAssignParallelCallable.visit(ingredientsDispatcher.astFunctionDef)
- changeReturnParallelCallable = NodeChanger(be.Return, Then.replaceWith(Make.Return(shatteredDataclass.countingVariableName)))
+ changeReturnParallelCallable = NodeChanger(Be.Return, Then.replaceWith(Make.Return(shatteredDataclass.countingVariableName)))
  ingredientsParallel.astFunctionDef.returns = shatteredDataclass.countingVariableAnnotation

- unpack4parallelCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCallAttributeNamespace_Identifier(recipeFlow.concurrencyManagerNamespace, recipeFlow.concurrencyManagerIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))
+ unpack4parallelCallable = NodeChanger(IfThis.isAssignAndValueIs(IfThis.isCallAttributeNamespace_Identifier(recipeFlow.concurrencyManagerNamespace, recipeFlow.concurrencyManagerIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))

  unpack4parallelCallable.visit(ingredientsDispatcher.astFunctionDef)
  replaceCall2concurrencyManager.visit(ingredientsDispatcher.astFunctionDef)
@@ -546,29 +443,23 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
  return ingredientsModuleNumbaUnified

  def removeDataclassFromFunction(ingredientsTarget: IngredientsFunction, shatteredDataclass: ShatteredDataclass) -> IngredientsFunction:
- ingredientsTarget.astFunctionDef.args = Make.argumentsSpecification(args=shatteredDataclass.list_argAnnotated4ArgumentsSpecification)
+ ingredientsTarget.astFunctionDef.args = Make.arguments(args=shatteredDataclass.list_argAnnotated4ArgumentsSpecification)
  ingredientsTarget.astFunctionDef.returns = shatteredDataclass.signatureReturnAnnotation
- changeReturnCallable = NodeChanger(be.Return, Then.replaceWith(Make.Return(shatteredDataclass.fragments4AssignmentOrParameters)))
+ changeReturnCallable = NodeChanger(Be.Return, Then.replaceWith(Make.Return(shatteredDataclass.fragments4AssignmentOrParameters)))
  changeReturnCallable.visit(ingredientsTarget.astFunctionDef)
  ingredientsTarget.astFunctionDef = Z0Z_lameFindReplace(ingredientsTarget.astFunctionDef, shatteredDataclass.map_stateDOTfield2Name)
  return ingredientsTarget

  def unpackDataclassCallFunctionRepackDataclass(ingredientsCaller: IngredientsFunction, targetCallableIdentifier: ast_Identifier, shatteredDataclass: ShatteredDataclass) -> IngredientsFunction:
  astCallTargetCallable = Make.Call(Make.Name(targetCallableIdentifier), shatteredDataclass.listName4Parameters)
- replaceAssignTargetCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(targetCallableIdentifier)), Then.replaceWith(Make.Assign(listTargets=[shatteredDataclass.fragments4AssignmentOrParameters], value=astCallTargetCallable)))
- unpack4targetCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))
- repack4targetCallable = NodeChanger(ifThis.isAssignAndValueIs(ifThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisBelow([shatteredDataclass.repack]))
+ replaceAssignTargetCallable = NodeChanger(IfThis.isAssignAndValueIs(IfThis.isCall_Identifier(targetCallableIdentifier)), Then.replaceWith(Make.Assign([shatteredDataclass.fragments4AssignmentOrParameters], value=astCallTargetCallable)))
+ unpack4targetCallable = NodeChanger(IfThis.isAssignAndValueIs(IfThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisAbove(shatteredDataclass.listUnpack))
+ repack4targetCallable = NodeChanger(IfThis.isAssignAndValueIs(IfThis.isCall_Identifier(targetCallableIdentifier)), Then.insertThisBelow([shatteredDataclass.repack]))
  replaceAssignTargetCallable.visit(ingredientsCaller.astFunctionDef)
  unpack4targetCallable.visit(ingredientsCaller.astFunctionDef)
  repack4targetCallable.visit(ingredientsCaller.astFunctionDef)
  return ingredientsCaller

- def getIt(astCallConcurrencyResult: list[ast.Call]) -> Callable[[ast.AST], ast.AST]:
- def workhorse(node: ast.AST) -> ast.AST:
- NodeTourist(be.Call, Then.appendTo(astCallConcurrencyResult)).visit(node)
- return node
- return workhorse
-
  dictionaryEstimates: dict[tuple[int, ...], int] = {
  (2,2,2,2,2,2,2,2): 798148657152000,
  (2,21): 776374224866624,
@@ -603,7 +494,7 @@ def Z0Z_lameFindReplace(astTree: 个, mappingFindReplaceNodes: Mapping[ast.AST,

  while keepGoing:
  for nodeFind, nodeReplace in mappingFindReplaceNodes.items():
- NodeChanger(ifThis.Z0Z_unparseIs(nodeFind), Then.replaceWith(nodeReplace)).visit(newTree)
+ NodeChanger(IfThis.Z0Z_unparseIs(nodeFind), Then.replaceWith(nodeReplace)).visit(newTree)

  if ast.unparse(newTree) == ast.unparse(astTree):
  keepGoing = False
@@ -0,0 +1,74 @@
+ from mapFolding.dataBaskets import Array1DElephino, Array1DLeavesTotal, Array3D, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal, MapFoldingState
+ from numba import jit
+
+ @jit(cache=True, error_model='numpy', fastmath=True, forceinline=True)
+ def count(groupsOfFolds: DatatypeFoldsTotal, gap1ndex: DatatypeElephino, gap1ndexCeiling: DatatypeElephino, indexDimension: DatatypeLeavesTotal, indexLeaf: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, leavesTotal: DatatypeLeavesTotal) -> tuple[DatatypeFoldsTotal, DatatypeElephino, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal, Array1DLeavesTotal, Array1DElephino, Array1DLeavesTotal, Array1DLeavesTotal, Array1DLeavesTotal, Array3D, DatatypeLeavesTotal, DatatypeLeavesTotal]:
+ while leaf1ndex > 0:
+ if leaf1ndex <= 1 or leafBelow[0] == 1:
+ if leaf1ndex > leavesTotal:
+ groupsOfFolds += 1
+ else:
+ dimensionsUnconstrained = dimensionsTotal
+ gap1ndexCeiling = gapRangeStart[leaf1ndex - 1]
+ indexDimension = 0
+ while indexDimension < dimensionsTotal:
+ leafConnectee = connectionGraph[indexDimension, leaf1ndex, leaf1ndex]
+ if leafConnectee == leaf1ndex:
+ dimensionsUnconstrained -= 1
+ else:
+ while leafConnectee != leaf1ndex:
+ gapsWhere[gap1ndexCeiling] = leafConnectee
+ if countDimensionsGapped[leafConnectee] == 0:
+ gap1ndexCeiling += 1
+ countDimensionsGapped[leafConnectee] += 1
+ leafConnectee = connectionGraph[indexDimension, leaf1ndex, leafBelow[leafConnectee]]
+ indexDimension += 1
+ if not dimensionsUnconstrained:
+ indexLeaf = 0
+ while indexLeaf < leaf1ndex:
+ gapsWhere[gap1ndexCeiling] = indexLeaf
+ gap1ndexCeiling += 1
+ indexLeaf += 1
+ indexMiniGap = gap1ndex
+ while indexMiniGap < gap1ndexCeiling:
+ gapsWhere[gap1ndex] = gapsWhere[indexMiniGap]
+ if countDimensionsGapped[gapsWhere[indexMiniGap]] == dimensionsUnconstrained:
+ gap1ndex += 1
+ countDimensionsGapped[gapsWhere[indexMiniGap]] = 0
+ indexMiniGap += 1
+ while leaf1ndex > 0 and gap1ndex == gapRangeStart[leaf1ndex - 1]:
+ leaf1ndex -= 1
+ leafBelow[leafAbove[leaf1ndex]] = leafBelow[leaf1ndex]
+ leafAbove[leafBelow[leaf1ndex]] = leafAbove[leaf1ndex]
+ if leaf1ndex > 0:
+ gap1ndex -= 1
+ leafAbove[leaf1ndex] = gapsWhere[gap1ndex]
+ leafBelow[leaf1ndex] = leafBelow[leafAbove[leaf1ndex]]
+ leafBelow[leafAbove[leaf1ndex]] = leaf1ndex
+ leafAbove[leafBelow[leaf1ndex]] = leaf1ndex
+ gapRangeStart[leaf1ndex] = gap1ndex
+ leaf1ndex += 1
+ return (groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal)
+
+ def doTheNeedful(state: MapFoldingState) -> MapFoldingState:
+ mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
+ groupsOfFolds: DatatypeFoldsTotal = state.groupsOfFolds
+ gap1ndex: DatatypeElephino = state.gap1ndex
+ gap1ndexCeiling: DatatypeElephino = state.gap1ndexCeiling
+ indexDimension: DatatypeLeavesTotal = state.indexDimension
+ indexLeaf: DatatypeLeavesTotal = state.indexLeaf
+ indexMiniGap: DatatypeElephino = state.indexMiniGap
+ leaf1ndex: DatatypeLeavesTotal = state.leaf1ndex
+ leafConnectee: DatatypeLeavesTotal = state.leafConnectee
+ dimensionsUnconstrained: DatatypeLeavesTotal = state.dimensionsUnconstrained
+ countDimensionsGapped: Array1DLeavesTotal = state.countDimensionsGapped
+ gapRangeStart: Array1DElephino = state.gapRangeStart
+ gapsWhere: Array1DLeavesTotal = state.gapsWhere
+ leafAbove: Array1DLeavesTotal = state.leafAbove
+ leafBelow: Array1DLeavesTotal = state.leafBelow
+ connectionGraph: Array3D = state.connectionGraph
+ dimensionsTotal: DatatypeLeavesTotal = state.dimensionsTotal
+ leavesTotal: DatatypeLeavesTotal = state.leavesTotal
+ groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal = count(groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal)
+ state = MapFoldingState(mapShape=mapShape, groupsOfFolds=groupsOfFolds, gap1ndex=gap1ndex, gap1ndexCeiling=gap1ndexCeiling, indexDimension=indexDimension, indexLeaf=indexLeaf, indexMiniGap=indexMiniGap, leaf1ndex=leaf1ndex, leafConnectee=leafConnectee, dimensionsUnconstrained=dimensionsUnconstrained, countDimensionsGapped=countDimensionsGapped, gapRangeStart=gapRangeStart, gapsWhere=gapsWhere, leafAbove=leafAbove, leafBelow=leafBelow)
+ return state
@@ -20,6 +20,6 @@ def doTheNeedful(state: MapFoldingState) -> MapFoldingState:
  connectionGraph: Array3D = state.connectionGraph
  dimensionsTotal: DatatypeLeavesTotal = state.dimensionsTotal
  leavesTotal: DatatypeLeavesTotal = state.leavesTotal
- groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal = count(groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal)
+ groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal = count(groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal)
  state = MapFoldingState(mapShape=mapShape, groupsOfFolds=groupsOfFolds, gap1ndex=gap1ndex, gap1ndexCeiling=gap1ndexCeiling, indexDimension=indexDimension, indexLeaf=indexLeaf, indexMiniGap=indexMiniGap, leaf1ndex=leaf1ndex, leafConnectee=leafConnectee, dimensionsUnconstrained=dimensionsUnconstrained, countDimensionsGapped=countDimensionsGapped, gapRangeStart=gapRangeStart, gapsWhere=gapsWhere, leafAbove=leafAbove, leafBelow=leafBelow)
  return state
@@ -2,7 +2,7 @@ from mapFolding.dataBaskets import Array1DElephino, Array1DLeavesTotal, Array3D,
  from numba import jit

  @jit(cache=True, error_model='numpy', fastmath=True, forceinline=True)
- def count(groupsOfFolds: DatatypeFoldsTotal, gap1ndex: DatatypeElephino, gap1ndexCeiling: DatatypeElephino, indexDimension: DatatypeLeavesTotal, indexLeaf: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, leavesTotal: DatatypeLeavesTotal) -> tuple[DatatypeFoldsTotal, DatatypeElephino, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal, Array1DLeavesTotal, Array1DElephino, Array1DLeavesTotal, Array1DLeavesTotal, Array1DLeavesTotal, Array3D, DatatypeLeavesTotal, DatatypeLeavesTotal]:
+ def count(groupsOfFolds: DatatypeFoldsTotal, gap1ndex: DatatypeElephino, gap1ndexCeiling: DatatypeElephino, indexDimension: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, leavesTotal: DatatypeLeavesTotal) -> tuple[DatatypeFoldsTotal, DatatypeElephino, DatatypeElephino, DatatypeLeavesTotal, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal, Array1DLeavesTotal, Array1DElephino, Array1DLeavesTotal, Array1DLeavesTotal, Array1DLeavesTotal, Array3D, DatatypeLeavesTotal, DatatypeLeavesTotal]:
  while leaf1ndex > 4:
  if leafBelow[0] == 1:
  if leaf1ndex > leavesTotal:
@@ -23,12 +23,6 @@ def count(groupsOfFolds: DatatypeFoldsTotal, gap1ndex: DatatypeElephino, gap1nde
  countDimensionsGapped[leafConnectee] += 1
  leafConnectee = connectionGraph[indexDimension, leaf1ndex, leafBelow[leafConnectee]]
  indexDimension += 1
- if not dimensionsUnconstrained:
- indexLeaf = 0
- while indexLeaf < leaf1ndex:
- gapsWhere[gap1ndexCeiling] = indexLeaf
- gap1ndexCeiling += 1
- indexLeaf += 1
  indexMiniGap = gap1ndex
  while indexMiniGap < gap1ndexCeiling:
  gapsWhere[gap1ndex] = gapsWhere[indexMiniGap]
@@ -48,4 +42,4 @@ def count(groupsOfFolds: DatatypeFoldsTotal, gap1ndex: DatatypeElephino, gap1nde
  gapRangeStart[leaf1ndex] = gap1ndex
  leaf1ndex += 1
  groupsOfFolds *= 2
- return (groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal)
+ return (groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal)
@@ -0,0 +1,43 @@
+ from mapFolding.dataBaskets import MapFoldingState
+
+ def count(state: MapFoldingState) -> MapFoldingState:
+ while state.leaf1ndex > 4:
+ if state.leafBelow[0] == 1:
+ if state.leaf1ndex > state.leavesTotal:
+ state.groupsOfFolds += 1
+ else:
+ state.dimensionsUnconstrained = state.dimensionsTotal
+ state.gap1ndexCeiling = state.gapRangeStart[state.leaf1ndex - 1]
+ state.indexDimension = 0
+ while state.indexDimension < state.dimensionsTotal:
+ state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leaf1ndex]
+ if state.leafConnectee == state.leaf1ndex:
+ state.dimensionsUnconstrained -= 1
+ else:
+ while state.leafConnectee != state.leaf1ndex:
+ state.gapsWhere[state.gap1ndexCeiling] = state.leafConnectee
+ if state.countDimensionsGapped[state.leafConnectee] == 0:
+ state.gap1ndexCeiling += 1
+ state.countDimensionsGapped[state.leafConnectee] += 1
+ state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leafBelow[state.leafConnectee]]
+ state.indexDimension += 1
+ state.indexMiniGap = state.gap1ndex
+ while state.indexMiniGap < state.gap1ndexCeiling:
+ state.gapsWhere[state.gap1ndex] = state.gapsWhere[state.indexMiniGap]
+ if state.countDimensionsGapped[state.gapsWhere[state.indexMiniGap]] == state.dimensionsUnconstrained:
+ state.gap1ndex += 1
+ state.countDimensionsGapped[state.gapsWhere[state.indexMiniGap]] = 0
+ state.indexMiniGap += 1
+ while state.gap1ndex == state.gapRangeStart[state.leaf1ndex - 1]:
+ state.leaf1ndex -= 1
+ state.leafBelow[state.leafAbove[state.leaf1ndex]] = state.leafBelow[state.leaf1ndex]
+ state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leafAbove[state.leaf1ndex]
+ state.gap1ndex -= 1
+ state.leafAbove[state.leaf1ndex] = state.gapsWhere[state.gap1ndex]
+ state.leafBelow[state.leaf1ndex] = state.leafBelow[state.leafAbove[state.leaf1ndex]]
+ state.leafBelow[state.leafAbove[state.leaf1ndex]] = state.leaf1ndex
+ state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leaf1ndex
+ state.gapRangeStart[state.leaf1ndex] = state.gap1ndex
+ state.leaf1ndex += 1
+ state.groupsOfFolds *= 2
+ return state