mapFolding 0.9.3__py3-none-any.whl → 0.9.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36):
  1. mapFolding/__init__.py +41 -7
  2. mapFolding/basecamp.py +100 -9
  3. mapFolding/beDRY.py +7 -15
  4. mapFolding/dataBaskets.py +12 -0
  5. mapFolding/datatypes.py +4 -4
  6. mapFolding/oeis.py +2 -7
  7. mapFolding/someAssemblyRequired/RecipeJob.py +97 -3
  8. mapFolding/someAssemblyRequired/Z0Z_makeSomeModules.py +326 -0
  9. mapFolding/someAssemblyRequired/__init__.py +37 -29
  10. mapFolding/someAssemblyRequired/_theTypes.py +19 -19
  11. mapFolding/someAssemblyRequired/_tool_Make.py +12 -6
  12. mapFolding/someAssemblyRequired/_tool_Then.py +59 -21
  13. mapFolding/someAssemblyRequired/_toolboxAST.py +57 -0
  14. mapFolding/someAssemblyRequired/_toolboxAntecedents.py +123 -40
  15. mapFolding/someAssemblyRequired/_toolboxContainers.py +128 -37
  16. mapFolding/someAssemblyRequired/_toolboxPython.py +52 -50
  17. mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +274 -0
  18. mapFolding/someAssemblyRequired/synthesizeNumbaJob.py +6 -4
  19. mapFolding/someAssemblyRequired/toolboxNumba.py +3 -27
  20. mapFolding/someAssemblyRequired/transformationTools.py +47 -177
  21. mapFolding/syntheticModules/daoOfMapFolding.py +74 -0
  22. mapFolding/syntheticModules/dataPacking.py +25 -0
  23. mapFolding/syntheticModules/initializeCount.py +49 -0
  24. mapFolding/syntheticModules/theorem2.py +49 -0
  25. mapFolding/syntheticModules/theorem2Numba.py +45 -0
  26. mapFolding/syntheticModules/theorem2Trimmed.py +43 -0
  27. {mapfolding-0.9.3.dist-info → mapfolding-0.9.5.dist-info}/METADATA +2 -1
  28. mapfolding-0.9.5.dist-info/RECORD +59 -0
  29. {mapfolding-0.9.3.dist-info → mapfolding-0.9.5.dist-info}/WHEEL +1 -1
  30. tests/test_computations.py +4 -2
  31. mapFolding/Z0Z_flowControl.py +0 -99
  32. mapfolding-0.9.3.dist-info/RECORD +0 -51
  33. /mapFolding/{theDaoOfMapFolding.py → daoOfMapFolding.py} +0 -0
  34. {mapfolding-0.9.3.dist-info → mapfolding-0.9.5.dist-info}/entry_points.txt +0 -0
  35. {mapfolding-0.9.3.dist-info → mapfolding-0.9.5.dist-info}/licenses/LICENSE +0 -0
  36. {mapfolding-0.9.3.dist-info → mapfolding-0.9.5.dist-info}/top_level.txt +0 -0
@@ -22,11 +22,13 @@ from autoflake import fix_code as autoflake_fix_code
22
22
  from collections.abc import Callable, Mapping
23
23
  from copy import deepcopy
24
24
  from mapFolding.beDRY import outfitCountFolds
25
- from mapFolding.toolboxFilesystem import getPathFilenameFoldsTotal, writeStringToHere
26
25
  from mapFolding.someAssemblyRequired import (
27
26
  ast_Identifier,
27
+ astModuleToIngredientsFunction,
28
28
  be,
29
+ DeReConstructField2ast,
29
30
  DOT,
31
+ extractClassDef,
30
32
  grab,
31
33
  ifThis,
32
34
  importLogicalPath2Callable,
@@ -44,6 +46,7 @@ from mapFolding.someAssemblyRequired import (
44
46
  个,
45
47
  )
46
48
  from mapFolding.theSSOT import ComputationState, raiseIfNoneGitHubIssueNumber3, The
49
+ from mapFolding.toolboxFilesystem import getPathFilenameFoldsTotal, writeStringToHere
47
50
  from os import PathLike
48
51
  from pathlib import Path, PurePath
49
52
  from typing import Any, Literal, overload
@@ -52,59 +55,6 @@ import dataclasses
52
55
  import pickle
53
56
  import python_minifier
54
57
 
55
- def astModuleToIngredientsFunction(astModule: ast.AST, identifierFunctionDef: ast_Identifier) -> IngredientsFunction:
56
- """
57
- Extract a function definition from an AST module and create an IngredientsFunction.
58
-
59
- This function finds a function definition with the specified identifier in the given
60
- AST module and wraps it in an IngredientsFunction object along with its import context.
61
-
62
- Parameters:
63
- astModule: The AST module containing the function definition.
64
- identifierFunctionDef: The name of the function to extract.
65
-
66
- Returns:
67
- An IngredientsFunction object containing the function definition and its imports.
68
-
69
- Raises:
70
- raiseIfNoneGitHubIssueNumber3: If the function definition is not found.
71
- """
72
- astFunctionDef = extractFunctionDef(astModule, identifierFunctionDef)
73
- if not astFunctionDef: raise raiseIfNoneGitHubIssueNumber3
74
- return IngredientsFunction(astFunctionDef, LedgerOfImports(astModule))
75
-
76
- def extractClassDef(module: ast.AST, identifier: ast_Identifier) -> ast.ClassDef | None:
77
- """
78
- Extract a class definition with a specific name from an AST module.
79
-
80
- This function searches through an AST module for a class definition that
81
- matches the provided identifier and returns it if found.
82
-
83
- Parameters:
84
- module: The AST module to search within.
85
- identifier: The name of the class to find.
86
-
87
- Returns:
88
- The matching class definition AST node, or None if not found.
89
- """
90
- return NodeTourist(ifThis.isClassDef_Identifier(identifier), Then.extractIt).captureLastMatch(module)
91
-
92
- def extractFunctionDef(module: ast.AST, identifier: ast_Identifier) -> ast.FunctionDef | None:
93
- """
94
- Extract a function definition with a specific name from an AST module.
95
-
96
- This function searches through an AST module for a function definition that
97
- matches the provided identifier and returns it if found.
98
-
99
- Parameters:
100
- module: The AST module to search within.
101
- identifier: The name of the function to find.
102
-
103
- Returns:
104
- astFunctionDef: The matching function definition AST node, or None if not found.
105
- """
106
- return NodeTourist(ifThis.isFunctionDef_Identifier(identifier), Then.extractIt).captureLastMatch(module)
107
-
108
58
  def makeDictionaryFunctionDef(module: ast.Module) -> dict[ast_Identifier, ast.FunctionDef]:
109
59
  """
110
60
  Create a dictionary mapping function names to their AST definitions.
@@ -228,120 +178,6 @@ def makeInitializedComputationState(mapShape: tuple[int, ...], writeJob: bool =
228
178
  pathFilenameJob.write_bytes(pickle.dumps(stateUniversal))
229
179
  return pathFilenameJob
230
180
 
231
- @dataclasses.dataclass
232
- class DeReConstructField2ast:
233
- """
234
- Transform a dataclass field into AST node representations for code generation.
235
-
236
- This class extracts and transforms a dataclass Field object into various AST node
237
- representations needed for code generation. It handles the conversion of field
238
- attributes, type annotations, and metadata into AST constructs that can be used
239
- to reconstruct the field in generated code.
240
-
241
- The class is particularly important for decomposing dataclass fields (like those in
242
- ComputationState) to enable their use in specialized contexts like Numba-optimized
243
- functions, where the full dataclass cannot be directly used but its contents need
244
- to be accessible.
245
-
246
- Each field is processed according to its type and metadata to create appropriate
247
- variable declarations, type annotations, and initialization code as AST nodes.
248
- """
249
- dataclassesDOTdataclassLogicalPathModule: dataclasses.InitVar[str_nameDOTname]
250
- dataclassClassDef: dataclasses.InitVar[ast.ClassDef]
251
- dataclassesDOTdataclassInstance_Identifier: dataclasses.InitVar[ast_Identifier]
252
- field: dataclasses.InitVar[dataclasses.Field[Any]]
253
-
254
- ledger: LedgerOfImports = dataclasses.field(default_factory=LedgerOfImports)
255
-
256
- name: ast_Identifier = dataclasses.field(init=False)
257
- typeBuffalo: type[Any] | str | Any = dataclasses.field(init=False)
258
- default: Any | None = dataclasses.field(init=False)
259
- default_factory: Callable[..., Any] | None = dataclasses.field(init=False)
260
- repr: bool = dataclasses.field(init=False)
261
- hash: bool | None = dataclasses.field(init=False)
262
- init: bool = dataclasses.field(init=False)
263
- compare: bool = dataclasses.field(init=False)
264
- metadata: dict[Any, Any] = dataclasses.field(init=False)
265
- kw_only: bool = dataclasses.field(init=False)
266
-
267
- astName: ast.Name = dataclasses.field(init=False)
268
- ast_keyword_field__field: ast.keyword = dataclasses.field(init=False)
269
- ast_nameDOTname: ast.Attribute = dataclasses.field(init=False)
270
- astAnnotation: ast.expr = dataclasses.field(init=False)
271
- ast_argAnnotated: ast.arg = dataclasses.field(init=False)
272
- astAnnAssignConstructor: ast.AnnAssign|ast.Assign = dataclasses.field(init=False)
273
- Z0Z_hack: tuple[ast.AnnAssign|ast.Assign, str] = dataclasses.field(init=False)
274
-
275
- def __post_init__(self, dataclassesDOTdataclassLogicalPathModule: str_nameDOTname, dataclassClassDef: ast.ClassDef, dataclassesDOTdataclassInstance_Identifier: ast_Identifier, field: dataclasses.Field[Any]) -> None:
276
- self.compare = field.compare
277
- self.default = field.default if field.default is not dataclasses.MISSING else None
278
- self.default_factory = field.default_factory if field.default_factory is not dataclasses.MISSING else None
279
- self.hash = field.hash
280
- self.init = field.init
281
- self.kw_only = field.kw_only if field.kw_only is not dataclasses.MISSING else False
282
- self.metadata = dict(field.metadata)
283
- self.name = field.name
284
- self.repr = field.repr
285
- self.typeBuffalo = field.type
286
-
287
- self.astName = Make.Name(self.name)
288
- self.ast_keyword_field__field = Make.keyword(self.name, self.astName)
289
- self.ast_nameDOTname = Make.Attribute(Make.Name(dataclassesDOTdataclassInstance_Identifier), self.name)
290
-
291
- sherpa = NodeTourist(ifThis.isAnnAssign_targetIs(ifThis.isName_Identifier(self.name)), Then.extractIt(DOT.annotation)).captureLastMatch(dataclassClassDef)
292
- if sherpa is None: raise raiseIfNoneGitHubIssueNumber3
293
- else: self.astAnnotation = sherpa
294
-
295
- self.ast_argAnnotated = Make.arg(self.name, self.astAnnotation)
296
- """
297
- from ast import Module, Expr, Subscript, Name, Tuple, Load
298
- Subscript(
299
- value=Name(id='ndarray', ctx=Load()),
300
- slice=Tuple(
301
- elts=[
302
- Subscript(
303
- value=Name(id='tuple', ctx=Load()),
304
- slice=Name(id='int', ctx=Load()),
305
- ctx=Load()),
306
- Subscript(
307
- value=Name(id='dtype', ctx=Load()),
308
- slice=Name(id='NumPyLeavesTotal', ctx=Load()),
309
- ctx=Load())],
310
- ctx=Load()),
311
- ctx=Load()
312
- )
313
-
314
- """
315
- dtype = self.metadata.get('dtype', None)
316
- if dtype:
317
- moduleWithLogicalPath: str_nameDOTname = 'numpy'
318
- annotationType = 'ndarray'
319
- self.ledger.addImportFrom_asStr(moduleWithLogicalPath, annotationType)
320
- self.ledger.addImportFrom_asStr(moduleWithLogicalPath, 'dtype')
321
- axesSubscript = Make.Subscript(Make.Name('tuple'), Make.Name('uint8'))
322
- dtype_asnameName: ast.Name = self.astAnnotation # type: ignore
323
- if dtype_asnameName.id == 'Array3D':
324
- axesSubscript = Make.Subscript(Make.Name('tuple'), Make.Tuple([Make.Name('uint8'), Make.Name('uint8'), Make.Name('uint8')]))
325
- ast_expr = Make.Subscript(Make.Name(annotationType), Make.Tuple([axesSubscript, Make.Subscript(Make.Name('dtype'), dtype_asnameName)]))
326
- constructor = 'array'
327
- self.ledger.addImportFrom_asStr(moduleWithLogicalPath, constructor)
328
- dtypeIdentifier: ast_Identifier = dtype.__name__
329
- self.ledger.addImportFrom_asStr(moduleWithLogicalPath, dtypeIdentifier, dtype_asnameName.id)
330
- self.astAnnAssignConstructor = Make.AnnAssign(self.astName, ast_expr, Make.Call(Make.Name(constructor), list_astKeywords=[Make.keyword('dtype', dtype_asnameName)]))
331
- self.astAnnAssignConstructor = Make.Assign([self.astName], Make.Call(Make.Name(constructor), list_astKeywords=[Make.keyword('dtype', dtype_asnameName)]))
332
- self.Z0Z_hack = (self.astAnnAssignConstructor, 'array')
333
- elif isinstance(self.astAnnotation, ast.Name):
334
- self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, Make.Call(self.astAnnotation, [Make.Constant(-1)]))
335
- self.Z0Z_hack = (self.astAnnAssignConstructor, 'scalar')
336
- elif isinstance(self.astAnnotation, ast.Subscript):
337
- elementConstructor: ast_Identifier = self.metadata['elementConstructor']
338
- self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, elementConstructor)
339
- takeTheTuple: ast.Tuple = deepcopy(self.astAnnotation.slice) # type: ignore
340
- self.astAnnAssignConstructor = Make.AnnAssign(self.astName, self.astAnnotation, takeTheTuple)
341
- self.Z0Z_hack = (self.astAnnAssignConstructor, elementConstructor)
342
- if isinstance(self.astAnnotation, ast.Name):
343
- self.ledger.addImportFrom_asStr(dataclassesDOTdataclassLogicalPathModule, self.astAnnotation.id) # pyright: ignore [reportUnknownArgumentType, reportUnknownMemberType, reportIJustCalledATypeGuardMethod_WTF]
344
-
345
181
  def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclass_Identifier: ast_Identifier, instance_Identifier: ast_Identifier) -> ShatteredDataclass:
346
182
  """
347
183
  Decompose a dataclass definition into AST components for manipulation and code generation.
@@ -404,8 +240,8 @@ def shatter_dataclassesDOTdataclass(logicalPathModule: str_nameDOTname, dataclas
404
240
  shatteredDataclass.repack = Make.Assign(listTargets=[Make.Name(instance_Identifier)], value=Make.Call(Make.Name(dataclass_Identifier), list_astKeywords=shatteredDataclass.list_keyword_field__field4init))
405
241
  shatteredDataclass.signatureReturnAnnotation = Make.Subscript(Make.Name('tuple'), Make.Tuple(shatteredDataclass.listAnnotations))
406
242
 
407
- shatteredDataclass.ledger.update(*(dictionaryDeReConstruction[field].ledger for field in Official_fieldOrder))
408
- shatteredDataclass.ledger.addImportFrom_asStr(logicalPathModule, dataclass_Identifier)
243
+ shatteredDataclass.imports.update(*(dictionaryDeReConstruction[field].ledger for field in Official_fieldOrder))
244
+ shatteredDataclass.imports.addImportFrom_asStr(logicalPathModule, dataclass_Identifier)
409
245
 
410
246
  return shatteredDataclass
411
247
 
@@ -448,6 +284,27 @@ def write_astModule(ingredients: IngredientsModule, pathFilename: PathLike[Any]
448
284
 
449
285
  # END of acceptable classes and functions ======================================================
450
286
  def removeUnusedParameters(ingredientsFunction: IngredientsFunction) -> IngredientsFunction:
287
+ """
288
+ Removes unused parameters from a function's AST definition, return statement, and annotation.
289
+
290
+ This function analyzes the Abstract Syntax Tree (AST) of a given function and removes
291
+ any parameters that are not referenced within the function body. It updates the
292
+ function signature, the return statement (if it's a tuple containing unused variables),
293
+ and the return type annotation accordingly.
294
+
295
+ Parameters
296
+ ----------
297
+ ingredientsFunction : IngredientsFunction
298
+ An object containing the AST representation of a function to be processed.
299
+
300
+ Returns
301
+ -------
302
+ IngredientsFunction
303
+ The modified IngredientsFunction object with unused parameters and corresponding
304
+ return elements/annotations removed from its AST.
305
+
306
+ The modification is done in-place on the original AST nodes within the IngredientsFunction object.
307
+ """
451
308
  list_argCuzMyBrainRefusesToThink = ingredientsFunction.astFunctionDef.args.args + ingredientsFunction.astFunctionDef.args.posonlyargs + ingredientsFunction.astFunctionDef.args.kwonlyargs
452
309
  list_arg_arg: list[ast_Identifier] = [ast_arg.arg for ast_arg in list_argCuzMyBrainRefusesToThink]
453
310
  listName: list[ast.Name] = []
@@ -459,6 +316,18 @@ def removeUnusedParameters(ingredientsFunction: IngredientsFunction) -> Ingredie
459
316
  for arg_Identifier in list_IdentifiersNotUsed:
460
317
  remove_arg = NodeChanger(ifThis.is_arg_Identifier(arg_Identifier), Then.removeIt)
461
318
  remove_arg.visit(ingredientsFunction.astFunctionDef)
319
+
320
+ list_argCuzMyBrainRefusesToThink = ingredientsFunction.astFunctionDef.args.args + ingredientsFunction.astFunctionDef.args.posonlyargs + ingredientsFunction.astFunctionDef.args.kwonlyargs
321
+
322
+ listName: list[ast.Name] = [Make.Name(ast_arg.arg) for ast_arg in list_argCuzMyBrainRefusesToThink]
323
+ replaceReturn = NodeChanger(be.Return, Then.replaceWith(Make.Return(Make.Tuple(listName))))
324
+ replaceReturn.visit(ingredientsFunction.astFunctionDef)
325
+
326
+ list_annotation: list[ast.expr] = [ast_arg.annotation for ast_arg in list_argCuzMyBrainRefusesToThink if ast_arg.annotation is not None]
327
+ ingredientsFunction.astFunctionDef.returns = Make.Subscript(Make.Name('tuple'), Make.Tuple(list_annotation))
328
+
329
+ ast.fix_missing_locations(ingredientsFunction.astFunctionDef)
330
+
462
331
  return ingredientsFunction
463
332
 
464
333
  def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
@@ -514,7 +383,7 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
514
383
  instance_Identifier = recipeFlow.dataclassInstance
515
384
  getTheOtherRecord_damn = recipeFlow.dataclassInstanceTaskDistribution
516
385
  shatteredDataclass = shatter_dataclassesDOTdataclass(recipeFlow.logicalPathModuleDataclass, recipeFlow.sourceDataclassIdentifier, instance_Identifier)
517
- ingredientsDispatcher.imports.update(shatteredDataclass.ledger)
386
+ ingredientsDispatcher.imports.update(shatteredDataclass.imports)
518
387
 
519
388
  # How can I use dataclass settings as the SSOT for specific actions? https://github.com/hunterhogan/mapFolding/issues/16
520
389
  # Change callable parameters and Call to the callable at the same time ====
@@ -542,6 +411,13 @@ def makeNewFlow(recipeFlow: RecipeSynthesizeFlow) -> IngredientsModule:
542
411
 
543
412
  replaceCall2concurrencyManager = NodeChanger(ifThis.isCallAttributeNamespace_Identifier(recipeFlow.concurrencyManagerNamespace, recipeFlow.concurrencyManagerIdentifier), Then.replaceWith(Make.Call(Make.Attribute(Make.Name(recipeFlow.concurrencyManagerNamespace), recipeFlow.concurrencyManagerIdentifier), listArguments=[Make.Name(recipeFlow.callableParallel)] + listParameters)))
544
413
 
414
+ def getIt(astCallConcurrencyResult: list[ast.Call]) -> Callable[[ast.AST], ast.AST]:
415
+ # TODO I cannot remember why I made this function. It doesn't fit with how I normally do things.
416
+ def workhorse(node: ast.AST) -> ast.AST:
417
+ NodeTourist(be.Call, Then.appendTo(astCallConcurrencyResult)).visit(node)
418
+ return node
419
+ return workhorse
420
+
545
421
  # NOTE I am dissatisfied with this logic for many reasons, including that it requires separate NodeCollector and NodeReplacer instances.
546
422
  astCallConcurrencyResult: list[ast.Call] = []
547
423
  get_astCallConcurrencyResult = NodeTourist(ifThis.isAssignAndTargets0Is(ifThis.isSubscript_Identifier(getTheOtherRecord_damn)), getIt(astCallConcurrencyResult))
@@ -581,12 +457,6 @@ def unpackDataclassCallFunctionRepackDataclass(ingredientsCaller: IngredientsFun
581
457
  repack4targetCallable.visit(ingredientsCaller.astFunctionDef)
582
458
  return ingredientsCaller
583
459
 
584
- def getIt(astCallConcurrencyResult: list[ast.Call]) -> Callable[[ast.AST], ast.AST]:
585
- def workhorse(node: ast.AST) -> ast.AST:
586
- NodeTourist(be.Call, Then.appendTo(astCallConcurrencyResult)).visit(node)
587
- return node
588
- return workhorse
589
-
590
460
  dictionaryEstimates: dict[tuple[int, ...], int] = {
591
461
  (2,2,2,2,2,2,2,2): 798148657152000,
592
462
  (2,21): 776374224866624,
@@ -0,0 +1,74 @@
1
+ from mapFolding.dataBaskets import Array1DElephino, Array1DLeavesTotal, Array3D, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal, MapFoldingState
2
+ from numba import jit
3
+
4
@jit(cache=True, error_model='numpy', fastmath=True, forceinline=True)
def count(groupsOfFolds: DatatypeFoldsTotal, gap1ndex: DatatypeElephino, gap1ndexCeiling: DatatypeElephino, indexDimension: DatatypeLeavesTotal, indexLeaf: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, leavesTotal: DatatypeLeavesTotal) -> tuple[DatatypeFoldsTotal, DatatypeElephino, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal, Array1DLeavesTotal, Array1DElephino, Array1DLeavesTotal, Array1DLeavesTotal, Array1DLeavesTotal, Array3D, DatatypeLeavesTotal, DatatypeLeavesTotal]:
    """Count map foldings by iterative backtracking over leaf placements.

    The dataclass state has been flattened into scalars and NumPy arrays so
    Numba can compile the loop.  `leafAbove`/`leafBelow` encode the current
    folding as a doubly linked list of leaves; `gapsWhere` plus the cursors
    `gap1ndex`/`gap1ndexCeiling`/`gapRangeStart` track the candidate insertion
    gaps for the next leaf; `connectionGraph[d, l, m]` gives the leaf connected
    to `m` in dimension `d` when placing leaf `l`.  Every parameter is returned
    (in the same order) so the caller can repack the state.
    """
    # leaf1ndex == 0 means the backtracking search tree is exhausted.
    while leaf1ndex > 0:
        # Only extend orderings where leaf 1 is at the bottom of the stack —
        # NOTE(review): appears to prune symmetric duplicates; confirm.
        if leaf1ndex <= 1 or leafBelow[0] == 1:
            if leaf1ndex > leavesTotal:
                # All leaves placed: one complete folding (group) found.
                groupsOfFolds += 1
            else:
                # Enumerate the gaps where the next leaf can be inserted.
                dimensionsUnconstrained = dimensionsTotal
                gap1ndexCeiling = gapRangeStart[leaf1ndex - 1]
                indexDimension = 0
                while indexDimension < dimensionsTotal:
                    leafConnectee = connectionGraph[indexDimension, leaf1ndex, leaf1ndex]
                    if leafConnectee == leaf1ndex:
                        # Self-connection: this dimension imposes no constraint.
                        dimensionsUnconstrained -= 1
                    else:
                        # Walk the connected leaves, recording each candidate gap;
                        # the write happens before the conditional increment, so a
                        # repeated candidate overwrites the same slot.
                        while leafConnectee != leaf1ndex:
                            gapsWhere[gap1ndexCeiling] = leafConnectee
                            if countDimensionsGapped[leafConnectee] == 0:
                                gap1ndexCeiling += 1
                            countDimensionsGapped[leafConnectee] += 1
                            leafConnectee = connectionGraph[indexDimension, leaf1ndex, leafBelow[leafConnectee]]
                    indexDimension += 1
                if not dimensionsUnconstrained:
                    # Fully constrained: every position below the current leaf is a gap.
                    indexLeaf = 0
                    while indexLeaf < leaf1ndex:
                        gapsWhere[gap1ndexCeiling] = indexLeaf
                        gap1ndexCeiling += 1
                        indexLeaf += 1
                # Keep only the gaps common to all constrained dimensions,
                # compacting them toward gap1ndex and resetting the counters.
                indexMiniGap = gap1ndex
                while indexMiniGap < gap1ndexCeiling:
                    gapsWhere[gap1ndex] = gapsWhere[indexMiniGap]
                    if countDimensionsGapped[gapsWhere[indexMiniGap]] == dimensionsUnconstrained:
                        gap1ndex += 1
                    countDimensionsGapped[gapsWhere[indexMiniGap]] = 0
                    indexMiniGap += 1
        # Backtrack: unlink leaves that have no remaining gaps to try.
        while leaf1ndex > 0 and gap1ndex == gapRangeStart[leaf1ndex - 1]:
            leaf1ndex -= 1
            leafBelow[leafAbove[leaf1ndex]] = leafBelow[leaf1ndex]
            leafAbove[leafBelow[leaf1ndex]] = leafAbove[leaf1ndex]
        if leaf1ndex > 0:
            # Place the current leaf into the next untried gap and descend.
            gap1ndex -= 1
            leafAbove[leaf1ndex] = gapsWhere[gap1ndex]
            leafBelow[leaf1ndex] = leafBelow[leafAbove[leaf1ndex]]
            leafBelow[leafAbove[leaf1ndex]] = leaf1ndex
            leafAbove[leafBelow[leaf1ndex]] = leaf1ndex
            gapRangeStart[leaf1ndex] = gap1ndex
            leaf1ndex += 1
    return (groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal)
52
+
53
def doTheNeedful(state: MapFoldingState) -> MapFoldingState:
    """Run the compiled `count` loop on the fields of `state` and rebuild the state.

    The dataclass is flattened to scalars and arrays because the Numba-jitted
    `count` cannot accept the dataclass itself; the values it returns are then
    packed into a fresh `MapFoldingState`.
    """
    mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
    (groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexLeaf,
        indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained,
        countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow,
        connectionGraph, dimensionsTotal, leavesTotal) = count(
            state.groupsOfFolds, state.gap1ndex, state.gap1ndexCeiling,
            state.indexDimension, state.indexLeaf, state.indexMiniGap,
            state.leaf1ndex, state.leafConnectee, state.dimensionsUnconstrained,
            state.countDimensionsGapped, state.gapRangeStart, state.gapsWhere,
            state.leafAbove, state.leafBelow, state.connectionGraph,
            state.dimensionsTotal, state.leavesTotal)
    # connectionGraph, dimensionsTotal, and leavesTotal come back from `count`
    # but are not constructor arguments — presumably MapFoldingState derives
    # them from mapShape; confirm against the dataclass definition.
    return MapFoldingState(mapShape=mapShape, groupsOfFolds=groupsOfFolds, gap1ndex=gap1ndex, gap1ndexCeiling=gap1ndexCeiling, indexDimension=indexDimension, indexLeaf=indexLeaf, indexMiniGap=indexMiniGap, leaf1ndex=leaf1ndex, leafConnectee=leafConnectee, dimensionsUnconstrained=dimensionsUnconstrained, countDimensionsGapped=countDimensionsGapped, gapRangeStart=gapRangeStart, gapsWhere=gapsWhere, leafAbove=leafAbove, leafBelow=leafBelow)
@@ -0,0 +1,25 @@
1
+ from mapFolding.dataBaskets import Array1DElephino, Array1DLeavesTotal, Array3D, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal, MapFoldingState
2
+ from mapFolding.syntheticModules.theorem2Numba import count
3
+
4
def doTheNeedful(state: MapFoldingState) -> MapFoldingState:
    """Run `theorem2Numba.count` on the fields of `state` and rebuild the state.

    Flattens the dataclass to scalars and arrays for the compiled `count`
    (which, unlike the full flow, neither takes nor returns `indexLeaf`), then
    packs the returned values into a fresh `MapFoldingState`.
    """
    mapShape: tuple[DatatypeLeavesTotal, ...] = state.mapShape
    (groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexMiniGap,
        leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped,
        gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph,
        dimensionsTotal, leavesTotal) = count(
            state.groupsOfFolds, state.gap1ndex, state.gap1ndexCeiling,
            state.indexDimension, state.indexMiniGap, state.leaf1ndex,
            state.leafConnectee, state.dimensionsUnconstrained,
            state.countDimensionsGapped, state.gapRangeStart, state.gapsWhere,
            state.leafAbove, state.leafBelow, state.connectionGraph,
            state.dimensionsTotal, state.leavesTotal)
    # `indexLeaf` is untouched by this `count` variant, so the original value
    # is carried over; connectionGraph/dimensionsTotal/leavesTotal are returned
    # but not constructor arguments — presumably derived from mapShape; confirm.
    return MapFoldingState(mapShape=mapShape, groupsOfFolds=groupsOfFolds, gap1ndex=gap1ndex, gap1ndexCeiling=gap1ndexCeiling, indexDimension=indexDimension, indexLeaf=state.indexLeaf, indexMiniGap=indexMiniGap, leaf1ndex=leaf1ndex, leafConnectee=leafConnectee, dimensionsUnconstrained=dimensionsUnconstrained, countDimensionsGapped=countDimensionsGapped, gapRangeStart=gapRangeStart, gapsWhere=gapsWhere, leafAbove=leafAbove, leafBelow=leafBelow)
@@ -0,0 +1,49 @@
1
+ from mapFolding.dataBaskets import MapFoldingState
2
+
3
def initializeGroupsOfFolds(state: MapFoldingState) -> MapFoldingState:
    """Advance the backtracking search just until the first folding is found.

    Runs the same leaf-placement loop as the main `count` flow, but the outer
    condition is `groupsOfFolds == 0`, so the loop stops as soon as one
    complete folding has been counted.  `state` is mutated in place and
    returned mid-search, so a subsequent counting pass can resume from it.
    """
    while state.groupsOfFolds == 0:
        # Only extend orderings where leaf 1 is at the bottom of the stack —
        # NOTE(review): appears to prune symmetric duplicates; confirm.
        if state.leaf1ndex <= 1 or state.leafBelow[0] == 1:
            if state.leaf1ndex > state.leavesTotal:
                # All leaves placed: first folding found; outer loop will exit.
                state.groupsOfFolds += 1
            else:
                # Enumerate candidate insertion gaps for the next leaf.
                state.dimensionsUnconstrained = state.dimensionsTotal
                state.gap1ndexCeiling = state.gapRangeStart[state.leaf1ndex - 1]
                state.indexDimension = 0
                while state.indexDimension < state.dimensionsTotal:
                    state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leaf1ndex]
                    if state.leafConnectee == state.leaf1ndex:
                        # Self-connection: this dimension imposes no constraint.
                        state.dimensionsUnconstrained -= 1
                    else:
                        # Record each connected leaf as a candidate gap; the write
                        # precedes the conditional increment so duplicates reuse a slot.
                        while state.leafConnectee != state.leaf1ndex:
                            state.gapsWhere[state.gap1ndexCeiling] = state.leafConnectee
                            if state.countDimensionsGapped[state.leafConnectee] == 0:
                                state.gap1ndexCeiling += 1
                            state.countDimensionsGapped[state.leafConnectee] += 1
                            state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leafBelow[state.leafConnectee]]
                    state.indexDimension += 1
                if not state.dimensionsUnconstrained:
                    # Fully constrained: every position below the current leaf is a gap.
                    state.indexLeaf = 0
                    while state.indexLeaf < state.leaf1ndex:
                        state.gapsWhere[state.gap1ndexCeiling] = state.indexLeaf
                        state.gap1ndexCeiling += 1
                        state.indexLeaf += 1
                # Keep only gaps common to all constrained dimensions, compacting
                # them toward gap1ndex and zeroing the per-leaf counters.
                state.indexMiniGap = state.gap1ndex
                while state.indexMiniGap < state.gap1ndexCeiling:
                    state.gapsWhere[state.gap1ndex] = state.gapsWhere[state.indexMiniGap]
                    if state.countDimensionsGapped[state.gapsWhere[state.indexMiniGap]] == state.dimensionsUnconstrained:
                        state.gap1ndex += 1
                    state.countDimensionsGapped[state.gapsWhere[state.indexMiniGap]] = 0
                    state.indexMiniGap += 1
        # Backtrack: unlink leaves that have no remaining gaps to try.
        while state.leaf1ndex > 0 and state.gap1ndex == state.gapRangeStart[state.leaf1ndex - 1]:
            state.leaf1ndex -= 1
            state.leafBelow[state.leafAbove[state.leaf1ndex]] = state.leafBelow[state.leaf1ndex]
            state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leafAbove[state.leaf1ndex]
        if state.leaf1ndex > 0:
            # Place the current leaf into the next untried gap and descend.
            state.gap1ndex -= 1
            state.leafAbove[state.leaf1ndex] = state.gapsWhere[state.gap1ndex]
            state.leafBelow[state.leaf1ndex] = state.leafBelow[state.leafAbove[state.leaf1ndex]]
            state.leafBelow[state.leafAbove[state.leaf1ndex]] = state.leaf1ndex
            state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leaf1ndex
            state.gapRangeStart[state.leaf1ndex] = state.gap1ndex
            state.leaf1ndex += 1
    return state
@@ -0,0 +1,49 @@
1
+ from mapFolding.dataBaskets import MapFoldingState
2
+
3
def count(state: MapFoldingState) -> MapFoldingState:
    """Count the remaining map foldings from a pre-initialized search state.

    Specialized variant of the main counting loop: it iterates only while
    `leaf1ndex > 4` and omits the `leaf1ndex > 0` guards of the general flow,
    so it assumes `state` was advanced past the shallow levels beforehand
    (e.g. by `initializeGroupsOfFolds`).  The final `groupsOfFolds *= 2`
    suggests each counted folding stands for two symmetric foldings —
    NOTE(review): confirm against the "theorem 2" derivation this module is
    named for.  `state` is mutated in place and returned.
    """
    while state.leaf1ndex > 4:
        # Only extend orderings where leaf 1 is at the bottom of the stack.
        if state.leafBelow[0] == 1:
            if state.leaf1ndex > state.leavesTotal:
                # All leaves placed: one complete folding (group) found.
                state.groupsOfFolds += 1
            else:
                # Enumerate candidate insertion gaps for the next leaf.
                state.dimensionsUnconstrained = state.dimensionsTotal
                state.gap1ndexCeiling = state.gapRangeStart[state.leaf1ndex - 1]
                state.indexDimension = 0
                while state.indexDimension < state.dimensionsTotal:
                    state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leaf1ndex]
                    if state.leafConnectee == state.leaf1ndex:
                        # Self-connection: this dimension imposes no constraint.
                        state.dimensionsUnconstrained -= 1
                    else:
                        # Record each connected leaf as a candidate gap; the write
                        # precedes the conditional increment so duplicates reuse a slot.
                        while state.leafConnectee != state.leaf1ndex:
                            state.gapsWhere[state.gap1ndexCeiling] = state.leafConnectee
                            if state.countDimensionsGapped[state.leafConnectee] == 0:
                                state.gap1ndexCeiling += 1
                            state.countDimensionsGapped[state.leafConnectee] += 1
                            state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leafBelow[state.leafConnectee]]
                    state.indexDimension += 1
                if not state.dimensionsUnconstrained:
                    # Fully constrained: every position below the current leaf is a gap.
                    state.indexLeaf = 0
                    while state.indexLeaf < state.leaf1ndex:
                        state.gapsWhere[state.gap1ndexCeiling] = state.indexLeaf
                        state.gap1ndexCeiling += 1
                        state.indexLeaf += 1
                # Keep only gaps common to all constrained dimensions, compacting
                # them toward gap1ndex and zeroing the per-leaf counters.
                state.indexMiniGap = state.gap1ndex
                while state.indexMiniGap < state.gap1ndexCeiling:
                    state.gapsWhere[state.gap1ndex] = state.gapsWhere[state.indexMiniGap]
                    if state.countDimensionsGapped[state.gapsWhere[state.indexMiniGap]] == state.dimensionsUnconstrained:
                        state.gap1ndex += 1
                    state.countDimensionsGapped[state.gapsWhere[state.indexMiniGap]] = 0
                    state.indexMiniGap += 1
        # Backtrack: unlink leaves that have no remaining gaps to try
        # (no leaf1ndex > 0 guard here; the outer condition keeps leaf1ndex > 4).
        while state.gap1ndex == state.gapRangeStart[state.leaf1ndex - 1]:
            state.leaf1ndex -= 1
            state.leafBelow[state.leafAbove[state.leaf1ndex]] = state.leafBelow[state.leaf1ndex]
            state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leafAbove[state.leaf1ndex]
        # Place the current leaf into the next untried gap and descend.
        state.gap1ndex -= 1
        state.leafAbove[state.leaf1ndex] = state.gapsWhere[state.gap1ndex]
        state.leafBelow[state.leaf1ndex] = state.leafBelow[state.leafAbove[state.leaf1ndex]]
        state.leafBelow[state.leafAbove[state.leaf1ndex]] = state.leaf1ndex
        state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leaf1ndex
        state.gapRangeStart[state.leaf1ndex] = state.gap1ndex
        state.leaf1ndex += 1
    # Double the tally — each counted folding appears to represent a symmetric pair.
    state.groupsOfFolds *= 2
    return state
@@ -0,0 +1,45 @@
1
+ from mapFolding.dataBaskets import Array1DElephino, Array1DLeavesTotal, Array3D, DatatypeElephino, DatatypeFoldsTotal, DatatypeLeavesTotal
2
+ from numba import jit
3
+
4
+ @jit(cache=True, error_model='numpy', fastmath=True, forceinline=True)
5
+ def count(groupsOfFolds: DatatypeFoldsTotal, gap1ndex: DatatypeElephino, gap1ndexCeiling: DatatypeElephino, indexDimension: DatatypeLeavesTotal, indexMiniGap: DatatypeElephino, leaf1ndex: DatatypeLeavesTotal, leafConnectee: DatatypeLeavesTotal, dimensionsUnconstrained: DatatypeLeavesTotal, countDimensionsGapped: Array1DLeavesTotal, gapRangeStart: Array1DElephino, gapsWhere: Array1DLeavesTotal, leafAbove: Array1DLeavesTotal, leafBelow: Array1DLeavesTotal, connectionGraph: Array3D, dimensionsTotal: DatatypeLeavesTotal, leavesTotal: DatatypeLeavesTotal) -> tuple[DatatypeFoldsTotal, DatatypeElephino, DatatypeElephino, DatatypeLeavesTotal, DatatypeElephino, DatatypeLeavesTotal, DatatypeLeavesTotal, DatatypeLeavesTotal, Array1DLeavesTotal, Array1DElephino, Array1DLeavesTotal, Array1DLeavesTotal, Array1DLeavesTotal, Array3D, DatatypeLeavesTotal, DatatypeLeavesTotal]:
6
+ while leaf1ndex > 4:
7
+ if leafBelow[0] == 1:
8
+ if leaf1ndex > leavesTotal:
9
+ groupsOfFolds += 1
10
+ else:
11
+ dimensionsUnconstrained = dimensionsTotal
12
+ gap1ndexCeiling = gapRangeStart[leaf1ndex - 1]
13
+ indexDimension = 0
14
+ while indexDimension < dimensionsTotal:
15
+ leafConnectee = connectionGraph[indexDimension, leaf1ndex, leaf1ndex]
16
+ if leafConnectee == leaf1ndex:
17
+ dimensionsUnconstrained -= 1
18
+ else:
19
+ while leafConnectee != leaf1ndex:
20
+ gapsWhere[gap1ndexCeiling] = leafConnectee
21
+ if countDimensionsGapped[leafConnectee] == 0:
22
+ gap1ndexCeiling += 1
23
+ countDimensionsGapped[leafConnectee] += 1
24
+ leafConnectee = connectionGraph[indexDimension, leaf1ndex, leafBelow[leafConnectee]]
25
+ indexDimension += 1
26
+ indexMiniGap = gap1ndex
27
+ while indexMiniGap < gap1ndexCeiling:
28
+ gapsWhere[gap1ndex] = gapsWhere[indexMiniGap]
29
+ if countDimensionsGapped[gapsWhere[indexMiniGap]] == dimensionsUnconstrained:
30
+ gap1ndex += 1
31
+ countDimensionsGapped[gapsWhere[indexMiniGap]] = 0
32
+ indexMiniGap += 1
33
+ while gap1ndex == gapRangeStart[leaf1ndex - 1]:
34
+ leaf1ndex -= 1
35
+ leafBelow[leafAbove[leaf1ndex]] = leafBelow[leaf1ndex]
36
+ leafAbove[leafBelow[leaf1ndex]] = leafAbove[leaf1ndex]
37
+ gap1ndex -= 1
38
+ leafAbove[leaf1ndex] = gapsWhere[gap1ndex]
39
+ leafBelow[leaf1ndex] = leafBelow[leafAbove[leaf1ndex]]
40
+ leafBelow[leafAbove[leaf1ndex]] = leaf1ndex
41
+ leafAbove[leafBelow[leaf1ndex]] = leaf1ndex
42
+ gapRangeStart[leaf1ndex] = gap1ndex
43
+ leaf1ndex += 1
44
+ groupsOfFolds *= 2
45
+ return (groupsOfFolds, gap1ndex, gap1ndexCeiling, indexDimension, indexMiniGap, leaf1ndex, leafConnectee, dimensionsUnconstrained, countDimensionsGapped, gapRangeStart, gapsWhere, leafAbove, leafBelow, connectionGraph, dimensionsTotal, leavesTotal)
@@ -0,0 +1,43 @@
1
+ from mapFolding.dataBaskets import MapFoldingState
2
+
3
+ def count(state: MapFoldingState) -> MapFoldingState:
4
+ while state.leaf1ndex > 4:
5
+ if state.leafBelow[0] == 1:
6
+ if state.leaf1ndex > state.leavesTotal:
7
+ state.groupsOfFolds += 1
8
+ else:
9
+ state.dimensionsUnconstrained = state.dimensionsTotal
10
+ state.gap1ndexCeiling = state.gapRangeStart[state.leaf1ndex - 1]
11
+ state.indexDimension = 0
12
+ while state.indexDimension < state.dimensionsTotal:
13
+ state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leaf1ndex]
14
+ if state.leafConnectee == state.leaf1ndex:
15
+ state.dimensionsUnconstrained -= 1
16
+ else:
17
+ while state.leafConnectee != state.leaf1ndex:
18
+ state.gapsWhere[state.gap1ndexCeiling] = state.leafConnectee
19
+ if state.countDimensionsGapped[state.leafConnectee] == 0:
20
+ state.gap1ndexCeiling += 1
21
+ state.countDimensionsGapped[state.leafConnectee] += 1
22
+ state.leafConnectee = state.connectionGraph[state.indexDimension, state.leaf1ndex, state.leafBelow[state.leafConnectee]]
23
+ state.indexDimension += 1
24
+ state.indexMiniGap = state.gap1ndex
25
+ while state.indexMiniGap < state.gap1ndexCeiling:
26
+ state.gapsWhere[state.gap1ndex] = state.gapsWhere[state.indexMiniGap]
27
+ if state.countDimensionsGapped[state.gapsWhere[state.indexMiniGap]] == state.dimensionsUnconstrained:
28
+ state.gap1ndex += 1
29
+ state.countDimensionsGapped[state.gapsWhere[state.indexMiniGap]] = 0
30
+ state.indexMiniGap += 1
31
+ while state.gap1ndex == state.gapRangeStart[state.leaf1ndex - 1]:
32
+ state.leaf1ndex -= 1
33
+ state.leafBelow[state.leafAbove[state.leaf1ndex]] = state.leafBelow[state.leaf1ndex]
34
+ state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leafAbove[state.leaf1ndex]
35
+ state.gap1ndex -= 1
36
+ state.leafAbove[state.leaf1ndex] = state.gapsWhere[state.gap1ndex]
37
+ state.leafBelow[state.leaf1ndex] = state.leafBelow[state.leafAbove[state.leaf1ndex]]
38
+ state.leafBelow[state.leafAbove[state.leaf1ndex]] = state.leaf1ndex
39
+ state.leafAbove[state.leafBelow[state.leaf1ndex]] = state.leaf1ndex
40
+ state.gapRangeStart[state.leaf1ndex] = state.gap1ndex
41
+ state.leaf1ndex += 1
42
+ state.groupsOfFolds *= 2
43
+ return state
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: mapFolding
3
- Version: 0.9.3
3
+ Version: 0.9.5
4
4
  Summary: Map folding algorithm with code transformation framework for optimizing numerical computations
5
5
  Author-email: Hunter Hogan <HunterHogan@pm.me>
6
6
  License: CC-BY-NC-4.0
@@ -32,6 +32,7 @@ Requires-Python: >=3.10
32
32
  Description-Content-Type: text/markdown
33
33
  License-File: LICENSE
34
34
  Requires-Dist: autoflake
35
+ Requires-Dist: cytoolz
35
36
  Requires-Dist: more_itertools
36
37
  Requires-Dist: numba_progress
37
38
  Requires-Dist: numba