mapFolding 0.17.0-py3-none-any.whl → 0.18.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (245)
  1. easyRun/NOTcountingFolds.py +16 -10
  2. easyRun/__init__.py +1 -0
  3. easyRun/countFolds.py +17 -9
  4. easyRun/meanders.py +6 -8
  5. mapFolding/__init__.py +24 -35
  6. mapFolding/_e/Z0Z_analysisPython/SORTZ0Z_hypothesis.py +189 -0
  7. mapFolding/_e/Z0Z_analysisPython/SORTZ0Z_p2d6.py +143 -0
  8. mapFolding/_e/Z0Z_analysisPython/__init__.py +4 -0
  9. mapFolding/_e/Z0Z_analysisPython/exclusionData/__init__.py +0 -0
  10. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/270/200.py +369 -0
  11. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/270/2001.py +694 -0
  12. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/270/200/344/270/211.py +514 -0
  13. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/270/200/344/270/2111.py +480 -0
  14. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/270/200/344/272/214.py +511 -0
  15. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/270/200/344/272/2141.py +515 -0
  16. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/270/200/344/272/214/344/270/211.py +485 -0
  17. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/270/200/344/272/214/344/270/2111.py +442 -0
  18. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/270/211.py +313 -0
  19. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/270/2111.py +343 -0
  20. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/272/214.py +400 -0
  21. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/272/2141.py +497 -0
  22. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/272/214/344/270/211.py +463 -0
  23. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/344/272/214/344/270/2111.py +441 -0
  24. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266.py +35 -0
  25. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/2661.py +35 -0
  26. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/270/200.py +382 -0
  27. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/270/2001.py +630 -0
  28. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/270/200/344/270/211.py +488 -0
  29. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/270/200/344/270/2111.py +475 -0
  30. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/270/200/344/272/214.py +473 -0
  31. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/270/200/344/272/2141.py +500 -0
  32. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/270/200/344/272/214/344/270/211.py +465 -0
  33. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/270/200/344/272/214/344/270/2111.py +439 -0
  34. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/270/211.py +599 -0
  35. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/270/2111.py +536 -0
  36. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/272/214.py +506 -0
  37. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/272/2141.py +533 -0
  38. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/272/214/344/270/211.py +489 -0
  39. mapFolding/_e/Z0Z_analysisPython/exclusionData/aggregated/351/246/226/351/233/266/344/272/214/344/270/2111.py +474 -0
  40. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200.py +1186 -0
  41. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/2001.py +2158 -0
  42. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/2001Negative.py +2158 -0
  43. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200Negative.py +1186 -0
  44. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200/344/270/211.py +1397 -0
  45. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200/344/270/2111.py +1291 -0
  46. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200/344/270/2111Negative.py +1291 -0
  47. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200/344/270/211Negative.py +1397 -0
  48. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200/344/272/214.py +1240 -0
  49. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200/344/272/2141.py +1420 -0
  50. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200/344/272/2141Negative.py +1420 -0
  51. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200/344/272/214Negative.py +1240 -0
  52. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200/344/272/214/344/270/211.py +1366 -0
  53. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200/344/272/214/344/270/2111.py +1274 -0
  54. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200/344/272/214/344/270/2111Negative.py +1274 -0
  55. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/200/344/272/214/344/270/211Negative.py +1366 -0
  56. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/211.py +1186 -0
  57. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/2111.py +1186 -0
  58. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/2111Negative.py +1186 -0
  59. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/270/211Negative.py +1186 -0
  60. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/272/214.py +1102 -0
  61. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/272/2141.py +1422 -0
  62. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/272/2141Negative.py +1422 -0
  63. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/272/214Negative.py +1102 -0
  64. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/272/214/344/270/211.py +1240 -0
  65. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/272/214/344/270/2111.py +1228 -0
  66. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/272/214/344/270/2111Negative.py +1228 -0
  67. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/344/272/214/344/270/211Negative.py +1240 -0
  68. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266.py +32 -0
  69. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/2661.py +1162 -0
  70. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/2661Negative.py +1162 -0
  71. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266Negative.py +32 -0
  72. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200.py +1186 -0
  73. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/2001.py +1926 -0
  74. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/2001Negative.py +1926 -0
  75. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200Negative.py +1186 -0
  76. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200/344/270/211.py +1291 -0
  77. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200/344/270/2111.py +1176 -0
  78. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200/344/270/2111Negative.py +1176 -0
  79. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200/344/270/211Negative.py +1291 -0
  80. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200/344/272/214.py +1228 -0
  81. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200/344/272/2141.py +1324 -0
  82. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200/344/272/2141Negative.py +1324 -0
  83. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200/344/272/214Negative.py +1228 -0
  84. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200/344/272/214/344/270/211.py +1274 -0
  85. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200/344/272/214/344/270/2111.py +1038 -0
  86. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200/344/272/214/344/270/2111Negative.py +1038 -0
  87. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/200/344/272/214/344/270/211Negative.py +1274 -0
  88. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/211.py +2158 -0
  89. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/2111.py +1926 -0
  90. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/2111Negative.py +1926 -0
  91. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/270/211Negative.py +2158 -0
  92. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/272/214.py +1422 -0
  93. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/272/2141.py +1364 -0
  94. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/272/2141Negative.py +1364 -0
  95. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/272/214Negative.py +1422 -0
  96. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/272/214/344/270/211.py +1420 -0
  97. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/272/214/344/270/2111.py +1324 -0
  98. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/272/214/344/270/2111Negative.py +1324 -0
  99. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d5/351/246/226/351/233/266/344/272/214/344/270/211Negative.py +1420 -0
  100. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200.py +3133 -0
  101. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/2001.py +6039 -0
  102. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/2001Negative.py +6039 -0
  103. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200Negative.py +3133 -0
  104. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200/344/270/211.py +3527 -0
  105. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200/344/270/2111.py +2300 -0
  106. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200/344/270/2111Negative.py +2300 -0
  107. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200/344/270/211Negative.py +3527 -0
  108. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200/344/272/214.py +3597 -0
  109. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200/344/272/2141.py +3317 -0
  110. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200/344/272/2141Negative.py +3317 -0
  111. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200/344/272/214Negative.py +3597 -0
  112. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200/344/272/214/344/270/211.py +3161 -0
  113. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200/344/272/214/344/270/2111.py +2877 -0
  114. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200/344/272/214/344/270/2111Negative.py +2877 -0
  115. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/200/344/272/214/344/270/211Negative.py +3161 -0
  116. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/211.py +2981 -0
  117. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/2111.py +3055 -0
  118. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/2111Negative.py +3055 -0
  119. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/270/211Negative.py +2981 -0
  120. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/272/214.py +3221 -0
  121. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/272/2141.py +3988 -0
  122. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/272/2141Negative.py +3988 -0
  123. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/272/214Negative.py +3221 -0
  124. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/272/214/344/270/211.py +3652 -0
  125. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/272/214/344/270/2111.py +2863 -0
  126. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/272/214/344/270/2111Negative.py +2863 -0
  127. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/344/272/214/344/270/211Negative.py +3652 -0
  128. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200.py +2485 -0
  129. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/2001.py +4566 -0
  130. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/2001Negative.py +4566 -0
  131. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200Negative.py +2485 -0
  132. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200/344/270/211.py +3006 -0
  133. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200/344/270/2111.py +2485 -0
  134. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200/344/270/2111Negative.py +2485 -0
  135. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200/344/270/211Negative.py +3006 -0
  136. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200/344/272/214.py +3304 -0
  137. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200/344/272/2141.py +3015 -0
  138. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200/344/272/2141Negative.py +3015 -0
  139. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200/344/272/214Negative.py +3304 -0
  140. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200/344/272/214/344/270/211.py +2939 -0
  141. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200/344/272/214/344/270/2111.py +2589 -0
  142. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200/344/272/214/344/270/2111Negative.py +2589 -0
  143. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/200/344/272/214/344/270/211Negative.py +2939 -0
  144. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/211.py +3899 -0
  145. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/2111.py +2996 -0
  146. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/2111Negative.py +2996 -0
  147. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/270/211Negative.py +3899 -0
  148. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/272/214.py +3223 -0
  149. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/272/2141.py +3020 -0
  150. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/272/2141Negative.py +3020 -0
  151. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/272/214Negative.py +3223 -0
  152. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/272/214/344/270/211.py +3250 -0
  153. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/272/214/344/270/2111.py +2667 -0
  154. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/272/214/344/270/2111Negative.py +2667 -0
  155. mapFolding/_e/Z0Z_analysisPython/exclusionData/collatedp2d6/351/246/226/351/233/266/344/272/214/344/270/211Negative.py +3250 -0
  156. mapFolding/_e/Z0Z_analysisPython/measure.py +162 -0
  157. mapFolding/_e/Z0Z_analysisPython/positionAnalysis.py +403 -0
  158. mapFolding/_e/Z0Z_analysisPython/positionAnalysisPileRanges2d6.py +110 -0
  159. mapFolding/_e/Z0Z_analysisPython/theExcluderBeast.py +640 -0
  160. mapFolding/_e/Z0Z_analysisPython/toolkit.py +166 -0
  161. mapFolding/_e/Z0Z_analysisPython/toolkitCSVsequences.py +188 -0
  162. mapFolding/_e/Z0Z_analysisPython/workBenchPatternFinder.py +284 -0
  163. mapFolding/_e/Z0Z_notes/__init__.py +0 -0
  164. mapFolding/_e/Z0Z_notes/knowledgeDump.py +214 -0
  165. mapFolding/_e/__init__.py +45 -0
  166. mapFolding/_e/_beDRY.py +547 -0
  167. mapFolding/_e/_dataDynamic.py +1164 -0
  168. mapFolding/_e/_measure.py +579 -0
  169. mapFolding/_e/_semiotics.py +363 -0
  170. mapFolding/_e/_theTypes.py +31 -0
  171. mapFolding/_e/algorithms/__init__.py +1 -0
  172. mapFolding/_e/algorithms/constraintPropagation.py +158 -0
  173. mapFolding/_e/algorithms/elimination.py +118 -0
  174. mapFolding/_e/algorithms/eliminationCrease.py +66 -0
  175. mapFolding/_e/algorithms/iff.py +584 -0
  176. mapFolding/_e/basecamp.py +89 -0
  177. mapFolding/_e/dataBaskets.py +123 -0
  178. mapFolding/_e/dataRaw/__init__.py +0 -0
  179. mapFolding/_e/easyRun/__init__.py +0 -0
  180. mapFolding/_e/easyRun/eliminateFolds.py +72 -0
  181. mapFolding/_e/easyRun/pinning.py +62 -0
  182. mapFolding/_e/filters.py +384 -0
  183. mapFolding/_e/pin2/344/270/212nDimensions.py +882 -0
  184. mapFolding/_e/pin2/344/270/212nDimensionsAnnex.py +551 -0
  185. mapFolding/_e/pin2/344/270/212nDimensionsByCrease.py +190 -0
  186. mapFolding/_e/pin2/344/270/212nDimensionsByDomain.py +459 -0
  187. mapFolding/_e/pinIt.py +436 -0
  188. mapFolding/_semiotics.py +42 -0
  189. mapFolding/_theSSOT.py +11 -56
  190. mapFolding/_theTypes.py +52 -68
  191. mapFolding/algorithms/A086345.py +8 -3
  192. mapFolding/algorithms/__init__.py +1 -1
  193. mapFolding/algorithms/matrixMeandersNumPyndas.py +18 -18
  194. mapFolding/algorithms/oeisIDbyFormula.py +4 -4
  195. mapFolding/algorithms/zCuzDocStoopidoeisIDbyFormula.py +3 -3
  196. mapFolding/basecamp.py +13 -28
  197. mapFolding/beDRY.py +108 -99
  198. mapFolding/filesystemToolkit.py +15 -11
  199. mapFolding/oeis.py +17 -16
  200. mapFolding/reference/matrixMeandersAnalysis/prefixNotationNotes.py +2 -2
  201. mapFolding/reference/meandersDumpingGround/matrixMeandersBaselineV2.py +0 -1
  202. mapFolding/reference/meandersDumpingGround/matrixMeandersNumPyV1finalForm.py +8 -10
  203. mapFolding/someAssemblyRequired/RecipeJob.py +5 -5
  204. mapFolding/someAssemblyRequired/makeJobTheorem2Numba.py +5 -2
  205. mapFolding/someAssemblyRequired/makeJobTheorem2codon.py +9 -11
  206. mapFolding/someAssemblyRequired/mapFoldingModules/makeMapFoldingModules.py +2 -1
  207. mapFolding/someAssemblyRequired/transformationTools.py +2 -2
  208. mapFolding/tests/Z0Z_test_e_excluder.py +155 -0
  209. mapFolding/tests/conftest.py +193 -314
  210. mapFolding/tests/dataSamples/A001417.py +455 -0
  211. mapFolding/tests/dataSamples/__init__.py +1 -0
  212. mapFolding/tests/dataSamples/measurementData.py +1818 -0
  213. mapFolding/tests/dataSamples/p2DnDomain3_2_/351/246/226/344/270/200_/351/246/226/351/233/266/344/270/200.py +17 -0
  214. mapFolding/tests/dataSamples/p2DnDomain3_/351/246/226/344/270/200.py +17 -0
  215. mapFolding/tests/dataSamples/p2DnDomain5_4.py +17 -0
  216. mapFolding/tests/dataSamples/p2DnDomain6_5.py +17 -0
  217. mapFolding/tests/dataSamples/p2DnDomain6_7_5_4.py +17 -0
  218. mapFolding/tests/dataSamples/p2DnDomain7_6.py +17 -0
  219. mapFolding/tests/dataSamples/p2DnDomain/351/246/226/344/272/214_/351/246/226/351/233/266/344/270/200/344/272/214.py +17 -0
  220. mapFolding/tests/dataSamples/p2DnDomain/351/246/226/344/272/214_/351/246/226/351/233/266/344/272/214_/351/246/226/351/233/266/344/270/200/344/272/214_/351/246/226/344/270/200/344/272/214.py +17 -0
  221. mapFolding/tests/dataSamples/p2DnDomain/351/246/226/351/233/266/344/270/200/344/272/214_/351/246/226/344/270/200/344/272/214.py +15 -0
  222. mapFolding/tests/dataSamples/p2DnDomain/351/246/226/351/233/266/344/272/214_/351/246/226/344/272/214.py +15 -0
  223. mapFolding/tests/dataSamples/semioticsData.py +135 -0
  224. mapFolding/tests/test_computations.py +133 -88
  225. mapFolding/tests/test_e_computations.py +42 -0
  226. mapFolding/tests/test_e_dataDynamic.py +189 -0
  227. mapFolding/tests/test_e_measurements.py +257 -0
  228. mapFolding/tests/test_e_pinning.py +61 -0
  229. mapFolding/tests/test_e_semiotics.py +128 -0
  230. mapFolding/tests/test_filesystem.py +39 -17
  231. mapFolding/tests/{test_other.py → test_parameterValidation.py} +3 -3
  232. mapFolding/tests/{test_tasks.py → test_taskDivisions.py} +42 -23
  233. mapFolding/zCuzDocStoopid/makeDocstrings.py +3 -2
  234. {mapfolding-0.17.0.dist-info → mapfolding-0.18.0.dist-info}/METADATA +15 -9
  235. mapfolding-0.18.0.dist-info/RECORD +305 -0
  236. {mapfolding-0.17.0.dist-info → mapfolding-0.18.0.dist-info}/WHEEL +1 -1
  237. easyRun/A000682.py +0 -25
  238. easyRun/A005316.py +0 -20
  239. mapFolding/algorithms/A000136constraintPropagation.py +0 -95
  240. mapFolding/algorithms/A000136elimination.py +0 -163
  241. mapFolding/algorithms/A000136eliminationParallel.py +0 -77
  242. mapfolding-0.17.0.dist-info/RECORD +0 -107
  243. {mapfolding-0.17.0.dist-info → mapfolding-0.18.0.dist-info}/entry_points.txt +0 -0
  244. {mapfolding-0.17.0.dist-info → mapfolding-0.18.0.dist-info}/licenses/LICENSE +0 -0
  245. {mapfolding-0.17.0.dist-info → mapfolding-0.18.0.dist-info}/top_level.txt +0 -0
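Note on the escaped path names above: many of the listed files (the exclusionData aggregates, the pin2上nDimensions modules, and several test data samples) have CJK filenames that this listing renders as git-style octal escapes of their UTF-8 bytes, so a run such as 351/246/226/344/270/200 stands for the two characters 首一. A minimal decoding sketch, assuming each /NNN group is one octal-escaped byte; decodeOctalEscapedPath is an illustrative name, not a helper from mapFolding:

    import re

    def decodeOctalEscapedPath(pathQuoted: str) -> str:
        # Each /NNN group with an octal value of 200 or more is one escaped byte of a UTF-8 filename.
        withHighBytes = re.sub(r'/([2-3][0-7]{2})', lambda match: chr(int(match.group(1), 8)), pathQuoted)
        # Round-trip through latin-1 to turn code points 0x80-0xFF back into raw bytes, then decode as UTF-8.
        return withHighBytes.encode('latin-1').decode('utf-8')

    # decodeOctalEscapedPath('aggregated/351/246/226/344/270/200.py') == 'aggregated首一.py'
    # (The path separator directly before an escaped filename appears to have been collapsed in this listing.)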
mapFolding/_e/Z0Z_analysisPython/toolkit.py
@@ -0,0 +1,166 @@
+ from collections.abc import Callable, Sequence
+ from cytoolz.dicttoolz import valfilter as leafFilter
+ from cytoolz.functoolz import curry as syntacticCurry
+ from dataclasses import dataclass
+ from mapFolding import ansiColorReset, ansiColors, packageSettings
+ from mapFolding._e import DOTvalues, PermutationSpace, 一, 零, 首一, 首零一
+ from mapFolding._e._dataDynamic import getDataFrameFoldings
+ from mapFolding._e.dataBaskets import EliminationState
+ from mapFolding._e.filters import thisIsALeaf
+ from pathlib import Path
+ from pprint import pformat
+ import csv
+ import numpy
+ import sys
+
+ @syntacticCurry
+ def beansWithoutCornbread(state: EliminationState, permutationSpace: PermutationSpace) -> bool:
+ return any((beans in DOTvalues(permutationSpace)) ^ (cornbread in DOTvalues(permutationSpace)) for beans, cornbread in ((一+零, 一), (首一(state.dimensionsTotal), 首零一(state.dimensionsTotal))))
+
+ @dataclass
+ class PermutationSpaceStatus:
+ listSurplusDictionaries: list[PermutationSpace]
+ maskUnion: numpy.ndarray
+ indicesOverlappingRows: numpy.ndarray
+ indicesOverlappingPermutationSpace: set[int]
+ rowsRequired: int
+ rowsTotal: int
+
+ def detectPermutationSpaceErrors(arrayFoldings: numpy.ndarray, listPermutationSpace: Sequence[PermutationSpace]) -> PermutationSpaceStatus:
+ rowsTotal: int = int(arrayFoldings.shape[0])
+ listMasks: list[numpy.ndarray] = []
+ listSurplusDictionaries: list[PermutationSpace] = []
+ for permutationSpace in listPermutationSpace:
+ maskMatches: numpy.ndarray = numpy.ones(rowsTotal, dtype=bool)
+ for pile, leaf in leafFilter(thisIsALeaf, permutationSpace).items():
+ maskMatches = maskMatches & (arrayFoldings[:, pile] == leaf)
+ if not bool(maskMatches.any()):
+ listSurplusDictionaries.append(permutationSpace)
+ listMasks.append(maskMatches)
+
+ if listMasks:
+ masksStacked: numpy.ndarray = numpy.column_stack(listMasks)
+ else:
+ masksStacked = numpy.zeros((rowsTotal, 0), dtype=bool)
+
+ coverageCountPerRow: numpy.ndarray = masksStacked.sum(axis=1)
+ maskUnion: numpy.ndarray = coverageCountPerRow > 0
+ rowsRequired: int = int(maskUnion.sum())
+ indicesOverlappingRows: numpy.ndarray = numpy.flatnonzero(coverageCountPerRow >= 2)
+ indicesOverlappingPermutationSpace: set[int] = set()
+ if indicesOverlappingRows.size > 0:
+ for indexMask, mask in enumerate(listMasks):
+ if bool(mask[indicesOverlappingRows].any()):
+ indicesOverlappingPermutationSpace.add(indexMask)
+
+ return PermutationSpaceStatus(listSurplusDictionaries, maskUnion, indicesOverlappingRows, indicesOverlappingPermutationSpace, rowsRequired, rowsTotal)
+
+ def verifyPinning2Dn(state: EliminationState) -> None:
+ def getPermutationSpaceWithLeafValuesOnly(permutationSpace: PermutationSpace) -> PermutationSpace:
+ return leafFilter(thisIsALeaf, permutationSpace)
+ arrayFoldings = getDataFrameFoldings(state)
+ if arrayFoldings is not None:
+ arrayFoldings = arrayFoldings.to_numpy(dtype=numpy.uint8, copy=False)
+ pinningCoverage: PermutationSpaceStatus = detectPermutationSpaceErrors(arrayFoldings, state.listPermutationSpace)
+
+ listSurplusDictionariesOriginal: list[PermutationSpace] = pinningCoverage.listSurplusDictionaries
+ listDictionaryPinned: list[PermutationSpace] = [
+ getPermutationSpaceWithLeafValuesOnly(permutationSpace)
+ for permutationSpace in listSurplusDictionariesOriginal
+ ]
+ if listDictionaryPinned:
+ sys.stdout.write(ansiColors.YellowOnBlack)
+ sys.stdout.write(pformat(listDictionaryPinned[0:5], width=140) + '\n')
+ else:
+ sys.stdout.write(ansiColors.GreenOnBlack)
+ sys.stdout.write(f"{len(listDictionaryPinned)} surplus dictionaries.\n")
+ sys.stdout.write(ansiColorReset)
+
+ pathFilename = Path(f"{packageSettings.pathPackage}/_e/analysisExcel/p2d{state.dimensionsTotal}SurplusDictionaries.csv")
+
+ if listDictionaryPinned:
+ with pathFilename.open('w', newline='') as writeStream:
+ writerCSV = csv.writer(writeStream)
+ listPiles: list[int] = list(range(state.leavesTotal))
+ writerCSV.writerow(listPiles)
+ for permutationSpace in listDictionaryPinned:
+ writerCSV.writerow([permutationSpace.get(pile, '') for pile in listPiles])
+
+ if pinningCoverage.indicesOverlappingPermutationSpace:
+ sys.stdout.write(f"{ansiColors.RedOnWhite}{len(pinningCoverage.indicesOverlappingPermutationSpace)} overlapping dictionaries{ansiColorReset}\n")
+ for indexDictionary in sorted(pinningCoverage.indicesOverlappingPermutationSpace)[0:2]:
+ sys.stdout.write(pformat(leafFilter(thisIsALeaf, state.listPermutationSpace[indexDictionary]), width=140) + '\n')
+
+ beansOrCornbread: Callable[[PermutationSpace], bool] = beansWithoutCornbread(state)
+ listBeans: list[PermutationSpace] = list(filter(beansOrCornbread, state.listPermutationSpace))
+ if listBeans:
+ sys.stdout.write(f"{ansiColors.MagentaOnBlack}{len(listBeans)} dictionaries with beans but no cornbread.{ansiColorReset}\n")
+ sys.stdout.write(pformat(getPermutationSpaceWithLeafValuesOnly(listBeans[0]), width=140) + '\n')
+
+ maskUnion: numpy.ndarray = pinningCoverage.maskUnion
+ rowsRequired: int = pinningCoverage.rowsRequired
+ rowsTotal: int = pinningCoverage.rowsTotal
+ color = ansiColorReset
+ if rowsRequired < rowsTotal:
+ color = ansiColors.RedOnWhite
+ indicesMissingRows: numpy.ndarray = numpy.flatnonzero(~maskUnion)
+ for indexRow in indicesMissingRows[0:2]:
+ sys.stdout.write(f"{color}{arrayFoldings[indexRow, :]}\n")
+ sys.stdout.write(f"{color}Required rows: {rowsRequired}/{rowsTotal}{ansiColorReset}\n")
+
+ def verifyDomainAgainstKnown(domainComputed: Sequence[tuple[int, ...]], domainKnown: Sequence[tuple[int, ...]], *, printResults: bool = True) -> dict[str, list[tuple[int, ...]]]:
+ """Compare a computed domain against known verification data.
+
+ Parameters
+ ----------
+ domainComputed : Sequence[tuple[int, ...]]
+ The domain generated by the function under development.
+ domainKnown : Sequence[tuple[int, ...]]
+ The empirically extracted domain from verification data (e.g., from `makeVerificationDataLeavesDomain`).
+ printResults : bool = True
+ Whether to print the comparison results using pprint.
+
+ Returns
+ -------
+ comparisonResults : dict[str, list[tuple[int, ...]]]
+ Dictionary with keys:
+ - 'missing': tuples in domainKnown but not in domainComputed (the function fails to generate these)
+ - 'surplus': tuples in domainComputed but not in domainKnown (the function generates extra invalid tuples)
+ - 'matched': tuples present in both domains
+
+ """
+ setComputed: set[tuple[int, ...]] = set(domainComputed)
+ setKnown: set[tuple[int, ...]] = set(domainKnown)
+
+ listMissing: list[tuple[int, ...]] = sorted(setKnown - setComputed)
+ listSurplus: list[tuple[int, ...]] = sorted(setComputed - setKnown)
+ listMatched: list[tuple[int, ...]] = sorted(setComputed & setKnown)
+
+ comparisonResults: dict[str, list[tuple[int, ...]]] = {
+ 'missing': listMissing,
+ 'surplus': listSurplus,
+ 'matched': listMatched,
+ }
+
+ if printResults:
+ countComputed: int = len(setComputed)
+ countKnown: int = len(setKnown)
+ countMissing: int = len(listMissing)
+ countSurplus: int = len(listSurplus)
+ countMatched: int = len(listMatched)
+
+ sys.stdout.write(f"Domain comparison: {countComputed} computed vs {countKnown} known\n")
+ sys.stdout.write(f" Matched: {countMatched} ({100 * countMatched / countKnown:.1f}% of known)\n")
+
+ if listMissing:
+ sys.stdout.write(f" Missing ({countMissing} tuples in known but not in computed):\n")
+ sys.stdout.write(pformat(listMissing, width=140, compact=True) + '\n')
+
+ if listSurplus:
+ sys.stdout.write(f" Surplus ({countSurplus} tuples in computed but not in known):\n")
+ sys.stdout.write(pformat(listSurplus, width=140, compact=True) + '\n')
+
+ if not listMissing and not listSurplus:
+ sys.stdout.write(" Perfect match!\n")
+
+ return comparisonResults
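A minimal usage sketch for verifyDomainAgainstKnown as added above; the two domains here are made-up illustrations, not data from the package:

    domainComputed = [(0, 1, 3, 2), (0, 2, 3, 1)]
    domainKnown = [(0, 1, 3, 2), (0, 3, 1, 2)]
    comparison = verifyDomainAgainstKnown(domainComputed, domainKnown, printResults=False)
    # comparison['matched'] == [(0, 1, 3, 2)]
    # comparison['missing'] == [(0, 3, 1, 2)]   # present in the known data, never generated
    # comparison['surplus'] == [(0, 2, 3, 1)]   # generated, but absent from the known data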
mapFolding/_e/Z0Z_analysisPython/toolkitCSVsequences.py
@@ -0,0 +1,188 @@
+ from mapFolding import packageSettings
+ from mapFolding._e import getLeavesCreasePost
+ from mapFolding._e.dataBaskets import EliminationState
+ from pathlib import Path, PurePath
+ from typing import TextIO
+
+ def subdivideP2d7s0_1_3_2CSVFile(state: EliminationState, pathDataRaw: Path) -> None:
+ pathSorted: Path = pathDataRaw / "sorted"
+ pathSorted.mkdir(exist_ok=True)
+
+ pathFilenameSource: Path = pathDataRaw / "p2d7s0_1_3_2.csv"
+ if pathFilenameSource.exists():
+ setLeavesAllowedAfterTwo: set[int] = set(getLeavesCreasePost(state, 2))
+
+ dictionaryAppendStreams: dict[int, TextIO] = {}
+ try:
+ with pathFilenameSource.open('r', newline='') as readStream:
+ for lineRaw in readStream:
+ line: str = lineRaw.rstrip('\n').rstrip('\r')
+ if len(line) != 401:
+ continue
+ if line.count(',') != 127:
+ continue
+ if not line.startswith("0,1,3,2,"):
+ continue
+ if line[0] == ',' or line[-1] == ',' or ',,' in line:
+ continue
+
+ listPrefixParts: list[str] = line.split(',', 5)
+ if len(listPrefixParts) < 6:
+ continue
+ if not listPrefixParts[4].isdigit():
+ continue
+ leafFifth: int = int(listPrefixParts[4])
+ if leafFifth not in setLeavesAllowedAfterTwo:
+ continue
+
+ appendStream: TextIO | None = dictionaryAppendStreams.get(leafFifth)
+ if appendStream is None:
+ pathFilenameOutput: Path = pathDataRaw / f"p2d7s0_1_3_2_{leafFifth}.csv"
+ appendStream = pathFilenameOutput.open('a', newline='')
+ dictionaryAppendStreams[leafFifth] = appendStream
+
+ appendStream.write(line)
+ appendStream.write('\n')
+
+ pathFilenameDestination: Path = pathSorted / pathFilenameSource.name
+ pathFilenameSource.replace(pathFilenameDestination)
+ finally:
+ for appendStream in dictionaryAppendStreams.values():
+ appendStream.close()
+
+ def cleanAndSortSequencesCSVFile(state: EliminationState, pathFilename: PurePath) -> None:
+ pathFilenameTarget: Path = Path(pathFilename)
+ pathSorted: Path = pathFilenameTarget.parent / "sorted"
+ pathSorted.mkdir(exist_ok=True)
+
+ lineHeader: str | None = None
+ tupleHeaderExpected: tuple[int, ...] = tuple(range(state.leavesTotal))
+
+ setSequences: set[tuple[int, ...]] = set()
+ listSequencesUnique: list[tuple[int, ...]] = []
+
+ duplicatesDetected: bool = False
+ invalidLinesDetected: bool = False
+ sortedAlready: bool = True
+ sequencePrior: tuple[int, ...] | None = None
+
+ with pathFilenameTarget.open('r', newline='') as readStream:
+ for indexLine, lineRaw in enumerate(readStream):
+ line: str = lineRaw.rstrip('\n').rstrip('\r')
+ if indexLine == 0 and line.startswith("0,1,2,"):
+ listHeaderParts: list[str] = line.split(',')
+ if len(listHeaderParts) == state.leavesTotal:
+ try:
+ tupleHeaderFound: tuple[int, ...] = tuple(int(part) for part in listHeaderParts)
+ except ValueError:
+ tupleHeaderFound = ()
+ if tupleHeaderFound == tupleHeaderExpected:
+ lineHeader = line
+ continue
+
+ if not line:
+ continue
+ if line[0] == ',' or line[-1] == ',' or ',,' in line:
+ invalidLinesDetected = True
+ continue
+ if line.count(',') != state.leavesTotal - 1:
+ invalidLinesDetected = True
+ continue
+ try:
+ tupleSequence: tuple[int, ...] = tuple(int(part) for part in line.split(','))
+ except ValueError:
+ invalidLinesDetected = True
+ continue
+ if len(tupleSequence) != state.leavesTotal:
+ invalidLinesDetected = True
+ continue
+
+ if sequencePrior is not None and tupleSequence < sequencePrior:
+ sortedAlready = False
+ sequencePrior = tupleSequence
+
+ if tupleSequence in setSequences:
+ duplicatesDetected = True
+ continue
+ setSequences.add(tupleSequence)
+ listSequencesUnique.append(tupleSequence)
+
+ if not (duplicatesDetected or invalidLinesDetected or not sortedAlready):
+ return
+
+ listSequencesSorted: list[tuple[int, ...]] = sorted(listSequencesUnique)
+ pathFilenameBackup: Path = pathSorted / pathFilenameTarget.name
+ pathFilenameTarget.replace(pathFilenameBackup)
+ with pathFilenameTarget.open('w', newline='') as writeStream:
+ if lineHeader is not None:
+ writeStream.write(lineHeader)
+ writeStream.write('\n')
+ for tupleSequence in listSequencesSorted:
+ writeStream.write(','.join(str(value) for value in tupleSequence))
+ writeStream.write('\n')
+
+ def sortP2d7GeneratedCSVFiles(state: EliminationState, pathDataRaw: Path) -> None:
+ pathSorted: Path = pathDataRaw / "sorted"
+ pathSorted.mkdir(exist_ok=True)
+
+ setLeavesAllowedAfterOne: set[int] = set(getLeavesCreasePost(state, 1))
+ dictionaryAllowedAfterThird: dict[int, set[int]] = {
+ leafThird: set(getLeavesCreasePost(state, leafThird))
+ for leafThird in setLeavesAllowedAfterOne
+ }
+
+ dictionaryAppendStreams: dict[tuple[int, int], TextIO] = {}
+ try:
+ for pathFilenameSource in sorted(pathDataRaw.glob("p2d7_*.csv")):
+ with pathFilenameSource.open('r', newline='') as readStream:
+ for lineRaw in readStream:
+ line: str = lineRaw.rstrip('\n').rstrip('\r')
+ if len(line) != 401:
+ continue
+ if line.count(',') != 127:
+ continue
+ if not line.startswith("0,1,"):
+ continue
+ if line[0] == ',' or line[-1] == ',' or ',,' in line:
+ continue
+
+ listPrefixParts: list[str] = line.split(',', 4)
+ if len(listPrefixParts) < 5:
+ continue
+ if not listPrefixParts[2].isdigit() or not listPrefixParts[3].isdigit():
+ continue
+ leafThird: int = int(listPrefixParts[2])
+ leafFourth: int = int(listPrefixParts[3])
+ if leafThird not in setLeavesAllowedAfterOne:
+ continue
+ if leafFourth not in dictionaryAllowedAfterThird[leafThird]:
+ continue
+
+ key: tuple[int, int] = (leafThird, leafFourth)
+ appendStream: TextIO | None = dictionaryAppendStreams.get(key)
+ if appendStream is None:
+ pathFilenameOutput: Path = pathDataRaw / f"p2d7s0_1_{leafThird}_{leafFourth}.csv"
+ appendStream = pathFilenameOutput.open('a', newline='')
+ dictionaryAppendStreams[key] = appendStream
+
+ appendStream.write(line)
+ appendStream.write('\n')
+
+ pathFilenameDestination: Path = pathSorted / pathFilenameSource.name
+ pathFilenameSource.replace(pathFilenameDestination)
+ finally:
+ for appendStream in dictionaryAppendStreams.values():
+ appendStream.close()
+
+ if __name__ == '__main__':
+ sortEm = True
+ if sortEm:
+ state = EliminationState((2,) * 7)
+ pathDataRaw: Path = packageSettings.pathPackage / "_e" / "dataRaw"
+ sortP2d7GeneratedCSVFiles(state, pathDataRaw)
+ subdivideP2d7s0_1_3_2CSVFile(state, pathDataRaw)
+ for pathFilename in pathDataRaw.glob("p2d7s*.csv"):
+ cleanAndSortSequencesCSVFile(state, pathFilename)
+
+ # type \apps\mapFolding\mapFolding\_e\dataRaw\p2d7s*.csv | find /c /v ""
+ # 521292 of 562368 😢
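The magic numbers in the line filters above follow from the data shape: with state = EliminationState((2,) * 7) there are 2**7 = 128 leaves, so a complete row is 128 comma-separated values drawn from 0 through 127. A quick arithmetic check of the expected row length (my own verification, not code from the package):

    leavesTotal = 2 ** 7                                          # 128 leaves for a (2,) * 7 map
    digits = sum(len(str(leaf)) for leaf in range(leavesTotal))   # 10*1 + 90*2 + 28*3 = 274 characters
    commas = leavesTotal - 1                                      # 127 separators
    assert commas == 127 and digits + commas == 401               # matches the len(line) and line.count(',') checks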
mapFolding/_e/Z0Z_analysisPython/workBenchPatternFinder.py
@@ -0,0 +1,284 @@
+ # ruff: noqa: ERA001 T201 T203 # noqa: RUF100
+ from bisect import bisect_left
+ from collections.abc import Iterable
+ from cytoolz.functoolz import curry as syntacticCurry
+ from cytoolz.itertoolz import unique
+ from functools import partial
+ from gmpy2 import is_even, is_odd
+ from hunterMakesPy import raiseIfNone
+ from mapFolding import decreasing
+ from mapFolding._e import (
+ dimensionNearest首, getDictionaryLeafDomains, getDictionaryPileRanges, getLeavesCreaseAnte, getLeavesCreasePost,
+ getPileRange, getSumsOfProductsOfDimensionsNearest首, Leaf, leafInSubHyperplane, Pile, ptount, 零, 首一, 首二, 首零, 首零一)
+ from mapFolding._e._dataDynamic import getDataFrameFoldings
+ from mapFolding._e._measure import invertLeafIn2上nDimensions
+ from mapFolding._e.dataBaskets import EliminationState
+ from math import prod
+ from more_itertools import flatten, iter_index
+ from operator import add, iadd, isub, mul
+ from pprint import pprint
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ import pandas
+
+ def _getGroupedBy(state: EliminationState, pileTarget: Pile, groupByLeavesAtPiles: tuple[Pile, ...]) -> dict[Leaf | tuple[Leaf, ...], list[Leaf]]:
+ dataframeFoldings: pandas.DataFrame = raiseIfNone(getDataFrameFoldings(state))
+ groupedBy: dict[Leaf | tuple[Leaf, ...], list[Leaf]] = dataframeFoldings.groupby(list(groupByLeavesAtPiles))[pileTarget].apply(list).to_dict() # pyright: ignore[reportAssignmentType]
+ return {leaves: sorted(set(listLeaves)) for leaves, listLeaves in groupedBy.items()}
+
+ def getExcludedLeaves(state: EliminationState, pileTarget: Pile, groupByLeavesAtPiles: tuple[Pile, ...]) -> dict[Leaf | tuple[Leaf, ...], list[Leaf]]:
+ return {leaves: sorted(set(getDictionaryPileRanges(state)[pileTarget]).difference(set(listLeaves))) for leaves, listLeaves in _getGroupedBy(state, pileTarget, groupByLeavesAtPiles).items()}
+
+ if __name__ == '__main__':
+
+ state = EliminationState((2,) * 6)
+ """
+ 000011 3
+ 5 (5, 6, 10, 18, 34)
+ 9 (9, 10, 12, 20, 36)
+ 001111 15
+ 17 17 (17, 18, 20, 24, 40)
+ 010111 (23, 24, 40)
+ 011011 (27, 29, 45)
+ 33 33 (33, 34, 36, 40)
+
+ 100111 39 (39, 40)
+ 101011 43
+ 45 (45, 46, 54)
+ 110011 51
+ 53 53 (53, 54, 58)
+ 57 (57, 58, 60)
+
+ 111111 63
+
+ even bit count
+ 0 0 00 11 its creases: crease+1
+ 0 0 11 11 its creases: crease+1
+ 0 1 01 11 crease+1
+ 0 1 10 11 its creases: crease+1
+
+ odd bit count
+ 1 0 01 11 crease+1
+ 1 0 10 11 its creases: crease+1
+ 1 1 00 11 its creases: crease+1
+ 1 1 11 11 n/a
+
+ tt = (3, 5, 6, 9, 10, 12, 15, 17, 18, 20, 23, 24, 27, 29, 30, 33, 34, 36, 39, 40, 43, 45, 46, 51, 53, 54, 57, 58, 60, 63)
+ pp = (1, 2, 4, 8, 16, 32)
+
+ pp63 = (63,)
+ pp60 = (60,)
+ pp58 = (58, 60)
+ pp57 = (57, 58, 60)
+ pp54 = (54, 58)
+ pp53 = (53, 54, 58)
+ pp51 = (51, 53, 57)
+ pp46 = (46, 54)
+ pp45 = (45, 46, 54)
+ pp43 = (43, 45, 53)
+ pp40 = (40,)
+ pp39 = (39, 40)
+ pp36 = (36, 40)
+ pp34 = (34, 36, 40)
+ pp33 = (33, 34, 36, 40)
+ pp30 = (30, 34)
+ pp29 = (29, 30, 34)
+ pp27 = (27, 29, 45)
+ pp24 = (24, 40)
+ pp23 = (23, 24, 40)
+ pp20 = (20, 24, 40)
+ pp18 = (18, 20, 24, 40)
+ pp17 = (17, 18, 20, 24, 40)
+ pp15 = (15, 17, 33)
+ pp12 = (12, 20, 36)
+ pp10 = (10, 12, 20, 36)
+ pp9 = (9, 10, 12, 20, 36)
+ pp6 = (6, 10, 18, 34)
+ pp5 = (5, 6, 10, 18, 34)
+ pp3 = (3, 5, 9, 17, 33)
+
+ """
+
+ pile: Pile = 4
+ pileDimension = bisect_left(state.sumsOfProductsOfDimensionsNearest首, pile>>1<<1)
+ leafMinimum = is_even(pile) + state.productsOfDimensions[pileDimension]
+ pileRange: list[Leaf] = []
+
+ # pileRange.append(leafMinimum)
+
+ if is_even(pile):
+ dd = pileDimension
+
+ ss = state.sumsOfProductsOfDimensions[dd]
+ # pileRange.extend(map(partial(iadd, leafMinimum - ss), state.sumsOfProductsOfDimensions[1:dd]))
+ # pileRange.extend(map(partial(iadd, leafMinimum - ss), state.sumsOfProductsOfDimensions[dd + 1: state.dimensionsTotal]))
+
+ if dd < dimensionNearest首(pile):
+ dd += 1
+
+ ss = state.productsOfDimensions[dd]
+ pileRange.extend(map(partial(isub, leafMinimum + ss), state.sumsOfProductsOfDimensions[1:dd]))
+ pileRange.extend(map(partial(iadd, leafMinimum + ss), state.sumsOfProductsOfDimensions[dd + 1: state.dimensionsTotal]))
+
+ if is_odd(pile):
+ dd = pileDimension
+
+ ss = state.sumsOfProductsOfDimensions[dd]
+ pileRange.extend(map(partial(iadd, leafMinimum + ss), state.productsOfDimensions[1:dd]))
+ pileRange.extend(map(partial(iadd, leafMinimum + ss), state.productsOfDimensions[dd + 1: state.dimensionsTotal]))
+
+ dd += 1
+
+ ss = state.sumsOfProductsOfDimensions[dd]
+ pileRange.extend(map(partial(iadd, leafMinimum + ss), state.productsOfDimensions[1:dd]))
+ pileRange.extend(map(partial(iadd, leafMinimum + ss), state.productsOfDimensions[dd + 1: state.dimensionsTotal]))
+
+ dd += 1
+
+ ss = state.sumsOfProductsOfDimensions[dd]
+ pileRange.extend(map(partial(iadd, leafMinimum + ss), state.productsOfDimensions[1:dd]))
+ pileRange.extend(map(partial(iadd, leafMinimum + ss), state.productsOfDimensions[dd + 1: state.dimensionsTotal]))
+
+ dd += 1
+
+ ss = state.sumsOfProductsOfDimensions[dd]
+ pileRange.extend(map(partial(iadd, leafMinimum + ss), state.productsOfDimensions[1:dd]))
+ pileRange.extend(map(partial(iadd, leafMinimum + ss), state.productsOfDimensions[dd + 1: state.dimensionsTotal]))
+
+ print(pile, pileDimension)
+ print(sorted(set(pileRange)))
+ rr = tuple(getPileRange(state, pile))
+ print(rr)
+ rrLess1 = tuple(getPileRange(state, pile - 1))
+ print(rrLess1)
+
+ """Notes
+ 33 has step = 4
+ """
+
+ leafExcluderStuff = False
+ if leafExcluderStuff:
+ pileExcluder = 60
+ pileTarget=31
+ dictionaryExcluded = getExcludedLeaves(state, pileTarget, groupByLeavesAtPiles=(pileExcluder,))
+ domains = getDictionaryLeafDomains(state)
+ pileRange31 = frozenset(getPileRange(state, 31))
+
+ for pile in range(state.leavesTotal):
+ continue
+ print(pile, set(getPileRange(state, pile)).difference(getExcludedLeaves(state, pileTarget, groupByLeavesAtPiles=(pile,)).keys()))
+
+ for excluder, listExcluded in dictionaryExcluded.items():
+ continue
+
+ invert = int(excluder^63) # pyright: ignore[reportUnknownArgumentType, reportOperatorIssue]
+ creasePostSS = tuple(getLeavesCreasePost(state, invert)) # pyright: ignore[reportArgumentType]
+ allCreasePostSSInRange = set(creasePostSS).intersection(pileRange31)
+ creaseAnte = tuple(getLeavesCreaseAnte(state, excluder)) # pyright: ignore[reportArgumentType]
+ creasePost = tuple(getLeavesCreasePost(state, excluder)) # pyright: ignore[reportArgumentType]
+ allCreaseAnteInRange = set(creaseAnte).intersection(pileRange31)
+ allCreasePostInRange = set(creasePost).intersection(pileRange31)
+ notExcluded = allCreasePostInRange.difference(listExcluded)
+ # print(excluder, invert, allCreasePostSSInRange.intersection(listExcluded), notExcluded, allCreasePostSSInRange.difference(listExcluded), set(creasePostSS).symmetric_difference(creasePost), creasePostSS, allCreasePostSSInRange)
+ # print(excluder.__format__('06b'), excluder, f"{notExcluded}\t", f"{creasePost}", sep='\t')
+ print(excluder, f"{allCreaseAnteInRange=}", f"{allCreasePostInRange=}", sep='\t')
+ print(excluder, f"{allCreaseAnteInRange.difference(listExcluded)}", f"{allCreasePostInRange.difference(listExcluded)}", sep='\t')
+
+ pileRangeByFormula: bool = False
+ if pileRangeByFormula:
+ state = EliminationState((2,) * 6)
+
+ # NOTE works for 9 <= odd piles <= 47
+ # I _think_ I need to be able to pass start/stop to intraDimensionalLeaves
+ # Yes, sort of. `Z0Z_alphaBeta` and `intraDimensionalLeaves` need to be the same function: and I need to be able to tweak all of the parameters.
+
+ @syntacticCurry
+ def intraDimensionalLeaves(state: EliminationState, dimensionOrigin: int) -> list[int]:
+ return list(map(partial(add, dimensionOrigin+2), state.sumsOfProductsOfDimensions[1: dimensionNearest首(dimensionOrigin)]))
+
+ @syntacticCurry
+ def Z0Z_alphaBeta(state: EliminationState, alphaStart: int = 0, betaStop: int = 0, charlieStep: int = 1) -> list[int]:
+ return list(flatten(map(intraDimensionalLeaves(state), state.productsOfDimensions[2 + alphaStart: (state.dimensionsTotal - 1) + betaStop: charlieStep])))
+
+ def Z0Z_getPileRange(state: EliminationState, pile: Pile) -> Iterable[Leaf]:
+ pileRange: list[Leaf] = []
+
+ # odd leaves < 32.
+ # ? 12 < even leaves < 32.
+ # ? 24 < even leaves < 32.
+ # piles 49, 51, 53, 55 need a higher start on yy=0.
+ for yy in range(3):
+ pileRange.extend(map(partial(mul, state.productsOfDimensions[yy]), Z0Z_alphaBeta(state, betaStop=-(yy))))
+
+ # 32 < even leaves
+ for yy in range(1):
+ pileRange.extend(map(partial(invertLeafIn2上nDimensions, state.dimensionsTotal), map(partial(mul, state.productsOfDimensions[yy])
+ , Z0Z_alphaBeta(state
+ , alphaStart=yy+(state.dimensionsTotal - 2 - dimensionNearest首(pile))
+ , betaStop=-(yy)
+ ))))
+ # ? 32 < odd leaves < 52
+ # ? 32 < odd leaves < 36
+ for yy in range(1,3):
+ pileRange.extend(map(partial(invertLeafIn2上nDimensions, state.dimensionsTotal), map(partial(mul, state.productsOfDimensions[yy]), Z0Z_alphaBeta(state, betaStop=-(yy)))))
+
+ # dimension origins
+ # piles 51, 53, 55 need a higher start.
+ pileRange.extend(state.productsOfDimensions[1 + ((零)+首零(state.dimensionsTotal) < pile):dimensionNearest首(pile+1)])
+ # inverse dimension origins: 62, 61, 59, 55, 47, 31
+ # pile5 needs a higher start.
+ pileRange.extend(map(partial(invertLeafIn2上nDimensions, state.dimensionsTotal), state.productsOfDimensions[0:state.dimensionsTotal]))
+
+ return tuple(sorted(pileRange))
+
+ def Z0Z_getPileRangeEven(state: EliminationState, pile: Pile) -> Iterable[Leaf]:
+ pileRange: list[Leaf] = []
+
+ for yy in range(3):
+ pileRange.extend(map(
+ partial(add, 1)
+ , (map(
+ partial(mul, state.productsOfDimensions[yy])
+ , Z0Z_alphaBeta(state, alphaStart = 0, betaStop=-(yy))
+ )
+ )
+ )
+ )
+
+ # for yy in range(1):
+ # pileRange.extend(map(partial(Z0Z_invert, state.dimensionsTotal), map(partial(mul, state.productsOfDimensions[yy])
+ # , Z0Z_alphaBeta(state
+ # , alphaStart=yy+(state.dimensionsTotal - 2 - dimensionNearest首(pile))
+ # , betaStop=-(yy)
+ # ))))
+ # for yy in range(1,3):
+ # pileRange.extend(map(partial(Z0Z_invert, state.dimensionsTotal), map(partial(mul, state.productsOfDimensions[yy]), Z0Z_alphaBeta(state, betaStop=-(yy)))))
+
+ # dimension origins
+ pileRange.extend(map(partial(add, 1), state.productsOfDimensions[1 + ((零)+首零(state.dimensionsTotal) < pile):dimensionNearest首(pile+1)]))
+ # inverse dimension origins: 62, 61, 59, 55, 47, 31
+ pileRange.extend(map(partial(invertLeafIn2上nDimensions, state.dimensionsTotal), map(partial(add, 1), state.productsOfDimensions[1:state.dimensionsTotal])))
+
+ return tuple(sorted(pileRange))
+
+ for pile in range(首一(state.dimensionsTotal), 首零一(state.dimensionsTotal), 2):
+ print(pile, (real:=tuple(getPileRange(state, pile))) == (computed:=Z0Z_getPileRangeEven(state, pile)), end=': ')
+ # print(f"{ansiColors.Green}surplus: {set(computed).difference(real)}", f"{ansiColors.Magenta}missing: {set(real).difference(computed)}{ansiColorReset}", sep='\n')
+ pprint(f"{computed=}", width=180)
+
+ for pile in range((零)+首二(state.dimensionsTotal), 首零一(state.dimensionsTotal), 2):
+ print(pile, (real:=tuple(getPileRange(state, pile))) == (computed:=Z0Z_getPileRange(state, pile)), end=': ')
+ # print(f"surplus: {set(computed).difference(real)}", f"missing: {set(real).difference(computed)}", sep='\n')
+ pprint(f"{computed=}", width=180)
+
+ # > 32: matches most tail0s != 1
+ # if pile > 32:
+ # pile-=1
+ # else:
+ # pile+=1
+ # zz = tuple(map(partial(xor, 1), zz))
+ # print(pile, (ll:=getPileRange(state, pile)) == (zz), end=': ')
+ # # print(set(zz).difference(ll), set(ll).difference(zz), sep='\t')
+ # pprint(zz, width=180)
+