mapFolding 0.3.6__tar.gz → 0.3.7__tar.gz

This diff compares the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
Files changed (36)
  1. {mapfolding-0.3.6 → mapfolding-0.3.7}/PKG-INFO +5 -4
  2. mapfolding-0.3.7/mapFolding/citations/updateCitation.py +238 -0
  3. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/mapFolding.egg-info/PKG-INFO +5 -4
  4. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/mapFolding.egg-info/SOURCES.txt +0 -2
  5. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/mapFolding.egg-info/requires.txt +2 -1
  6. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/someAssemblyRequired/synthesizeModuleJob.py +4 -3
  7. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/syntheticModules/Initialize.py +3 -3
  8. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/syntheticModules/Sequential.py +1 -1
  9. {mapfolding-0.3.6 → mapfolding-0.3.7}/pyproject.toml +4 -3
  10. mapfolding-0.3.6/mapFolding/citations/updateCitation.py +0 -116
  11. mapfolding-0.3.6/mapFolding/citations/updateCitationgpt.py +0 -125
  12. mapfolding-0.3.6/mapFolding/someAssemblyRequired/generalizeSourceCode.py +0 -122
  13. {mapfolding-0.3.6 → mapfolding-0.3.7}/README.md +0 -0
  14. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/benchmarks/benchmarking.py +0 -0
  15. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/mapFolding.egg-info/dependency_links.txt +0 -0
  16. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/mapFolding.egg-info/entry_points.txt +0 -0
  17. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/mapFolding.egg-info/top_level.txt +0 -0
  18. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/reference/flattened.py +0 -0
  19. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/reference/hunterNumba.py +0 -0
  20. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/reference/irvineJavaPort.py +0 -0
  21. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/reference/jax.py +0 -0
  22. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/reference/lunnan.py +0 -0
  23. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/reference/lunnanNumpy.py +0 -0
  24. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/reference/lunnanWhile.py +0 -0
  25. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/reference/rotatedEntryPoint.py +0 -0
  26. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/reference/total_countPlus1vsPlusN.py +0 -0
  27. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/someAssemblyRequired/__init__.py +0 -0
  28. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/someAssemblyRequired/getLLVMforNoReason.py +0 -0
  29. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/someAssemblyRequired/makeJob.py +0 -0
  30. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/someAssemblyRequired/synthesizeModules.py +0 -0
  31. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/syntheticModules/Parallel.py +0 -0
  32. {mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/syntheticModules/__init__.py +0 -0
  33. {mapfolding-0.3.6 → mapfolding-0.3.7}/setup.cfg +0 -0
  34. {mapfolding-0.3.6 → mapfolding-0.3.7}/tests/test_oeis.py +0 -0
  35. {mapfolding-0.3.6 → mapfolding-0.3.7}/tests/test_other.py +0 -0
  36. {mapfolding-0.3.6 → mapfolding-0.3.7}/tests/test_tasks.py +0 -0
{mapfolding-0.3.6 → mapfolding-0.3.7}/PKG-INFO
@@ -1,12 +1,12 @@
  Metadata-Version: 2.2
  Name: mapFolding
- Version: 0.3.6
+ Version: 0.3.7
  Summary: Count distinct ways to fold a map (or a strip of stamps)
  Author-email: Hunter Hogan <HunterHogan@pm.me>
  License: CC-BY-NC-4.0
- Project-URL: homepage, https://github.com/hunterhogan/mapFolding
+ Project-URL: Homepage, https://github.com/hunterhogan/mapFolding
  Project-URL: Donate, https://www.patreon.com/integrated
- Project-URL: repository, https://github.com/hunterhogan/mapFolding.git
+ Project-URL: Repository, https://github.com/hunterhogan/mapFolding.git
  Keywords: A001415,A001416,A001417,A001418,A195646,folding,map folding,OEIS,stamp folding
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
@@ -30,12 +30,13 @@ Requires-Dist: jupyter; extra == "benchmark"
  Requires-Dist: pandas; extra == "benchmark"
  Requires-Dist: tqdm; extra == "benchmark"
  Provides-Extra: testing
+ Requires-Dist: attrs; extra == "testing"
  Requires-Dist: cffconvert; extra == "testing"
  Requires-Dist: more_itertools; extra == "testing"
- Requires-Dist: pytest; extra == "testing"
  Requires-Dist: pytest-cov; extra == "testing"
  Requires-Dist: pytest-env; extra == "testing"
  Requires-Dist: pytest-xdist; extra == "testing"
+ Requires-Dist: pytest; extra == "testing"
  Requires-Dist: python_minifier; extra == "testing"
  Requires-Dist: tomli; extra == "testing"

mapfolding-0.3.7/mapFolding/citations/updateCitation.py
@@ -0,0 +1,238 @@
+ from cffconvert.cli.create_citation import create_citation
+ from packaging.metadata import Metadata as PyPAMetadata
+ from typing import Any, Dict, List
+ import attrs
+ import cffconvert
+ import tempfile
+ import packaging
+ import packaging.metadata
+ import packaging.utils
+ import packaging.version
+ import pathlib
+ import ruamel.yaml
+ import tomli
+
+ listProjectURLsTarget: List[str] = ["homepage", "license", "repository"]
+
+ """
+ Tentative plan:
+ - Commit and push to GitHub
+ - GitHub Action gathers information from the sources of truth
+ - If the citation needs to be updated, write to both
+     - pathFilenameCitationSSOT
+     - pathFilenameCitationDOTcffRepo
+ - Commit and push to GitHub
+     - this complicates things
+     - I want the updated citation to be in the `commit` field of itself
+ """
+
+ @attrs.define
+ class CitationNexus:
+     """
+     - one-to-one correlation with `cffconvert.lib.cff_1_2_x.citation` class Citation_1_2_x.cffobj
+     """
+     cffDASHversion: str # pathFilenameCitationSSOT
+     message: str # pathFilenameCitationSSOT
+
+     abstract: str | None = None # pathFilenameCitationSSOT
+     authors: list[dict[str,str]] = attrs.field(factory=list) # pathFilenamePackageSSOT; pyproject.toml authors
+     commit: str | None = None # workflows['Make GitHub Release']
+     contact: list[dict[str,str]] = attrs.field(factory=list) # pathFilenamePackageSSOT; pyproject.toml maintainers
+     dateDASHreleased: str | None = None # workflows['Make GitHub Release']
+     doi: str | None = None # pathFilenameCitationSSOT
+     identifiers: list[str] = attrs.field(factory=list) # workflows['Make GitHub Release']
+     keywords: list[str] = attrs.field(factory=list) # pathFilenamePackageSSOT; packaging.metadata.Metadata.keywords
+     license: str | None = None # pathFilenamePackageSSOT; packaging.metadata.Metadata.license_expression
+     licenseDASHurl: str | None = None # pathFilenamePackageSSOT; packaging.metadata.Metadata.project_urls: license or pyproject.toml urls license
+     preferredDASHcitation: str | None = None # pathFilenameCitationSSOT
+     references: list[str] = attrs.field(factory=list) # bibtex files in pathCitationSSOT. Conversion method and timing TBD.
+     repositoryDASHartifact: str | None = None # (https://pypi.org/pypi/{package_name}/json').json()['releases']
+     repositoryDASHcode: str | None = None # workflows['Make GitHub Release']
+     repository: str | None = None # pathFilenamePackageSSOT; packaging.metadata.Metadata.project_urls: repository
+     title: str | None = None # pathFilenamePackageSSOT; pyproject.toml name (packaging normalizes the names)
+     type: str | None = None # pathFilenameCitationSSOT
+     url: str | None = None # pathFilenamePackageSSOT; packaging.metadata.Metadata.project_urls: homepage
+     version: str | None = None # pathFilenamePackageSSOT; packaging.metadata.Metadata.version
+
+     def setInStone(self, prophet: str) -> "CitationNexus":
+         match prophet:
+             case "Citation":
+                 pass
+                 # "freeze" these items
+                 # setattr(self.cffDASHversion, 'type', Final[str])
+                 # setattr(self.doi, 'type', Final[str])
+                 # cffDASHversion: str # pathFilenameCitationSSOT
+                 # message: str # pathFilenameCitationSSOT
+                 # abstract: str | None = None # pathFilenameCitationSSOT
+                 # doi: str | None = None # pathFilenameCitationSSOT
+                 # preferredDASHcitation: str | None = None # pathFilenameCitationSSOT
+                 # type: str | None = None # pathFilenameCitationSSOT
+             case "PyPA":
+                 pass
+                 # "freeze" these items
+                 # setattr(self.keywords, 'type', Final[list[str]])
+                 # setattr(self.license, 'type', Final[str])
+                 # setattr(self.licenseDASHurl, 'type', Final[str])
+                 # setattr(self.repository, 'type', Final[str])
+                 # setattr(self.url, 'type', Final[str])
+                 # setattr(self.version, 'type', Final[str])
+             case "pyprojectDOTtoml":
+                 pass
+                 # "freeze" these items
+                 # setattr(self.authors, 'type', Final[list[dict[str,str]]])
+                 # setattr(self.contact, 'type', Final[list[dict[str,str]]])
+                 # setattr(self.title, 'type', Final[str])
+         return self
+
+ def getNexusCitation(pathFilenameCitationSSOT: pathlib.Path) -> CitationNexus:
+
+     # `cffconvert.cli.create_citation.create_citation()` is PAINFULLY mundane, but a major problem
+     # in the CFF ecosystem is divergence. Therefore, I will use this function so that my code
+     # converges with the CFF ecosystem.
+     citationObject: cffconvert.Citation = create_citation(infile=pathFilenameCitationSSOT, url=None)
+     # `._parse()` is a yaml loader: use it for convergence
+     cffobj: Dict[Any, Any] = citationObject._parse()
+
+     nexusCitation = CitationNexus(
+         cffDASHversion=cffobj["cff-version"],
+         message=cffobj["message"],
+     )
+
+     Z0Z_list: List[attrs.Attribute] = list(attrs.fields(type(nexusCitation)))
+     for Z0Z_field in Z0Z_list:
+         cffobjKeyName: str = Z0Z_field.name.replace("DASH", "-")
+         cffobjValue = cffobj.get(cffobjKeyName)
+         if cffobjValue: # An empty list will be False
+             setattr(nexusCitation, Z0Z_field.name, cffobjValue)
+
+     nexusCitation = nexusCitation.setInStone("Citation")
+     return nexusCitation
+
+ def getPypaMetadata(packageData: Dict[str, Any]) -> PyPAMetadata:
+     """
+     Create a PyPA metadata object (version 2.4) from packageData.
+     https://packaging.python.org/en/latest/specifications/core-metadata/
+     """
+     dictionaryProjectURLs: Dict[str, str] = {}
+     for urlName, url in packageData.get("urls", {}).items():
+         urlName = urlName.lower()
+         if urlName in listProjectURLsTarget:
+             dictionaryProjectURLs[urlName] = url
+
+     metadataRaw = packaging.metadata.RawMetadata(
+         keywords=packageData.get("keywords", []),
+         license_expression=packageData.get("license", {}).get("text", ""),
+         metadata_version="2.4",
+         name=packaging.utils.canonicalize_name(packageData.get("name", None), validate=True), # packaging.metadata.InvalidMetadata: 'name' is a required field
+         project_urls=dictionaryProjectURLs,
+         version=packageData.get("version", None),
+     )
+
+     metadata = PyPAMetadata().from_raw(metadataRaw)
+     return metadata
+
+ def addPypaMetadata(nexusCitation: CitationNexus, metadata: PyPAMetadata) -> CitationNexus:
+     if not metadata.name:
+         raise ValueError("Metadata name is required.")
+
+     nexusCitation.title = metadata.name
+     if metadata.version: nexusCitation.version = str(metadata.version)
+     if metadata.keywords: nexusCitation.keywords = metadata.keywords
+     if metadata.license_expression: nexusCitation.license = metadata.license_expression
+
+     Z0Z_lookup: Dict[str, str] = {
+         "homepage": "url",
+         "license": "licenseDASHurl",
+         "repository": "repository",
+     }
+     if metadata.project_urls:
+         for urlTarget in listProjectURLsTarget:
+             url = metadata.project_urls.get(urlTarget, None)
+             if url:
+                 setattr(nexusCitation, Z0Z_lookup[urlTarget], url)
+
+     nexusCitation = nexusCitation.setInStone("PyPA")
+     return nexusCitation
+
+ def add_pyprojectDOTtoml(nexusCitation: CitationNexus, packageData: Dict[str, Any]) -> CitationNexus:
+     def Z0Z_ImaNotValidatingNoNames(person: Dict[str, str]) -> Dict[str, str]:
+         cffPerson: Dict[str, str] = {}
+         if person.get('name', None):
+             cffPerson['given-names'], cffPerson['family-names'] = person['name'].split(' ', 1)
+         if person.get('email', None):
+             cffPerson['email'] = person['email']
+         return cffPerson
+     listAuthors = packageData.get("authors", None)
+     if not listAuthors:
+         raise ValueError("Authors are required.")
+     else:
+         listPersons = []
+         for person in listAuthors:
+             listPersons.append(Z0Z_ImaNotValidatingNoNames(person))
+         nexusCitation.authors = listPersons
+     if packageData.get("maintainers", None):
+         listPersons = []
+         for person in packageData["maintainers"]:
+             listPersons.append(Z0Z_ImaNotValidatingNoNames(person))
+         nexusCitation.contact = listPersons
+     nexusCitation.title = packageData["name"]
+     nexusCitation = nexusCitation.setInStone("pyprojectDOTtoml")
+     return nexusCitation
+
+ def writeCitation(nexusCitation: CitationNexus, pathFilenameCitationSSOT: pathlib.Path, pathFilenameCitationDOTcffRepo: pathlib.Path):
+     # NOTE embarrassingly hacky process to follow
+     parameterIndent= 2
+     parameterLineWidth = 60
+     yamlWorkhorse = ruamel.yaml.YAML()
+
+     def srsly(Z0Z_filed, Z0Z_value):
+         if Z0Z_value: # empty lists
+             return True
+         else:
+             return False
+
+     dictionaryCitation = attrs.asdict(nexusCitation, filter=srsly)
+     for keyName in list(dictionaryCitation.keys()):
+         dictionaryCitation[keyName.replace("DASH", "-")] = dictionaryCitation.pop(keyName)
+
+     pathFilenameForValidation = pathlib.Path(tempfile.mktemp())
+
+     def writeStream(pathFilename):
+         with open(pathFilename, 'w') as pathlibIsAStealthContextManagerThatRuamelCannotDetectAndRefusesToWorkWith:
+             yamlWorkhorse.dump(dictionaryCitation, pathlibIsAStealthContextManagerThatRuamelCannotDetectAndRefusesToWorkWith)
+
+     writeStream(pathFilenameForValidation)
+
+     citationObject: cffconvert.Citation = create_citation(infile=pathFilenameForValidation, url=None)
+     if citationObject.validate(verbose=True) is None:
+         writeStream(pathFilenameCitationSSOT)
+         writeStream(pathFilenameCitationDOTcffRepo)
+
+ def logistics():
+     # Prefer reliable, dynamic values over hardcoded ones
+     packageNameHARDCODED: str = 'mapFolding'
+
+     packageName: str = packageNameHARDCODED
+     pathRepoRoot = pathlib.Path(__file__).parent.parent.parent
+     pathFilenamePackageSSOT = pathRepoRoot / 'pyproject.toml'
+     filenameGitHubAction = 'updateCitation.yml'
+     pathFilenameGitHubAction = pathRepoRoot / '.github' / 'workflows' / filenameGitHubAction
+
+     filenameCitationDOTcff = 'CITATION.cff'
+     pathCitations = pathRepoRoot / packageName / 'citations'
+     pathFilenameCitationSSOT = pathCitations / filenameCitationDOTcff
+     pathFilenameCitationDOTcffRepo = pathRepoRoot / filenameCitationDOTcff
+
+     nexusCitation = getNexusCitation(pathFilenameCitationSSOT)
+
+     tomlPackageData: Dict[str, Any] = tomli.loads(pathFilenamePackageSSOT.read_text())['project']
+     # https://packaging.python.org/en/latest/specifications/pyproject-toml/
+     pypaMetadata: PyPAMetadata = getPypaMetadata(tomlPackageData)
+
+     nexusCitation = addPypaMetadata(nexusCitation, pypaMetadata)
+     nexusCitation = add_pyprojectDOTtoml(nexusCitation, tomlPackageData)
+
+     writeCitation(nexusCitation, pathFilenameCitationSSOT, pathFilenameCitationDOTcffRepo)
+
+ if __name__ == '__main__':
+     logistics()
{mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/mapFolding.egg-info/PKG-INFO
@@ -1,12 +1,12 @@
  Metadata-Version: 2.2
  Name: mapFolding
- Version: 0.3.6
+ Version: 0.3.7
  Summary: Count distinct ways to fold a map (or a strip of stamps)
  Author-email: Hunter Hogan <HunterHogan@pm.me>
  License: CC-BY-NC-4.0
- Project-URL: homepage, https://github.com/hunterhogan/mapFolding
+ Project-URL: Homepage, https://github.com/hunterhogan/mapFolding
  Project-URL: Donate, https://www.patreon.com/integrated
- Project-URL: repository, https://github.com/hunterhogan/mapFolding.git
+ Project-URL: Repository, https://github.com/hunterhogan/mapFolding.git
  Keywords: A001415,A001416,A001417,A001418,A195646,folding,map folding,OEIS,stamp folding
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
@@ -30,12 +30,13 @@ Requires-Dist: jupyter; extra == "benchmark"
  Requires-Dist: pandas; extra == "benchmark"
  Requires-Dist: tqdm; extra == "benchmark"
  Provides-Extra: testing
+ Requires-Dist: attrs; extra == "testing"
  Requires-Dist: cffconvert; extra == "testing"
  Requires-Dist: more_itertools; extra == "testing"
- Requires-Dist: pytest; extra == "testing"
  Requires-Dist: pytest-cov; extra == "testing"
  Requires-Dist: pytest-env; extra == "testing"
  Requires-Dist: pytest-xdist; extra == "testing"
+ Requires-Dist: pytest; extra == "testing"
  Requires-Dist: python_minifier; extra == "testing"
  Requires-Dist: tomli; extra == "testing"

{mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/mapFolding.egg-info/SOURCES.txt
@@ -2,7 +2,6 @@ README.md
  pyproject.toml
  mapFolding/benchmarks/benchmarking.py
  mapFolding/citations/updateCitation.py
- mapFolding/citations/updateCitationgpt.py
  mapFolding/mapFolding.egg-info/PKG-INFO
  mapFolding/mapFolding.egg-info/SOURCES.txt
  mapFolding/mapFolding.egg-info/dependency_links.txt
@@ -19,7 +18,6 @@ mapFolding/reference/lunnanWhile.py
  mapFolding/reference/rotatedEntryPoint.py
  mapFolding/reference/total_countPlus1vsPlusN.py
  mapFolding/someAssemblyRequired/__init__.py
- mapFolding/someAssemblyRequired/generalizeSourceCode.py
  mapFolding/someAssemblyRequired/getLLVMforNoReason.py
  mapFolding/someAssemblyRequired/makeJob.py
  mapFolding/someAssemblyRequired/synthesizeModuleJob.py
{mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/mapFolding.egg-info/requires.txt
@@ -9,11 +9,12 @@ pandas
  tqdm

  [testing]
+ attrs
  cffconvert
  more_itertools
- pytest
  pytest-cov
  pytest-env
  pytest-xdist
+ pytest
  python_minifier
  tomli
{mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/someAssemblyRequired/synthesizeModuleJob.py
@@ -116,10 +116,11 @@ def writeModuleWithNumba(listDimensions, **keywordArguments: Optional[str]) -> p
  linesLaunch = """"""
  linesLaunch = linesLaunch + f"""
  if __name__ == '__main__':
- import time
- timeStart = time.perf_counter()
+ # import time
+ # timeStart = time.perf_counter()
  {identifierCallableLaunch}()
- print(time.perf_counter() - timeStart)"""
+ # print(time.perf_counter() - timeStart)
+ """

  linesWriteFoldsTotal = """"""
  linesWriteFoldsTotal = "\n".join([linesWriteFoldsTotal
{mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/syntheticModules/Initialize.py
@@ -1,8 +1,8 @@
- import numba
- from numpy import integer
- from mapFolding import indexMy, indexTrack
  import numpy
  from typing import Any, Tuple
+ from mapFolding import indexMy, indexTrack
+ import numba
+ from numpy import integer

  @numba.jit((numba.uint8[:, :, ::1], numba.uint8[::1], numba.uint8[::1], numba.uint8[:, ::1]))
  def countInitialize(connectionGraph: numpy.ndarray[Tuple[int, int, int], numpy.dtype[integer[Any]]], gapsWhere: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]], my: numpy.ndarray[Tuple[int], numpy.dtype[integer[Any]]], track: numpy.ndarray[Tuple[int, int], numpy.dtype[integer[Any]]]):
{mapfolding-0.3.6 → mapfolding-0.3.7}/mapFolding/syntheticModules/Sequential.py
@@ -1,6 +1,6 @@
  from numpy import integer
- import numba
  from typing import Any, Tuple
+ import numba
  from mapFolding import indexMy, indexTrack
  import numpy

{mapfolding-0.3.6 → mapfolding-0.3.7}/pyproject.toml
@@ -37,19 +37,20 @@ optional-dependencies = { benchmark = [
  "pandas",
  "tqdm",
  ], testing = [
+ "attrs",
  "cffconvert",
  "more_itertools",
- "pytest",
  "pytest-cov",
  "pytest-env",
  "pytest-xdist",
+ "pytest",
  "python_minifier",
  "tomli",] }
  readme = { file = "README.md", content-type = "text/markdown" }
  requires-python = ">=3.10,<3.14"
  scripts = { getOEISids = "mapFolding.oeis:getOEISids", clearOEIScache = "mapFolding.oeis:clearOEIScache", OEIS_for_n = "mapFolding.oeis:OEIS_for_n" }
- urls = { homepage = "https://github.com/hunterhogan/mapFolding", Donate = "https://www.patreon.com/integrated", repository = "https://github.com/hunterhogan/mapFolding.git" }
- version = "0.3.6"
+ urls = { Homepage = "https://github.com/hunterhogan/mapFolding", Donate = "https://www.patreon.com/integrated", Repository = "https://github.com/hunterhogan/mapFolding.git" }
+ version = "0.3.7"

  [tool.coverage]
  report = { exclude_lines = [
mapfolding-0.3.6/mapFolding/citations/updateCitation.py
@@ -1,116 +0,0 @@
- from cffconvert.cli.create_citation import create_citation
- from cffconvert.cli.validate_or_write_output import validate_or_write_output
- from typing import Any, Dict
- import cffconvert
- import pathlib
- import packaging.metadata
- import tomli
- import inspect
- import json
- import ruamel.yaml
- import packaging
- from packaging.metadata import Metadata as PyPAMetadata
- import packaging.utils
- import packaging.version
-
- def addPypaMetadata(citation: cffconvert.Citation, metadata: PyPAMetadata) -> cffconvert.Citation:
-     # https://github.com/citation-file-format/cff-initializer-javascript
-     """
-     keywords: pathFilenamePackageSSOT; packaging.metadata.Metadata.keywords
-     license: pathFilenamePackageSSOT; packaging.metadata.Metadata.license_expression
-     title: pathFilenamePackageSSOT; packaging.metadata.Metadata.name
-     url: pathFilenamePackageSSOT; packaging.metadata.Metadata.project_urls: homepage
-     repository: pathFilenamePackageSSOT; packaging.metadata.Metadata.project_urls: repository
-     version: pathFilenamePackageSSOT; packaging.metadata.Metadata.version
-     """
-     return citation
-
- def getPypaMetadata(packageData: Dict[str, Any]) -> PyPAMetadata:
-     # https://packaging.python.org/en/latest/specifications/core-metadata/
-     dictionaryProjectURLs = {}
-     for urlKey, urlValue in packageData.get('urls', {}).items():
-         if urlKey.lower() in ('homepage', 'repository'):
-             dictionaryProjectURLs[urlKey] = urlValue
-
-     metadataRaw = packaging.metadata.RawMetadata(
-         keywords=packageData.get('keywords', []),
-         license_expression=packageData.get('license', {}).get('text', ''),
-         metadata_version='2.4',
-         name=packaging.utils.canonicalize_name(packageData.get('name', None), validate=True),
-         project_urls=dictionaryProjectURLs,
-         version=packageData.get('version', None),
-     )
-
-     metadata = PyPAMetadata().from_raw(metadataRaw)
-     return metadata
-
- """
- Tentative plan:
- - Commit and push to GitHub
- - GitHub Action gathers information from the sources of truth
- - If the citation needs to be updated, write to both
-     - pathFilenameCitationSSOT
-     - pathFilenameCitationDOTcffRepo
- - Commit and push to GitHub
-     - this complicates things
-     - I want the updated citation to be in the `commit` field of itself
- """
- """cffconvert.Citation fields and the source of truth
- abstract: pathFilenameCitationSSOT
- authors: pathFilenamePackageSSOT
- cff-version: pathFilenameCitationSSOT
- commit: workflows['Make GitHub Release']
- contact: pathFilenamePackageSSOT
- date-released: workflows['Make GitHub Release']
- doi: pathFilenameCitationSSOT
- identifiers: workflows['Make GitHub Release']
- license-url: pathFilenamePackageSSOT
- message: pathFilenameCitationSSOT
- preferred-citation: pathFilenameCitationSSOT
- references: to be determined
- repository-artifact: (https://pypi.org/pypi/{package_name}/json').json()['releases']
- repository-code: workflows['Make GitHub Release']
- type: pathFilenameCitationSSOT
-
- keywords: pathFilenamePackageSSOT; packaging.metadata.Metadata.keywords
- license: pathFilenamePackageSSOT; packaging.metadata.Metadata.license_expression
- title: pathFilenamePackageSSOT; packaging.metadata.Metadata.name
- url: pathFilenamePackageSSOT; packaging.metadata.Metadata.project_urls: homepage
- repository: pathFilenamePackageSSOT; packaging.metadata.Metadata.project_urls: repository
- version: pathFilenamePackageSSOT; packaging.metadata.Metadata.version
- """
-
- def logistics():
-     # Prefer reliable, dynamic values over hardcoded ones
-     packageName: str = 'mapFolding'
-     pathRepoRoot = pathlib.Path(__file__).parent.parent.parent
-     pathFilenamePackageSSOT = pathRepoRoot / 'pyproject.toml'
-     filenameGitHubAction = 'updateCitation.yml'
-     pathFilenameGitHubAction = pathRepoRoot / '.github' / 'workflows' / filenameGitHubAction
-
-     filenameCitationDOTcff = 'CITATION.cff'
-     pathCitations = pathRepoRoot / packageName / 'citations'
-     pathFilenameCitationSSOT = pathCitations / filenameCitationDOTcff
-     pathFilenameCitationDOTcffRepo = pathRepoRoot / filenameCitationDOTcff
-
-     citationObject: cffconvert.Citation = create_citation(infile=pathFilenameCitationSSOT, url=None)
-     print(citationObject._parse().as_cff())
-
-     tomlPackageData: Dict[str, Any] = tomli.loads(pathFilenamePackageSSOT.read_text())['project']
-     # https://packaging.python.org/en/latest/specifications/pyproject-toml/
-     pypaMetadata: PyPAMetadata = getPypaMetadata(tomlPackageData)
-
-     validate_or_write_output(outfile=pathFilenameCitationSSOT, outputformat='cff', validate_only=False, citation=citationObject)
-     validate_or_write_output(outfile=pathFilenameCitationDOTcffRepo, outputformat='cff', validate_only=False, citation=citationObject)
-
- if __name__ == '__main__':
-     logistics()
-
- # print(f"{pypaMetadata.name=}, {pypaMetadata.version=}, {pypaMetadata.keywords=}, {pypaMetadata.license_expression=}, {pypaMetadata.project_urls=}")
- # path_cffconvert = pathlib.Path(inspect.getfile(cffconvert)).parent
- # pathFilenameSchema = path_cffconvert / "schemas/1.2.0/schema.json"
- # scheme: Dict[str, Any] = json.loads(pathFilenameSchema.read_text())
- # schemaSpecifications: Dict[str, Any] = scheme['properties']
-
- # for property, subProperties in schemaSpecifications.items():
- # print(property, subProperties.get('items', None))
mapfolding-0.3.6/mapFolding/citations/updateCitationgpt.py
@@ -1,125 +0,0 @@
- from cffconvert.cli.create_citation import create_citation
- from cffconvert.cli.validate_or_write_output import validate_or_write_output
- from typing import Any, Dict
- import cffconvert
- import pathlib
- import packaging.metadata
- import tomli
- import ruamel.yaml
- import packaging
- from packaging.metadata import Metadata as PyPAMetadata
- import packaging.utils
- import packaging.version
-
- def addPypaMetadata(citation: cffconvert.Citation, metadata: PyPAMetadata) -> cffconvert.Citation:
-     """
-     Map the PyPA metadata to the citation's internal representation.
-
-     Mapping:
-     - title: metadata.name
-     - version: metadata.version (converted to string)
-     - keywords: metadata.keywords
-     - license: metadata.license_expression
-     - url: from project URLs (homepage)
-     - repository: from project URLs (repository)
-     """
-     # Access the internal dictionary (used for conversion)
-     citationData: Dict[str, Any] = citation._cffobj
-
-     # Update title from PyPA metadata name
-     if metadata.name:
-         citationData["title"] = metadata.name
-
-     # Update version from PyPA metadata version
-     if metadata.version:
-         citationData["version"] = str(metadata.version)
-
-     # Update keywords from PyPA metadata keywords
-     if metadata.keywords:
-         citationData["keywords"] = metadata.keywords
-
-     # Update license from PyPA metadata license_expression
-     if metadata.license_expression:
-         citationData["license"] = metadata.license_expression
-
-     # Retrieve the project URLs that were attached in getPypaMetadata
-     projectURLs: Dict[str, str] = getattr(metadata, "_project_urls", {})
-
-     # Update the homepage URL
-     if "homepage" in projectURLs:
-         citationData["url"] = projectURLs["homepage"]
-
-     # Update the repository URL
-     if "repository" in projectURLs:
-         citationData["repository"] = projectURLs["repository"]
-
-     return citation
-
- def getPypaMetadata(packageData: Dict[str, Any]) -> PyPAMetadata:
-     """
-     Create a PyPA metadata object (version 2.4) from packageData.
-
-     Mapping for project URLs:
-     - 'homepage' and 'repository' are accepted from packageData['urls'].
-     """
-     dictionaryProjectURLs: Dict[str, str] = {}
-     for urlKey, urlValue in packageData.get("urls", {}).items():
-         lowerUrlKey = urlKey.lower()
-         if lowerUrlKey in ("homepage", "repository"):
-             dictionaryProjectURLs[lowerUrlKey] = urlValue
-
-     metadataRaw = packaging.metadata.RawMetadata(
-         keywords=packageData.get("keywords", []),
-         license_expression=packageData.get("license", {}).get("text", ""),
-         metadata_version="2.4",
-         name=packaging.utils.canonicalize_name(packageData.get("name", None), validate=True),
-         project_urls=dictionaryProjectURLs,
-         version=packageData.get("version", None),
-     )
-
-     metadata = PyPAMetadata().from_raw(metadataRaw)
-     # Attach the project URLs dictionary so it can be used later.
-     setattr(metadata, "_project_urls", dictionaryProjectURLs)
-     return metadata
-
- def logistics():
-     # Determine paths from your SSOT.
-     packageName: str = "mapFolding"
-     pathRepoRoot = pathlib.Path(__file__).parent.parent.parent
-     pathFilenamePackageSSOT = pathRepoRoot / "pyproject.toml"
-     filenameGitHubAction = "updateCitation.yml"
-     pathFilenameGitHubAction = pathRepoRoot / ".github" / "workflows" / filenameGitHubAction
-
-     filenameCitationDOTcff = "CITATION.cff"
-     pathCitations = pathRepoRoot / packageName / "citations"
-     pathFilenameCitationSSOT = pathCitations / filenameCitationDOTcff
-     pathFilenameCitationDOTcffRepo = pathRepoRoot / filenameCitationDOTcff
-
-     # Create a citation object from the SSOT citation file.
-     citationObject: cffconvert.Citation = create_citation(infile=pathFilenameCitationSSOT, url=None)
-     # Print the current citation in CFF format (for debugging) using the as_cff method.
-     print(citationObject.as_cff())
-
-     # Load package metadata from pyproject.toml.
-     tomlPackageData: Dict[str, Any] = tomli.loads(pathFilenamePackageSSOT.read_text())["project"]
-     pypaMetadata: PyPAMetadata = getPypaMetadata(tomlPackageData)
-
-     # Map the PyPA metadata into the citation's internal representation.
-     citationObject = addPypaMetadata(citation=citationObject, metadata=pypaMetadata)
-
-     # Validate and write out the updated citation file in both locations.
-     # validate_or_write_output(
-     #     outfile=pathFilenameCitationSSOT,
-     #     outputformat="cff",
-     #     validate_only=False,
-     #     citation=citationObject,
-     # )
-     validate_or_write_output(
-         outfile=pathFilenameCitationDOTcffRepo,
-         outputformat="cff",
-         validate_only=False,
-         citation=citationObject,
-     )
-
- if __name__ == "__main__":
-     logistics()
mapfolding-0.3.6/mapFolding/someAssemblyRequired/generalizeSourceCode.py
@@ -1,122 +0,0 @@
- from mapFolding import datatypeLargeDEFAULT, datatypeMediumDEFAULT, datatypeSmallDEFAULT
- from typing import Dict, Optional, List, Set, Union
- import ast
-
- class RecursiveInlinerWithEnum(ast.NodeTransformer):
-     """Process AST nodes to inline functions and substitute enum values.
-     Also handles function decorators during inlining."""
-
-     def __init__(self, dictionaryFunctions: Dict[str, ast.FunctionDef], dictionaryEnumValues: Dict[str, int]) -> None:
-         self.dictionaryFunctions = dictionaryFunctions
-         self.dictionaryEnumValues = dictionaryEnumValues
-         self.processed = set()
-
-     def inlineFunctionBody(self, functionName: str) -> Optional[ast.FunctionDef]:
-         if functionName in self.processed:
-             return None
-
-         self.processed.add(functionName)
-         inlineDefinition = self.dictionaryFunctions[functionName]
-         # Recursively process the function body
-         for node in ast.walk(inlineDefinition):
-             self.visit(node)
-         return inlineDefinition
-
-     def visit_Attribute(self, node: ast.Attribute) -> ast.AST:
-         # Substitute enum identifiers (e.g., indexMy.leaf1ndex.value)
-         if isinstance(node.value, ast.Attribute) and isinstance(node.value.value, ast.Name):
-             enumPath = f"{node.value.value.id}.{node.value.attr}.{node.attr}"
-             if enumPath in self.dictionaryEnumValues:
-                 return ast.Constant(value=self.dictionaryEnumValues[enumPath])
-         return self.generic_visit(node)
-
-     def visit_Call(self, node: ast.Call) -> ast.AST:
-         callNode = self.generic_visit(node)
-         if isinstance(callNode, ast.Call) and isinstance(callNode.func, ast.Name) and callNode.func.id in self.dictionaryFunctions:
-             inlineDefinition = self.inlineFunctionBody(callNode.func.id)
-             if (inlineDefinition and inlineDefinition.body):
-                 lastStmt = inlineDefinition.body[-1]
-                 if isinstance(lastStmt, ast.Return) and lastStmt.value is not None:
-                     return self.visit(lastStmt.value)
-                 elif isinstance(lastStmt, ast.Expr) and lastStmt.value is not None:
-                     return self.visit(lastStmt.value)
-             return ast.Constant(value=None)
-         return callNode
-
-     def visit_Expr(self, node: ast.Expr) -> Union[ast.AST, List[ast.AST]]:
-         if isinstance(node.value, ast.Call):
-             if isinstance(node.value.func, ast.Name) and node.value.func.id in self.dictionaryFunctions:
-                 inlineDefinition = self.inlineFunctionBody(node.value.func.id)
-                 if inlineDefinition:
-                     return [self.visit(stmt) for stmt in inlineDefinition.body]
-         return self.generic_visit(node)
-
- def findRequiredImports(node: ast.AST) -> Set[str]:
-     """Find all modules that need to be imported based on AST analysis.
-     NOTE: due to hardcoding, this is a glorified regex. No, wait, this is less versatile than regex."""
-     requiredImports = set()
-
-     class ImportFinder(ast.NodeVisitor):
-         def visit_Name(self, node: ast.Name) -> None:
-             if node.id in {'numba'}:
-                 requiredImports.add(node.id)
-             self.generic_visit(node)
-
-         def visitDecorator(self, node: ast.AST) -> None:
-             if isinstance(node, ast.Call) and isinstance(node.func, ast.Name):
-                 if node.func.id == 'jit':
-                     requiredImports.add('numba')
-             self.generic_visit(node)
-
-     ImportFinder().visit(node)
-     return requiredImports
-
- def generateImports(requiredImports: Set[str]) -> str:
-     """Generate import statements based on required modules."""
-     importStatements = {'import numba', 'from mapFolding import indexMy, indexTrack'}
-
-     importMapping = {
-         'numba': 'import numba',
-     }
-
-     for moduleName in sorted(requiredImports):
-         if moduleName in importMapping:
-             importStatements.add(importMapping[moduleName])
-
-     return '\n'.join(importStatements)
-
- def makeInlineFunction(sourceCode: str, targetFunctionName: str, dictionaryEnumValues: Dict[str, int], skipEnum: bool=False, **keywordArguments: Optional[str]):
-     datatypeLarge = keywordArguments.get('datatypeLarge', datatypeLargeDEFAULT)
-     datatypeMedium = keywordArguments.get('datatypeMedium', datatypeMediumDEFAULT)
-     datatypeSmall = keywordArguments.get('datatypeSmall', datatypeSmallDEFAULT)
-     if skipEnum:
-         dictionaryEnumValues = {}
-     dictionaryParsed = ast.parse(sourceCode)
-     dictionaryFunctions = {
-         element.name: element
-         for element in dictionaryParsed.body
-         if isinstance(element, ast.FunctionDef)
-     }
-     nodeTarget = dictionaryFunctions[targetFunctionName]
-     nodeInliner = RecursiveInlinerWithEnum(dictionaryFunctions, dictionaryEnumValues)
-     nodeInlined = nodeInliner.visit(nodeTarget)
-     ast.fix_missing_locations(nodeInlined)
-     callableInlinedDecorators = [decorator for decorator in nodeInlined.decorator_list]
-
-     requiredImports = findRequiredImports(nodeInlined)
-     importStatements = generateImports(requiredImports)
-     importsRequired = importStatements
-     dictionaryDecoratorsNumba={
-         'countInitialize':
-             f'@numba.jit((numba.{datatypeSmall}[:,:,::1], numba.{datatypeMedium}[::1], numba.{datatypeSmall}[::1], numba.{datatypeMedium}[:,::1]), parallel=False, boundscheck=False, cache=True, error_model="numpy", fastmath=True, looplift=False, nogil=True, nopython=True)\n',
-         'countParallel':
-             f'@numba.jit((numba.{datatypeSmall}[:,:,::1], numba.{datatypeLarge}[::1], numba.{datatypeMedium}[::1], numba.{datatypeSmall}[::1], numba.{datatypeMedium}[:,::1]), parallel=True, boundscheck=False, cache=True, error_model="numpy", fastmath=True, looplift=False, nogil=True, nopython=True)\n',
-         'countSequential':
-             f'@numba.jit((numba.{datatypeSmall}[:,:,::1], numba.{datatypeLarge}[::1], numba.{datatypeMedium}[::1], numba.{datatypeSmall}[::1], numba.{datatypeMedium}[:,::1]), parallel=False, boundscheck=False, cache=True, error_model="numpy", fastmath=True, looplift=False, nogil=True, nopython=True)\n',
-     }
-
-     lineNumbaDecorator = dictionaryDecoratorsNumba[targetFunctionName]
-
-     # inlinedCode = ast.unparse(ast.Module(body=[nodeInlined], type_ignores=[]))
-     callableInlined = lineNumbaDecorator + ast.unparse(nodeInlined)
-     return (callableInlined, callableInlinedDecorators, importsRequired)