nbdev 2.3.25__py3-none-any.whl → 2.4.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nbdev/__init__.py +1 -1
- nbdev/_modidx.py +22 -32
- nbdev/clean.py +18 -16
- nbdev/cli.py +57 -22
- nbdev/config.py +65 -37
- nbdev/diff.py +92 -0
- nbdev/doclinks.py +109 -50
- nbdev/export.py +33 -18
- nbdev/frontmatter.py +5 -3
- nbdev/maker.py +35 -33
- nbdev/merge.py +11 -9
- nbdev/migrate.py +20 -18
- nbdev/process.py +17 -15
- nbdev/processors.py +43 -30
- nbdev/qmd.py +9 -7
- nbdev/quarto.py +68 -29
- nbdev/release.py +46 -36
- nbdev/serve.py +8 -5
- nbdev/showdoc.py +45 -162
- nbdev/sync.py +17 -11
- nbdev/test.py +6 -4
- {nbdev-2.3.25.dist-info → nbdev-2.4.8.dist-info}/METADATA +40 -19
- nbdev-2.4.8.dist-info/RECORD +30 -0
- {nbdev-2.3.25.dist-info → nbdev-2.4.8.dist-info}/WHEEL +1 -1
- {nbdev-2.3.25.dist-info → nbdev-2.4.8.dist-info}/entry_points.txt +3 -0
- nbdev-2.3.25.dist-info/RECORD +0 -29
- {nbdev-2.3.25.dist-info → nbdev-2.4.8.dist-info/licenses}/LICENSE +0 -0
- {nbdev-2.3.25.dist-info → nbdev-2.4.8.dist-info}/top_level.txt +0 -0
nbdev/release.py
CHANGED
@@ -1,29 +1,32 @@
+"""Auto-generated tagged releases and release notes from GitHub issues"""
+
 # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/api/18_release.ipynb.
 
 # %% auto 0
-__all__ = ['GH_HOST', 'Release', 'changelog', 'release_git', 'release_gh', 'pypi_json', 'latest_pypi',
-           'conda_output_path', 'write_conda_meta', 'write_requirements', 'anaconda_upload',
-           'chk_conda_rel', 'release_pypi', 'release_both', 'bump_version', 'nbdev_bump_version']
+__all__ = ['GH_HOST', 'CONDA_WARNING', 'Release', 'changelog', 'release_git', 'release_gh', 'pypi_json', 'latest_pypi',
+           'pypi_details', 'conda_output_path', 'write_conda_meta', 'write_requirements', 'anaconda_upload',
+           'release_conda', 'chk_conda_rel', 'release_pypi', 'release_both', 'bump_version', 'nbdev_bump_version']
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 from fastcore.all import *
 from ghapi.core import *
 
 from datetime import datetime
+from packaging.version import Version
 import shutil,subprocess
 
 from .doclinks import *
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 GH_HOST = "https://api.github.com"
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 def _find_config(cfg_name="settings.ini"):
     cfg_path = Path().absolute()
     while cfg_path != cfg_path.parent and not (cfg_path/cfg_name).exists(): cfg_path = cfg_path.parent
     return Config(cfg_path, cfg_name)
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 def _issue_txt(issue):
     res = '- {} ([#{}]({}))'.format(issue.title.strip(), issue.number, issue.html_url)
     if hasattr(issue, 'pull_request'): res += ', thanks to [@{}]({})'.format(issue.user.login, issue.user.html_url)
@@ -40,7 +43,7 @@ def _load_json(cfg, k):
     try: return json.loads(cfg[k])
     except json.JSONDecodeError as e: raise Exception(f"Key: `{k}` in .ini file is not a valid JSON string: {e}")
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 class Release:
     def __init__(self, owner=None, repo=None, token=None, **groups):
         "Create CHANGELOG.md from GitHub issues"
@@ -60,17 +63,20 @@ class Release:
 
     def _issues(self, label):
        return self.gh.issues.list_for_repo(state='closed', sort='created', filter='all', since=self.commit_date, labels=label)
-    def _issue_groups(self): return parallel(self._issues, self.groups.keys(), progress=False)
+    def _issue_groups(self): return parallel(self._issues, self.groups.keys(), progress=False, threadpool=True)
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 @patch
 def changelog(self:Release,
               debug=False): # Just print the latest changes, instead of updating file
     "Create the CHANGELOG.md file, or return the proposed text if `debug` is `True`"
     if not self.changefile.exists(): self.changefile.write_text("# Release notes\n\n<!-- do not remove -->\n")
     marker = '<!-- do not remove -->\n'
-    try: self.commit_date = self.gh.repos.get_latest_release().published_at
-    except HTTP404NotFoundError: self.commit_date = '2000-01-01T00:00:004Z'
+    try: self.commit_date = (lr:=self.gh.repos.get_latest_release()).published_at
+    except HTTP404NotFoundError: lr,self.commit_date = None,'2000-01-01T00:00:004Z'
+    if lr and (Version(self.cfg.version) <= Version(lr.tag_name)):
+        print(f'Error: Version bump required: expected: >{lr.tag_name}, got: {self.cfg.version}.')
+        raise SystemExit(1)
     res = f"\n## {self.cfg.version}\n"
     issues = self._issue_groups()
     res += '\n'.join(_issues_txt(*o) for o in zip(issues, self.groups.values()))
@@ -80,7 +86,7 @@ def changelog(self:Release,
     self.changefile.write_text(res)
     run(f'git add {self.changefile}')
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 @patch
 def release(self:Release):
     "Tag and create a release in GitHub for the current version"
@@ -89,7 +95,7 @@ def release(self:Release):
     self.gh.create_release(ver, branch=self.cfg.branch, body=notes)
     return ver
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 @patch
 def latest_notes(self:Release):
     "Latest CHANGELOG entry"
@@ -98,7 +104,7 @@ def latest_notes(self:Release):
     if not len(its)>0: return ''
     return '\n'.join(its[1].splitlines()[1:]).strip()
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 @call_parse
 def changelog(
     debug:store_true=False, # Print info to be added to CHANGELOG, instead of updating file
@@ -108,7 +114,7 @@ def changelog(
     res = Release(repo=repo).changelog(debug=debug)
     if debug: print(res)
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 @call_parse
 def release_git(
     token:str=None # Optional GitHub token (otherwise `token` file is used)
@@ -117,7 +123,7 @@ def release_git(
     ver = Release(token=token).release()
     print(f"Released {ver}")
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 @call_parse
 def release_gh(
     token:str=None # Optional GitHub token (otherwise `token` file is used)
@@ -132,7 +138,7 @@ def release_gh(
     ver = Release(token=token).release()
     print(f"Released {ver}")
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 from fastcore.all import *
 from .config import *
 from .cli import *
@@ -144,18 +150,18 @@ except ImportError: from pip._vendor.packaging.version import parse
 
 _PYPI_URL = 'https://pypi.org/pypi/'
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 def pypi_json(s):
     "Dictionary decoded JSON for PYPI path `s`"
     return urljson(f'{_PYPI_URL}{s}/json')
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 def latest_pypi(name):
     "Latest version of `name` on pypi"
     return max(parse(r) for r,o in pypi_json(name)['releases'].items()
                if not parse(r).is_prerelease and not nested_idx(o, 0, 'yanked'))
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 def pypi_details(name):
     "Version, URL, and SHA256 for `name` from pypi"
     ver = str(latest_pypi(name))
@@ -163,7 +169,7 @@ def pypi_details(name):
     rel = [o for o in pypi['urls'] if o['packagetype']=='sdist'][0]
     return ver,rel['url'],rel['digests']['sha256']
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 import shlex
 from subprocess import Popen, PIPE, CalledProcessError
 
@@ -177,12 +183,12 @@ def _run(cmd):
     if p.returncode != 0: raise CalledProcessError(p.returncode, p.args)
     return res
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 def conda_output_path(name, build='build'):
     "Output path for conda build"
     return run(f'conda {build} --output {name}').strip().replace('\\', '/')
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 def _write_yaml(path, name, d1, d2):
     path = Path(path)
     p = path/name
@@ -192,7 +198,7 @@ def _write_yaml(path, name, d1, d2):
         yaml.safe_dump(d1, f)
         yaml.safe_dump(d2, f)
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 def _get_conda_meta():
     cfg = get_config()
     name,ver = cfg.lib_name,cfg.version
@@ -234,12 +240,12 @@ def _get_conda_meta():
     }
     return name,d1,d2
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 def write_conda_meta(path='conda'):
     "Writes a `meta.yaml` file to the `conda` directory of the current directory"
     _write_yaml(path, *_get_conda_meta())
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 @call_parse
 def write_requirements(path:str=''):
     "Writes a `requirements.txt` file to `directory` based on settings.ini."
@@ -248,9 +254,12 @@ def write_requirements(path:str=''):
     req = '\n'.join([cfg.get(k, '').replace(' ', '\n') for k in ['requirements', 'pip_requirements']])
     (d/'requirements.txt').mk_write(req)
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
+CONDA_WARNING='Conda support for nbdev is deprecated and scheduled for removal in a future version.'
+
 def anaconda_upload(name, loc=None, user=None, token=None, env_token=None):
     "Upload `name` to anaconda"
+    warn(CONDA_WARNING)
     user = f'-u {user} ' if user else ''
     if env_token: token = os.getenv(env_token)
     token = f'-t {token} ' if token else ''
@@ -258,7 +267,7 @@ def anaconda_upload(name, loc=None, user=None, token=None, env_token=None):
     if not loc: raise Exception("Failed to find output")
     return _run(f'anaconda {token} upload {user} {loc} --skip-existing')
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 @call_parse
 def release_conda(
     path:str='conda', # Path where package will be created
@@ -269,6 +278,7 @@ def release_conda(
     upload_user:str=None # Optional user to upload package to
 ):
     "Create a `meta.yaml` file ready to be built into a package, and optionally build and upload it"
+    warn(CONDA_WARNING)
     name = get_config().lib_name
     write_conda_meta(path)
     out = f"Done. Next steps:\n```\ncd {path}\n"""
@@ -286,10 +296,10 @@ def release_conda(
     if skip_upload: return print(loc)
     if not upload_user: upload_user = get_config().conda_user
     if not upload_user: return print("`conda_user` not in settings.ini and no `upload_user` passed. Cannot upload")
-    if 'anaconda upload' not in res: return print(f"{res}\n\
+    if 'anaconda upload' not in res: return print(f"{res}\n\nFailed. Check auto-upload not set in .condarc. Try `--do_build False`.")
     return anaconda_upload(name, loc)
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 def chk_conda_rel(
     nm:str, # Package name on pypi
     apkg:str=None, # Anaconda Package (defaults to {nm})
@@ -303,17 +313,17 @@ def chk_conda_rel(
     pypitag = latest_pypi(nm)
     if force or not condatag or pypitag > max(condatag): return f'{pypitag}'
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 @call_parse
 def release_pypi(
     repository:str="pypi" # Respository to upload to (defined in ~/.pypirc)
 ):
     "Create and upload Python package to PyPI"
     _dir = get_config().lib_path.parent
-    system(f'cd {_dir} && rm -rf dist build && python
+    system(f'cd {_dir} && rm -rf dist build && python -m build')
     system(f'twine upload --repository {repository} {_dir}/dist/*')
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 @call_parse
 def release_both(
     path:str='conda', # Path where package will be created
@@ -329,7 +339,7 @@ def release_both(
     release_conda.__wrapped__(path, do_build=do_build, build_args=build_args, skip_upload=skip_upload, mambabuild=mambabuild, upload_user=upload_user)
     nbdev_bump_version.__wrapped__()
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 def bump_version(version, part=2, unbump=False):
     version = version.split('.')
     incr = -1 if unbump else 1
@@ -337,7 +347,7 @@ def bump_version(version, part=2, unbump=False):
     for i in range(part+1, 3): version[i] = '0'
     return '.'.join(version)
 
-# %% ../nbs/api/18_release.ipynb
+# %% ../nbs/api/18_release.ipynb
 @call_parse
 def nbdev_bump_version(
     part:int=2, # Part of version to bump
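The change that most affects day-to-day use is the guard added to `Release.changelog`: the version in `settings.ini` must now be strictly greater than the latest published GitHub release tag, otherwise the command prints an error and exits instead of writing a duplicate changelog entry. A minimal sketch of that comparison, using `packaging.version.Version` as the new import does; the version strings below are illustrative stand-ins for `cfg.version` and `lr.tag_name`:

```python
# Minimal sketch of the new version-bump guard in Release.changelog.
# The two version strings are illustrative stand-ins for cfg.version
# (read from settings.ini) and lr.tag_name (the latest GitHub release tag).
from packaging.version import Version

settings_version = '2.4.8'
latest_release_tag = '2.4.8'

if Version(settings_version) <= Version(latest_release_tag):
    print(f'Error: Version bump required: expected: >{latest_release_tag}, got: {settings_version}.')
    raise SystemExit(1)
```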
nbdev/serve.py
CHANGED
@@ -1,9 +1,11 @@
+"""A parallel ipynb processor (experimental)"""
+
 # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/api/17_serve.ipynb.
 
 # %% auto 0
 __all__ = ['proc_nbs']
 
-# %% ../nbs/api/17_serve.ipynb
+# %% ../nbs/api/17_serve.ipynb
 import ast,subprocess,threading,sys
 from shutil import rmtree,copy2
 
@@ -17,7 +19,7 @@ from .doclinks import nbglob_cli,nbglob
 from .processors import FilterDefaults
 import nbdev.serve_drv
 
-# %% ../nbs/api/17_serve.ipynb
+# %% ../nbs/api/17_serve.ipynb
 def _is_qpy(path:Path):
     "Is `path` a py script starting with frontmatter?"
     path = Path(path)
@@ -32,9 +34,9 @@ def _is_qpy(path:Path):
     vl = v.splitlines()
     if vl[0]=='---' and vl[-1]=='---': return '\n'.join(vl[1:-1])
 
-# %% ../nbs/api/17_serve.ipynb
+# %% ../nbs/api/17_serve.ipynb
 def _proc_file(s, cache, path, mtime=None):
-    skips = ('_proc', '_docs', '_site')
+    skips = ('_proc', '_docs', '_site', 'settings.ini')
     if not s.is_file() or any(o[0]=='.' or o in skips for o in s.parts): return
     d = cache/s.relative_to(path)
     if s.suffix=='.py': d = d.with_suffix('')
@@ -49,7 +51,7 @@ def _proc_file(s, cache, path, mtime=None):
     if md is not None: return s,d,md.strip()
     else: copy2(s,d)
 
-# %% ../nbs/api/17_serve.ipynb
+# %% ../nbs/api/17_serve.ipynb
 @delegates(nbglob_cli)
 def proc_nbs(
     path:str='', # Path to notebooks
@@ -64,6 +66,7 @@ def proc_nbs(
     path = Path(path or cfg.nbs_path)
     files = nbglob(path, func=Path, file_glob='', file_re='', **kwargs)
     if (path/'_quarto.yml').exists(): files.append(path/'_quarto.yml')
+    if (path/'_brand.yml').exists(): files.append(path/'_brand.yml')
     if (path/'_extensions').exists(): files.extend(nbglob(path/'_extensions', func=Path, file_glob='', file_re='', skip_file_re='^[.]'))
 
     # If settings.ini or filter script newer than cache folder modified, delete cache
nbdev/showdoc.py
CHANGED
@@ -1,3 +1,5 @@
+"""Display symbol documentation in notebook and website"""
+
 # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/api/08_showdoc.ipynb.
 
 # %% ../nbs/api/08_showdoc.ipynb 2
@@ -5,7 +7,6 @@ from __future__ import annotations
 from .doclinks import *
 from .config import get_config
 
-from fastcore.dispatch import TypeDispatch
 from fastcore.docments import *
 from fastcore.utils import *
 
@@ -16,152 +17,12 @@ from textwrap import fill
 from types import FunctionType
 
 # %% auto 0
-__all__ = ['
-           'colab_link']
-
-# %% ../nbs/api/08_showdoc.ipynb 6
-def _non_empty_keys(d:dict): return L([k for k,v in d.items() if v != inspect._empty])
-def _bold(s): return f'**{s}**' if s.strip() else s
-
-# %% ../nbs/api/08_showdoc.ipynb 7
-def _escape_markdown(s):
-    for c in '|^': s = re.sub(rf'\\?\{c}', f'\{c}', s)
-    return s.replace('\n', '<br>')
-
-# %% ../nbs/api/08_showdoc.ipynb 9
-def _maybe_nm(o):
-    if (o == inspect._empty): return ''
-    else: return o.__name__ if hasattr(o, '__name__') else _escape_markdown(str(o))
-
-# %% ../nbs/api/08_showdoc.ipynb 11
-def _list2row(l:list): return '| '+' | '.join([_maybe_nm(o) for o in l]) + ' |'
-
-# %% ../nbs/api/08_showdoc.ipynb 13
-class DocmentTbl:
-    # this is the column order we want these items to appear
-    _map = OrderedDict({'anno':'Type', 'default':'Default', 'docment':'Details'})
-
-    def __init__(self, obj, verbose=True, returns=True):
-        "Compute the docment table string"
-        self.verbose = verbose
-        self.returns = False if isdataclass(obj) else returns
-        try: self.params = L(signature_ex(obj, eval_str=True).parameters.keys())
-        except (ValueError,TypeError): self.params=[]
-        try: _dm = docments(obj, full=True, returns=returns)
-        except: _dm = {}
-        if 'self' in _dm: del _dm['self']
-        for d in _dm.values(): d['docment'] = ifnone(d['docment'], inspect._empty)
-        self.dm = _dm
-
-    @property
-    def _columns(self):
-        "Compute the set of fields that have at least one non-empty value so we don't show tables empty columns"
-        cols = set(flatten(L(self.dm.values()).filter().map(_non_empty_keys)))
-        candidates = self._map if self.verbose else {'docment': 'Details'}
-        return OrderedDict({k:v for k,v in candidates.items() if k in cols})
-
-    @property
-    def has_docment(self): return 'docment' in self._columns and self._row_list
-
-    @property
-    def has_return(self): return self.returns and bool(_non_empty_keys(self.dm.get('return', {})))
-
-    def _row(self, nm, props):
-        "unpack data for single row to correspond with column names."
-        return [nm] + [props[c] for c in self._columns]
-
-    @property
-    def _row_list(self):
-        "unpack data for all rows."
-        ordered_params = [(p, self.dm[p]) for p in self.params if p != 'self' and p in self.dm]
-        return L([self._row(nm, props) for nm,props in ordered_params])
-
-    @property
-    def _hdr_list(self): return [' '] + [_bold(l) for l in L(self._columns.values())]
-
-    @property
-    def hdr_str(self):
-        "The markdown string for the header portion of the table"
-        md = _list2row(self._hdr_list)
-        return md + '\n' + _list2row(['-' * len(l) for l in self._hdr_list])
-
-    @property
-    def params_str(self):
-        "The markdown string for the parameters portion of the table."
-        return '\n'.join(self._row_list.map(_list2row))
-
-    @property
-    def return_str(self):
-        "The markdown string for the returns portion of the table."
-        return _list2row(['**Returns**']+[_bold(_maybe_nm(self.dm['return'][c])) for c in self._columns])
+__all__ = ['BasicMarkdownRenderer', 'show_doc', 'BasicHtmlRenderer', 'doc', 'showdoc_nm', 'colab_link']
 
-
-        if not self.has_docment: return ''
-        _tbl = [self.hdr_str, self.params_str]
-        if self.has_return: _tbl.append(self.return_str)
-        return '\n'.join(_tbl)
-
-    def __eq__(self,other): return self.__str__() == str(other).strip()
-
-    __str__ = _repr_markdown_
-    __repr__ = basic_repr()
-
-# %% ../nbs/api/08_showdoc.ipynb 28
-def _docstring(sym):
-    npdoc = parse_docstring(sym)
-    return '\n\n'.join([npdoc['Summary'], npdoc['Extended']]).strip()
-
-# %% ../nbs/api/08_showdoc.ipynb 29
-def _fullname(o):
-    module,name = getattr(o, "__module__", None),qual_name(o)
-    return name if module is None or module in ('__main__','builtins') else module + '.' + name
-
-class ShowDocRenderer:
-    def __init__(self, sym, name:str|None=None, title_level:int=3):
-        "Show documentation for `sym`"
-        sym = getattr(sym, '__wrapped__', sym)
-        sym = getattr(sym, 'fget', None) or getattr(sym, 'fset', None) or sym
-        store_attr()
-        self.nm = name or qual_name(sym)
-        self.isfunc = inspect.isfunction(sym)
-        try: self.sig = signature_ex(sym, eval_str=True)
-        except (ValueError,TypeError): self.sig = None
-        self.docs = _docstring(sym)
-        self.dm = DocmentTbl(sym)
-        self.fn = _fullname(sym)
-
-    __repr__ = basic_repr()
-
-# %% ../nbs/api/08_showdoc.ipynb 30
-def _f_name(o): return f'<function {o.__name__}>' if isinstance(o, FunctionType) else None
-def _fmt_anno(o): return inspect.formatannotation(o).strip("'").replace(' ','')
-
-def _show_param(param):
-    "Like `Parameter.__str__` except removes: quotes in annos, spaces, ids in reprs"
-    kind,res,anno,default = param.kind,param._name,param._annotation,param._default
-    kind = '*' if kind==inspect._VAR_POSITIONAL else '**' if kind==inspect._VAR_KEYWORD else ''
-    res = kind+res
-    if anno is not inspect._empty: res += f':{_f_name(anno) or _fmt_anno(anno)}'
-    if default is not inspect._empty: res += f'={_f_name(default) or repr(default)}'
-    return res
-
-# %% ../nbs/api/08_showdoc.ipynb 32
-def _fmt_sig(sig):
-    if sig is None: return ''
-    p = {k:v for k,v in sig.parameters.items()}
-    _params = [_show_param(p[k]) for k in p.keys() if k != 'self']
-    return "(" + ', '.join(_params) + ")"
-
-def _wrap_sig(s):
-    "wrap a signature to appear on multiple lines if necessary."
-    pad = '> ' + ' ' * 5
-    indent = pad + ' ' * (s.find('(') + 1)
-    return fill(s, width=80, initial_indent=pad, subsequent_indent=indent)
-
-# %% ../nbs/api/08_showdoc.ipynb 34
+# %% ../nbs/api/08_showdoc.ipynb
 def _ext_link(url, txt, xtra=""): return f'[{txt}]({url}){{target="_blank" {xtra}}}'
 
-class BasicMarkdownRenderer(
+class BasicMarkdownRenderer(MarkdownRenderer):
     "Markdown renderer for `show_doc`"
     def _repr_markdown_(self):
         doc = '---\n\n'
@@ -169,14 +30,9 @@ class BasicMarkdownRenderer(ShowDocRenderer):
         if src: doc += _ext_link(src, 'source', 'style="float:right; font-size:smaller"') + '\n\n'
         h = '#'*self.title_level
         doc += f'{h} {self.nm}\n\n'
-
-        doc += f'{sig}'
-        if self.docs: doc += f"\n\n*{self.docs}*"
-        if self.dm.has_docment: doc += f"\n\n{self.dm}"
-        return doc
-    __repr__=__str__=_repr_markdown_
+        return doc+super()._repr_markdown_()
 
-# %% ../nbs/api/08_showdoc.ipynb
+# %% ../nbs/api/08_showdoc.ipynb
 def show_doc(sym, # Symbol to document
              renderer=None, # Optional renderer (defaults to markdown)
             name:str|None=None, # Optionally override displayed name of `sym`
@@ -187,44 +43,71 @@ def show_doc(sym, # Symbol to document
     elif isinstance(renderer,str):
         p,m = renderer.rsplit('.', 1)
         renderer = getattr(import_module(p), m)
-    if
+    if isinstance_str(sym, "Function"): pass
+    elif isinstance_str(sym, "TypeDispatch"): pass # use _str as TypeDispatch will be removed from fastcore
     else:return renderer(sym or show_doc, name=name, title_level=title_level)
 
-# %% ../nbs/api/08_showdoc.ipynb
+# %% ../nbs/api/08_showdoc.ipynb
+def _create_html_table(table_str):
+    def split_row(row):
+        return re.findall(r'\|(?:(?:\\.|[^|\\])*)', row)
+
+    def unescape_cell(cell):
+        return cell.strip(' *|').replace(r'\|', '|')
+
+    lines = table_str.strip().split('\n')
+    header = [f"<th>{unescape_cell(cell)}</th>" for cell in split_row(lines[0])]
+    rows = [[f"<td>{unescape_cell(cell)}</td>" for cell in split_row(line)] for line in lines[2:]]
+
+    return f'''<table>
+<thead><tr>{' '.join(header)}</tr></thead>
+<tbody>{''.join(f'<tr>{" ".join(row)}</tr>' for row in rows)}</tbody>
+</table>'''
+
+# %% ../nbs/api/08_showdoc.ipynb
 def _html_link(url, txt): return f'<a href="{url}" target="_blank" rel="noreferrer noopener">{txt}</a>'
 
+# %% ../nbs/api/08_showdoc.ipynb
+from fastcore.docments import _fmt_sig
+
+# %% ../nbs/api/08_showdoc.ipynb
 class BasicHtmlRenderer(ShowDocRenderer):
-    "
+    "HTML renderer for `show_doc`"
     def _repr_html_(self):
         doc = '<hr/>\n'
+        src = NbdevLookup().code(self.fn)
         doc += f'<h{self.title_level}>{self.nm}</h{self.title_level}>\n'
-
-
+        sig = _fmt_sig(self.sig) if self.sig else ''
+        # Escape < and > characters in the signature
+        sig = sig.replace('<', '&lt;').replace('>', '&gt;')
+        doc += f'<blockquote><pre><code>{self.nm} {sig}</code></pre></blockquote>'
+        if self.docs:
+            doc += f"<p><i>{self.docs}</i></p>"
+        if src: doc += f"<br/>{_html_link(src, 'source')}"
+        if self.dm.has_docment: doc += _create_html_table(str(self.dm))
         return doc
 
     def doc(self):
         "Show `show_doc` info along with link to docs"
         from IPython.display import display,HTML
         res = self._repr_html_()
-        docs = NbdevLookup().doc(self.fn)
-        if docs is not None: res += '\n<p>' +_html_link(docs, "Show in docs") + '</p>'
         display(HTML(res))
 
-# %% ../nbs/api/08_showdoc.ipynb
+# %% ../nbs/api/08_showdoc.ipynb
 def doc(elt):
     "Show `show_doc` info along with link to docs"
     BasicHtmlRenderer(elt).doc()
 
-# %% ../nbs/api/08_showdoc.ipynb
+# %% ../nbs/api/08_showdoc.ipynb
 def showdoc_nm(tree):
     "Get the fully qualified name for showdoc."
     return ifnone(patch_name(tree), tree.name)
 
-# %% ../nbs/api/08_showdoc.ipynb
+# %% ../nbs/api/08_showdoc.ipynb
 def colab_link(path):
     "Get a link to the notebook at `path` on Colab"
     from IPython.display import Markdown
     cfg = get_config()
     pre = 'https://colab.research.google.com/github/'
-    res = f'{pre}{cfg.user}/{cfg.
+    res = f'{pre}{cfg.user}/{cfg.repo}/blob/{cfg.branch}/{cfg.nbs_path.name}/{path}.ipynb'
     display(Markdown(f'[Open `{path}` in Colab]({res})'))
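Most of the code removed here (DocmentTbl, ShowDocRenderer, the signature-formatting helpers) appears to have moved into `fastcore`: the new module imports `_fmt_sig` from `fastcore.docments` and derives `BasicMarkdownRenderer` from `MarkdownRenderer`. `BasicHtmlRenderer` now renders the signature, docstring, source link and docment table itself, converting the markdown docment table to HTML with the new `_create_html_table`. A minimal sketch of that conversion, assuming nbdev >= 2.4 is installed; `_create_html_table` is a private helper, imported here purely for illustration:

```python
# Sketch of the markdown-to-HTML table conversion used by BasicHtmlRenderer.
# Assumes nbdev>=2.4; _create_html_table is private, so this is illustrative only.
from nbdev.showdoc import _create_html_table

md_tbl = ("|    | **Type** | **Default** | **Details** |\n"
          "| -- | -------- | ----------- | ----------- |\n"
          "| x  | int      | 1           | value to increment |")
print(_create_html_table(md_tbl))  # emits a <table> with <thead>/<tbody> rows
```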
nbdev/sync.py
CHANGED
@@ -1,9 +1,11 @@
+"""Propagate small changes in the library back to notebooks"""
+
 # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/api/06_sync.ipynb.
 
 # %% auto 0
 __all__ = ['absolute_import', 'nbdev_update']
 
-# %% ../nbs/api/06_sync.ipynb
+# %% ../nbs/api/06_sync.ipynb
 from .imports import *
 from .config import *
 from .maker import *
@@ -17,9 +19,9 @@ from fastcore.script import *
 from fastcore.xtras import *
 
 import ast
-
+import importlib
 
-# %% ../nbs/api/06_sync.ipynb
+# %% ../nbs/api/06_sync.ipynb
 def absolute_import(name, fname, level):
     "Unwarps a relative import in `name` according to `fname`"
     if not level: return name
@@ -27,22 +29,25 @@ def absolute_import(name, fname, level):
     if not name: return '.'.join(mods)
     return '.'.join(mods[:len(mods)-level+1]) + f".{name}"
 
-# %% ../nbs/api/06_sync.ipynb
+# %% ../nbs/api/06_sync.ipynb
 @functools.lru_cache(maxsize=None)
 def _mod_files():
-
+    midx_spec = importlib.util.spec_from_file_location("_modidx", get_config().lib_path / "_modidx.py")
+    midx = importlib.util.module_from_spec(midx_spec)
+    midx_spec.loader.exec_module(midx)
+
     return L(files for mod in midx.d['syms'].values() for _,files in mod.values()).unique()
 
-# %% ../nbs/api/06_sync.ipynb
-_re_import = re.compile("from\s+\S+\s+import\s+\S")
+# %% ../nbs/api/06_sync.ipynb
+_re_import = re.compile(r"from\s+\S+\s+import\s+\S")
 
-# %% ../nbs/api/06_sync.ipynb
+# %% ../nbs/api/06_sync.ipynb
 def _to_absolute(code, py_path, lib_dir):
     if not _re_import.search(code): return code
     res = update_import(code, ast.parse(code).body, str(py_path.relative_to(lib_dir).parent), absolute_import)
     return ''.join(res) if res else code
 
-# %% ../nbs/api/06_sync.ipynb
+# %% ../nbs/api/06_sync.ipynb
 def _update_nb(nb_path, cells, lib_dir):
     "Update notebook `nb_path` with contents from `cells`"
     nbp = NBProcessor(nb_path, ExportModuleProc(), rm_directives=False)
@@ -54,19 +59,20 @@ def _update_nb(nb_path, cells, lib_dir):
         nbcell.source = ''.join(dirs) + _to_absolute(cell.code, cell.py_path, lib_dir)
     write_nb(nbp.nb, nb_path)
 
-# %% ../nbs/api/06_sync.ipynb
+# %% ../nbs/api/06_sync.ipynb
 def _update_mod(py_path, lib_dir):
     "Propagate changes from cells in module `py_path` to corresponding notebooks"
     py_cells = L(_iter_py_cells(py_path)).filter(lambda o: o.nb != 'auto')
     for nb_path,cells in groupby(py_cells, 'nb_path').items(): _update_nb(nb_path, cells, lib_dir)
 
-# %% ../nbs/api/06_sync.ipynb
+# %% ../nbs/api/06_sync.ipynb
 @call_parse
 def nbdev_update(fname:str=None): # A Python file name to update
     "Propagate change in modules matching `fname` to notebooks that created them"
     if fname and fname.endswith('.ipynb'): raise ValueError("`nbdev_update` operates on .py files. If you wish to convert notebooks instead, see `nbdev_export`.")
     if os.environ.get('IN_TEST',0): return
     cfg = get_config()
+    if not cfg.cell_number: raise ValueError("`nbdev_update` does not support without cell_number in .py files. Please check your settings.ini")
     fname = Path(fname or cfg.lib_path)
     lib_dir = cfg.lib_path.parent
     files = globtastic(fname, file_glob='*.py', skip_folder_re='^[_.]').filter(lambda x: str(Path(x).absolute().relative_to(lib_dir) in _mod_files()))