nbdev 2.4.14__py3-none-any.whl → 3.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nbdev/__init__.py +1 -1
- nbdev/_modidx.py +23 -9
- nbdev/clean.py +2 -2
- nbdev/cli.py +14 -14
- nbdev/config.py +210 -152
- nbdev/doclinks.py +4 -5
- nbdev/export.py +3 -5
- nbdev/migrate.py +96 -2
- nbdev/quarto.py +3 -3
- nbdev/release.py +14 -26
- nbdev/serve.py +2 -2
- nbdev/test.py +2 -2
- {nbdev-2.4.14.dist-info → nbdev-3.0.0.dist-info}/METADATA +9 -25
- nbdev-3.0.0.dist-info/RECORD +30 -0
- {nbdev-2.4.14.dist-info → nbdev-3.0.0.dist-info}/WHEEL +1 -1
- {nbdev-2.4.14.dist-info → nbdev-3.0.0.dist-info}/entry_points.txt +1 -0
- nbdev-2.4.14.dist-info/RECORD +0 -30
- {nbdev-2.4.14.dist-info → nbdev-3.0.0.dist-info}/licenses/LICENSE +0 -0
- {nbdev-2.4.14.dist-info → nbdev-3.0.0.dist-info}/top_level.txt +0 -0
nbdev/__init__.py
CHANGED
nbdev/_modidx.py
CHANGED
@@ -28,23 +28,31 @@ d = { 'settings': { 'branch': 'main',
   'nbdev.cli.nbdev_new': ('api/cli.html#nbdev_new', 'nbdev/cli.py'),
   'nbdev.cli.nbdev_update_license': ('api/cli.html#nbdev_update_license', 'nbdev/cli.py'),
   'nbdev.cli.watch_export': ('api/cli.html#watch_export', 'nbdev/cli.py')},
-  'nbdev.config': { 'nbdev.config.
+  'nbdev.config': { 'nbdev.config.ConfigToml': ('api/config.html#configtoml', 'nbdev/config.py'),
+  'nbdev.config.ConfigToml.__getattr__': ('api/config.html#configtoml.__getattr__', 'nbdev/config.py'),
+  'nbdev.config.ConfigToml.__getitem__': ('api/config.html#configtoml.__getitem__', 'nbdev/config.py'),
+  'nbdev.config.ConfigToml.__init__': ('api/config.html#configtoml.__init__', 'nbdev/config.py'),
+  'nbdev.config.ConfigToml.d': ('api/config.html#configtoml.d', 'nbdev/config.py'),
+  'nbdev.config.ConfigToml.get': ('api/config.html#configtoml.get', 'nbdev/config.py'),
+  'nbdev.config.ConfigToml.path': ('api/config.html#configtoml.path', 'nbdev/config.py'),
+  'nbdev.config.ConfigToml.version': ('api/config.html#configtoml.version', 'nbdev/config.py'),
   'nbdev.config._basic_export_nb': ('api/config.html#_basic_export_nb', 'nbdev/config.py'),
-  'nbdev.config._cfg2txt': ('api/config.html#_cfg2txt', 'nbdev/config.py'),
   'nbdev.config._fetch_from_git': ('api/config.html#_fetch_from_git', 'nbdev/config.py'),
+  'nbdev.config._find_nbdev_pyproject': ('api/config.html#_find_nbdev_pyproject', 'nbdev/config.py'),
   'nbdev.config._get_info': ('api/config.html#_get_info', 'nbdev/config.py'),
   'nbdev.config._git_repo': ('api/config.html#_git_repo', 'nbdev/config.py'),
+  'nbdev.config._has_nbdev': ('api/config.html#_has_nbdev', 'nbdev/config.py'),
   'nbdev.config._has_py': ('api/config.html#_has_py', 'nbdev/config.py'),
-  'nbdev.config.
-  'nbdev.config.
-  'nbdev.config._type': ('api/config.html#_type', 'nbdev/config.py'),
-  'nbdev.config._xdg_config_paths': ('api/config.html#_xdg_config_paths', 'nbdev/config.py'),
+  'nbdev.config._load_toml': ('api/config.html#_load_toml', 'nbdev/config.py'),
+  'nbdev.config._user_config': ('api/config.html#_user_config', 'nbdev/config.py'),
   'nbdev.config.add_init': ('api/config.html#add_init', 'nbdev/config.py'),
-  'nbdev.config.
+  'nbdev.config.bump_version': ('api/config.html#bump_version', 'nbdev/config.py'),
   'nbdev.config.create_output': ('api/config.html#create_output', 'nbdev/config.py'),
   'nbdev.config.get_config': ('api/config.html#get_config', 'nbdev/config.py'),
   'nbdev.config.is_nbdev': ('api/config.html#is_nbdev', 'nbdev/config.py'),
   'nbdev.config.nbdev_create_config': ('api/config.html#nbdev_create_config', 'nbdev/config.py'),
+  'nbdev.config.read_version': ('api/config.html#read_version', 'nbdev/config.py'),
+  'nbdev.config.set_version': ('api/config.html#set_version', 'nbdev/config.py'),
   'nbdev.config.show_src': ('api/config.html#show_src', 'nbdev/config.py'),
   'nbdev.config.update_proj': ('api/config.html#update_proj', 'nbdev/config.py'),
   'nbdev.config.update_version': ('api/config.html#update_version', 'nbdev/config.py'),
@@ -145,15 +153,20 @@ d = { 'settings': { 'branch': 'main',
   'nbdev.merge.unpatch': ('api/merge.html#unpatch', 'nbdev/merge.py')},
   'nbdev.migrate': { 'nbdev.migrate.MigrateProc': ('api/migrate.html#migrateproc', 'nbdev/migrate.py'),
   'nbdev.migrate.MigrateProc.begin': ('api/migrate.html#migrateproc.begin', 'nbdev/migrate.py'),
+  'nbdev.migrate._build_classifiers': ('api/migrate.html#_build_classifiers', 'nbdev/migrate.py'),
   'nbdev.migrate._cat_slug': ('api/migrate.html#_cat_slug', 'nbdev/migrate.py'),
   'nbdev.migrate._co': ('api/migrate.html#_co', 'nbdev/migrate.py'),
   'nbdev.migrate._convert_callout': ('api/migrate.html#_convert_callout', 'nbdev/migrate.py'),
   'nbdev.migrate._convert_video': ('api/migrate.html#_convert_video', 'nbdev/migrate.py'),
   'nbdev.migrate._file_slug': ('api/migrate.html#_file_slug', 'nbdev/migrate.py'),
+  'nbdev.migrate._fmt_script': ('api/migrate.html#_fmt_script', 'nbdev/migrate.py'),
   'nbdev.migrate._fp_convert': ('api/migrate.html#_fp_convert', 'nbdev/migrate.py'),
   'nbdev.migrate._fp_fm': ('api/migrate.html#_fp_fm', 'nbdev/migrate.py'),
   'nbdev.migrate._fp_image': ('api/migrate.html#_fp_image', 'nbdev/migrate.py'),
   'nbdev.migrate._is_jekyll_post': ('api/migrate.html#_is_jekyll_post', 'nbdev/migrate.py'),
+  'nbdev.migrate._migrate_workflows': ('api/migrate.html#_migrate_workflows', 'nbdev/migrate.py'),
+  'nbdev.migrate._nbdev_migrate_config': ('api/migrate.html#_nbdev_migrate_config', 'nbdev/migrate.py'),
+  'nbdev.migrate._py_val': ('api/migrate.html#_py_val', 'nbdev/migrate.py'),
   'nbdev.migrate._re_v1': ('api/migrate.html#_re_v1', 'nbdev/migrate.py'),
   'nbdev.migrate._repl_directives': ('api/migrate.html#_repl_directives', 'nbdev/migrate.py'),
   'nbdev.migrate._repl_v1dir': ('api/migrate.html#_repl_v1dir', 'nbdev/migrate.py'),
@@ -161,11 +174,13 @@ d = { 'settings': { 'branch': 'main',
   'nbdev.migrate._replace_fm': ('api/migrate.html#_replace_fm', 'nbdev/migrate.py'),
   'nbdev.migrate._rm_quote': ('api/migrate.html#_rm_quote', 'nbdev/migrate.py'),
   'nbdev.migrate._subv1': ('api/migrate.html#_subv1', 'nbdev/migrate.py'),
+  'nbdev.migrate._toml_val': ('api/migrate.html#_toml_val', 'nbdev/migrate.py'),
   'nbdev.migrate._v': ('api/migrate.html#_v', 'nbdev/migrate.py'),
   'nbdev.migrate.fp_md_fm': ('api/migrate.html#fp_md_fm', 'nbdev/migrate.py'),
   'nbdev.migrate.migrate_md': ('api/migrate.html#migrate_md', 'nbdev/migrate.py'),
   'nbdev.migrate.migrate_nb': ('api/migrate.html#migrate_nb', 'nbdev/migrate.py'),
-  'nbdev.migrate.nbdev_migrate': ('api/migrate.html#nbdev_migrate', 'nbdev/migrate.py')
+  'nbdev.migrate.nbdev_migrate': ('api/migrate.html#nbdev_migrate', 'nbdev/migrate.py'),
+  'nbdev.migrate.nbdev_migrate_config': ('api/migrate.html#nbdev_migrate_config', 'nbdev/migrate.py')},
   'nbdev.process': { 'nbdev.process.NBProcessor': ('api/process.html#nbprocessor', 'nbdev/process.py'),
   'nbdev.process.NBProcessor.__init__': ('api/process.html#nbprocessor.__init__', 'nbdev/process.py'),
   'nbdev.process.NBProcessor._proc': ('api/process.html#nbprocessor._proc', 'nbdev/process.py'),
@@ -303,7 +318,6 @@ d = { 'settings': { 'branch': 'main',
   'nbdev.release._run': ('api/release.html#_run', 'nbdev/release.py'),
   'nbdev.release._write_yaml': ('api/release.html#_write_yaml', 'nbdev/release.py'),
   'nbdev.release.anaconda_upload': ('api/release.html#anaconda_upload', 'nbdev/release.py'),
-  'nbdev.release.bump_version': ('api/release.html#bump_version', 'nbdev/release.py'),
   'nbdev.release.changelog': ('api/release.html#changelog', 'nbdev/release.py'),
   'nbdev.release.chk_conda_rel': ('api/release.html#chk_conda_rel', 'nbdev/release.py'),
   'nbdev.release.conda_output_path': ('api/release.html#conda_output_path', 'nbdev/release.py'),
nbdev/clean.py
CHANGED
@@ -125,8 +125,8 @@ def process_write(warn_msg, proc_nb, f_in, f_out=None, disp=False):
 def _nbdev_clean(nb, path=None, clear_all=None):
     cfg = get_config(path=path)
     clear_all = clear_all or cfg.clear_all
-    allowed_metadata_keys = cfg.get("allowed_metadata_keys")
-    allowed_cell_metadata_keys = cfg.get("allowed_cell_metadata_keys")
+    allowed_metadata_keys = cfg.get("allowed_metadata_keys") or []
+    allowed_cell_metadata_keys = cfg.get("allowed_cell_metadata_keys") or []
     clean_nb(nb, clear_all, allowed_metadata_keys, allowed_cell_metadata_keys, cfg.clean_ids)
     if path: nbdev_trust.__wrapped__(path)

nbdev/cli.py
CHANGED
@@ -65,7 +65,7 @@ def _render_nb(fn, cfg):
     "Render templated values like `{{lib_name}}` in notebook at `fn` from `cfg`"
     txt = fn.read_text()
     txt = txt.replace('from your_lib.core', f'from {cfg.lib_path}.core') # for compatibility with old templates
-    for k,v in cfg.
+    for k,v in cfg.items(): txt = txt.replace('{{'+k+'}}', str(v))
     fn.write_text(txt)

 # %% ../nbs/api/13_cli.ipynb #dd385911-aa8f-44e7-8d46-7b8a20f3b010
@@ -87,10 +87,11 @@ def nbdev_new(**kwargs):
     from ghapi.core import GhApi
     nbdev_create_config.__wrapped__(**kwargs)
     cfg = get_config()
-    _update_repo_meta(cfg)
+    if (Path('.git')).exists(): _update_repo_meta(cfg)
+    else: print(f"No git repo found. Run: gh repo create {cfg.user}/{cfg.repo} --public --source=.")
     path = Path()

-    _ORG_OR_USR,_REPOSITORY = 'answerdotai','
+    _ORG_OR_USR,_REPOSITORY = 'answerdotai','nbdev3-template'
     _TEMPLATE = f'{_ORG_OR_USR}/{_REPOSITORY}'
     template = kwargs.get('template', _TEMPLATE)
     try: org_or_usr, repo = template.split('/')
@@ -143,24 +144,23 @@ def nbdev_update_license(
     avail_lic = GhApi().licenses.get_all_commonly_used().map(lambda x: x['key'])

     cfg = get_config()
-    curr_lic = cfg
+    curr_lic = cfg.license

     mapped = mapping.get(to, None)
-
     if mapped not in avail_lic: raise ValueError(f"{to} is not an available license")
     body = GhApi().licenses.get(mapped)['body']

-
-    body = body.replace('[year] [fullname]',
-
-    content = open("settings.ini", "r").read()
-    content = re.sub(r"^(license\s*=\s*).*?$", r"\1 " + to, content, flags=re.MULTILINE)
+    copyright = f"{datetime.now().year}, {cfg.author}"
+    body = body.replace('[year], [fullname]', copyright)
+    body = body.replace('[year] [fullname]', copyright)

-
-
+    # Update pyproject.toml
+    pyproj = cfg.config_file
+    content = pyproj.read_text()
+    content = re.sub(r'^(license\s*=\s*\{text\s*=\s*").*?(")', rf'\g<1>{to}\2', content, flags=re.MULTILINE)
+    pyproj.write_text(content)

-
-    lic.write(body)
+    Path('LICENSE').write_text(body)
     print(f"License updated from {curr_lic} to {to}")

 # %% ../nbs/api/13_cli.ipynb #412b4cd2
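A quick check of the new license substitution in `nbdev_update_license`: it now rewrites the `license = {text = "..."}` entry in `pyproject.toml` instead of the `license =` line in `settings.ini`. A minimal sketch of the regex it uses (the `line` value and the `MIT` target are illustrative, not taken from the diff):

```python
import re

# Pattern and replacement are the ones added in the cli.py hunk above.
line = 'license = {text = "Apache-2.0"}'   # hypothetical pyproject.toml line
to = 'MIT'
print(re.sub(r'^(license\s*=\s*\{text\s*=\s*").*?(")', rf'\g<1>{to}\2', line, flags=re.MULTILINE))
# -> license = {text = "MIT"}
```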
nbdev/config.py
CHANGED
@@ -3,8 +3,9 @@
 # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/api/01_config.ipynb.

 # %% auto #0
-__all__ = ['
-           '
+__all__ = ['pyproject_nm', 'pyproject_tmpl', 'nbdev_defaults', 'pyproj_tmpl', 'nbdev_create_config', 'ConfigToml', 'get_config',
+           'is_nbdev', 'create_output', 'show_src', 'read_version', 'set_version', 'bump_version', 'update_version',
+           'update_proj', 'add_init', 'write_cells']

 # %% ../nbs/api/01_config.ipynb #6fd14ecd
 from datetime import datetime
@@ -15,71 +16,21 @@ from fastcore.script import *
 from fastcore.style import *
 from fastcore.xdg import *

-import ast,warnings
+import ast,warnings,tomli
 from IPython.display import Markdown
 from execnb.nbio import read_nb,NbCell
 from urllib.error import HTTPError

 # %% ../nbs/api/01_config.ipynb #117128e6
-
-
+pyproject_nm = 'pyproject.toml'
+_nbdev_home_dir = 'nbdev'
+_user_cfg_name = 'config.toml'

 # %% ../nbs/api/01_config.ipynb #adf0834f
 def _git_repo():
     try: return repo_details(run('git config --get remote.origin.url'))[1]
     except OSError: return

-# %% ../nbs/api/01_config.ipynb #efccb7f2
-# When adding a named default to the list below, be sure that that name
-# is also added to one of the sections in `_nbdev_cfg_sections` as well,
-# or it won't get written by `nbdev_create_config`:
-def _apply_defaults(
-    cfg,
-    lib_name='%(repo)s', # Package name
-    git_url='https://github.com/%(user)s/%(repo)s', # Repo URL
-    custom_sidebar:bool_arg=False, # Use a custom sidebar.yml?
-    nbs_path:Path='nbs', # Path to notebooks
-    lib_path:Path=None, # Path to package root (default: `repo` with `-` replaced by `_`)
-    doc_path:Path='_docs', # Path to rendered docs
-    tst_flags='notest', # Test flags
-    version='0.0.1', # Version of this release
-    doc_host='https://%(user)s.github.io', # Hostname for docs
-    doc_baseurl='/%(repo)s', # Base URL for docs
-    keywords='nbdev jupyter notebook python', # Package keywords
-    license='apache2', # License for the package
-    copyright:str=None, # Copyright for the package, defaults to '`current_year` onwards, `author`'
-    status='3', # Development status PyPI classifier
-    min_python='3.9', # Minimum Python version PyPI classifier
-    audience='Developers', # Intended audience PyPI classifier
-    language='English', # Language PyPI classifier
-    recursive:bool_arg=True, # Include subfolders in notebook globs?
-    black_formatting:bool_arg=False, # Format libraries with black?
-    readme_nb='index.ipynb', # Notebook to export as repo readme
-    title='%(lib_name)s', # Quarto website title
-    allowed_metadata_keys='', # Preserve the list of keys in the main notebook metadata
-    allowed_cell_metadata_keys='', # Preserve the list of keys in cell level metadata
-    jupyter_hooks:bool_arg=False, # Run Jupyter hooks?
-    clean_ids:bool_arg=True, # Remove ids from plaintext reprs?
-    clear_all:bool_arg=False, # Remove all cell metadata and cell outputs?
-    put_version_in_init:bool_arg=True, # Add the version to the main __init__.py in nbdev_export
-    update_pyproject:bool_arg=True, # Create/update pyproject.toml with correct project name
-    skip_procs:str='', # A comma-separated list of processors that you want to skip
-):
-    "Apply default settings where missing in `cfg`."
-    if 'cell_number' in cfg:
-        warnings.warn("`cell_number` in settings.ini is deprecated and ignored. Cell IDs are now used instead.", DeprecationWarning)
-    if getattr(cfg,'repo',None) is None:
-        cfg.repo = _git_repo()
-        if cfg.repo is None:
-            _parent = Path.cwd()
-            cfg.repo = _parent.parent.name if _parent.name=='nbs' else _parent.name
-    if lib_path is None: lib_path = cfg.repo.replace('-', '_')
-    if copyright is None: copyright = f"{datetime.now().year} onwards, %(author)s"
-    for k,v in locals().items():
-        if k.startswith('_') or k == 'cfg' or cfg.get(k) is not None: continue
-        cfg[k] = v
-    return cfg
-
 # %% ../nbs/api/01_config.ipynb #6eeafafd
 def _get_info(owner, repo, default_branch='main', default_kw='nbdev'):
     from ghapi.all import GhApi
@@ -88,7 +39,7 @@ def _get_info(owner, repo, default_branch='main', default_kw='nbdev'):
     try: r = api.repos.get()
     except HTTPError:
         msg= [f"""Could not access repo: {owner}/{repo} to find your default branch - `{default_branch}` assumed.
-Edit `
+Edit `pyproject.toml` if this is incorrect.
 In the future, you can allow nbdev to see private repos by setting the environment variable GITHUB_TOKEN as described here:
 https://nbdev.fast.ai/api/release.html#setup"""]
         print(''.join(msg))
@@ -98,125 +49,204 @@ https://nbdev.fast.ai/api/release.html#setup"""]

 # %% ../nbs/api/01_config.ipynb #35d5c037
 def _fetch_from_git(raise_err=False):
-    "Get information for
+    "Get information for pyproject.toml from git."
     res={}
     try:
         url = run('git config --get remote.origin.url')
         res['user'],res['repo'] = repo_details(url)
         res['branch'],res['keywords'],desc = _get_info(owner=res['user'], repo=res['repo'])
         if desc: res['description'] = desc
-        res['author'] = run('git config --get user.name').strip()
+        res['author'] = run('git config --get user.name').strip()
         res['author_email'] = run('git config --get user.email').strip()
     except OSError as e:
         if raise_err: raise(e)
     else: res['lib_name'] = res['repo'].replace('-','_')
     return res

-# %% ../nbs/api/01_config.ipynb #
-
-
-
-    for k,v in cfg.items():
-        inf = inferred.get(k,None)
-        msg = S.light_blue(k) + ' = '
-        if v is None:
-            if inf is None: res[k] = input(f'# Please enter a value for {k}\n'+msg)
-            else:
-                res[k] = inf
-                print(msg+res[k]+' # Automatically inferred from git')
-    return res
-
-# %% ../nbs/api/01_config.ipynb #8348a963
-def _cfg2txt(cfg, head, sections, tail=''):
-    "Render `cfg` with commented sections."
-    nm = cfg.d.name
-    res = f'[{nm}]\n'+head
-    for t,ks in sections.items():
-        res += f'### {t} ###\n'
-        for k in ks.split(): res += f"{k} = {cfg._cfg.get(nm, k, raw=True)}\n" # TODO: add `raw` to `Config.get`
-        res += '\n'
-    res += tail
-    return res.strip()
-
-# %% ../nbs/api/01_config.ipynb #a4ef6546
-_nbdev_cfg_head = '''# All sections below are required unless otherwise specified.
-# See https://github.com/AnswerDotAI/nbdev/blob/main/settings.ini for examples.
+# %% ../nbs/api/01_config.ipynb #05aae09f
+pyproject_tmpl = '''[build-system]
+requires = ["setuptools>=64"]
+build-backend = "setuptools.build_meta"

-
-
-
-
-
-
-
-
-
-
-
+[project]
+name = "{name}"
+dynamic = ["version"]
+description = "{description}"
+readme = "README.md"
+requires-python = ">={min_python}"
+license = {{text = "{license}"}}
+authors = [{{name = "{author}", email = "{author_email}"}}]
+keywords = {keywords}
+classifiers = [
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3 :: Only",
+]
+dependencies = []
+
+[project.urls]
+Repository = "{git_url}"
+Documentation = "{doc_url}"
+
+[project.entry-points.nbdev]
+{lib_path} = "{lib_path}._modidx:d"
+
+[tool.setuptools.dynamic]
+version = {{attr = "{lib_path}.__version__"}}
+
+[tool.setuptools.packages.find]
+include = ["{lib_path}"]
+
+[tool.nbdev]
 '''

-# %% ../nbs/api/01_config.ipynb #
+# %% ../nbs/api/01_config.ipynb #3fc5c33f
 @call_parse
-@delegates(_apply_defaults, but='cfg')
 def nbdev_create_config(
     repo:str=None, # Repo name
-    branch:str=
+    branch:str='main', # Repo default branch
     user:str=None, # Repo username
     author:str=None, # Package author's name
     author_email:str=None, # Package author's email address
-    description:str=
+    description:str='', # Short summary of the package
     path:str='.', # Path to create config file
-
-
+    min_python:str='3.10', # Minimum Python version
+    license:str='Apache-2.0', # License (SPDX identifier)
 ):
-    "Create a config file."
-
+    "Create a pyproject.toml config file."
+    path = Path(path)
+    path.mkdir(exist_ok=True, parents=True)
+
+    # Infer from git if not provided
     inf = _fetch_from_git()
-
-
-    if
-
-
-
-
-
+    repo = repo or inf.get('repo') or path.resolve().name
+    user = user or inf.get('user', '')
+    if not user: raise ValueError("Could not infer `user` from git. Please pass --user explicitly.")
+    author = author or inf.get('author', '')
+    if not author: raise ValueError("Could not infer `author` from git. Please pass --author explicitly.")
+    author_email = author_email or inf.get('author_email', '')
+    if not author_email: raise ValueError("Could not infer `author_email` from git. Please pass --author_email explicitly.")
+    branch = branch or inf.get('branch', 'main')
+    description = description or inf.get('description', '')
+
+    lib_path = repo.replace('-', '_')
+    git_url = f"https://github.com/{user}/{repo}" if user else ''
+    doc_url = f"https://{user}.github.io/{repo}/" if user else ''
+    keywords = inf.get('keywords', 'nbdev').split()
+
+    txt = pyproject_tmpl.format(name=repo, lib_path=lib_path, description=description, min_python=min_python, license=license,
+                                author=author, author_email=author_email, keywords=keywords, git_url=git_url, doc_url=doc_url, branch=branch)
+
+    cfg_file = path / pyproject_nm
+    if cfg_file.exists(): warn(f'{cfg_file} already exists')
+    else:
+        cfg_file.write_text(txt)
+        print(f'{cfg_file} created.')

 # %% ../nbs/api/01_config.ipynb #0e56064f
-def
-
-    return
-
-
-
-
-
+def _load_toml(p):
+    "Load TOML file at `p` into a dict"
+    return tomli.loads(Path(p).read_text(encoding='utf-8'))
+
+def _has_nbdev(p):
+    "True if pyproject.toml at `p` has [tool.nbdev]"
+    try: return bool(_load_toml(p).get('tool', {}).get('nbdev', {}))
+    except Exception: return False
+
+def _find_nbdev_pyproject(path=None):
+    "Find nearest pyproject.toml containing [tool.nbdev], walking up from `path`"
+    p = Path(path or Path.cwd()).resolve()
+    for d in [p] + list(p.parents):
+        f = d/pyproject_nm
+        if f.exists() and _has_nbdev(f): return f

 # %% ../nbs/api/01_config.ipynb #3dac70e0
-
-
-
-
-
+nbdev_defaults = dict(nbs_path='nbs', doc_path='_docs', tst_flags='notest', recursive=True, readme_nb='index.ipynb',
+    clean_ids=True, clear_all=False, put_version_in_init=True, jupyter_hooks=False, black_formatting=False, branch='main')
+
+_path_keys = 'lib_path', 'nbs_path', 'doc_path'
+
+# %% ../nbs/api/01_config.ipynb #165ff301
+class ConfigToml(AttrDict):
+    def __init__(self, d, proj, cfg_file):
+        super().__init__({**nbdev_defaults, **d})
+        self.config_file = cfg_file
+        self.config_path = cfg_file.parent
+
+        self.lib_name = proj.get('name', '')
+        self.title = self.get('title') or self.lib_name
+        self.description = proj.get('description', '')
+        self.keywords = proj.get('keywords', [])
+        self.min_python = (proj.get('requires-python') or '>=3.9').lstrip('>=')
+        auths = proj.get('authors') or [{}]
+        self.author = auths[0].get('name')
+        self.author_email = auths[0].get('email')
+
+
+        urls = proj.get('urls') or {}
+        self.git_url = (urls.get('Repository') or urls.get('Source') or '').rstrip('/')
+        self.doc_url = urls.get('Documentation') or ''
+        self.user, self.repo = repo_details(self.git_url) if self.git_url else ('', '')
+        # Derive doc_host and doc_baseurl from doc_url
+        from urllib.parse import urlparse
+        u = urlparse(self.doc_url)
+        self.doc_host = f"{u.scheme}://{u.netloc}" if u.scheme else ''
+        self.doc_baseurl = (u.path or '/').rstrip('/') or '/'
+        if 'lib_path' not in self: self['lib_path'] = self.lib_name.replace('-', '_')
+
+    @property
+    def version(self):
+        return read_version(self.config_path / self['lib_path']) or '0.0.1'
+
+    @property
+    def d(self): return {k:v for k,v in super().items()}
+
+    def __getattr__(self, k): return stop(AttributeError(k)) if k=='d' or k not in self.d else self.get(k)
+    def __getitem__(self, k): return stop(IndexError(k)) if k not in self.d else self.get(k)
+
+    def get(self, k, default=None):
+        v = self.d.get(k, default)
+        if v is None: return None
+        return self.config_path / v if k in _path_keys else v
+
+    def path(self, k, default=None):
+        v = self.d.get(k, default)
+        return v if v is None else self.config_path / v
+
+# %% ../nbs/api/01_config.ipynb #6fed9c91
+def _user_config():
+    "Load user config from ~/.config/nbdev/config.toml if it exists"
+    p = xdg_config_home() / _nbdev_home_dir / _user_cfg_name
+    if p.exists(): return _load_toml(p)
+    return {}
+
+# %% ../nbs/api/01_config.ipynb #f6660849
+def get_config(path=None, also_settings=False):
     "Return nbdev config."
-    cfg_file =
-
-
-
-
-
-
-
-
+    cfg_file = _find_nbdev_pyproject(path)
+    if cfg_file is not None:
+        # Check for old settings.ini and complain loudly
+        old_cfg = cfg_file.parent / 'settings.ini'
+        if old_cfg.exists() and not also_settings:
+            raise ValueError(f"Found old settings.ini at {old_cfg}. Please migrate to pyproject.toml using `nbdev_migrate`. See https://nbdev.fast.ai/getting_started.html for details.")
+        d = _load_toml(cfg_file)
+        user = _user_config()
+        nbdev = {**user, **d.get('tool', {}).get('nbdev', {})}
+        return ConfigToml(nbdev, d.get('project', {}), cfg_file)
+    if also_settings:
+        from fastcore.foundation import Config
+        cfg = Config.find('settings.ini', path)
+        if cfg: return cfg
+    cfg_path = Path(path or Path.cwd()).expanduser().absolute()
+    return ConfigToml(nbdev_defaults, {}, cfg_path/'pyproject.toml')

 # %% ../nbs/api/01_config.ipynb #6939b40e
-def is_nbdev(): return
+def is_nbdev(path=None): return _find_nbdev_pyproject(path) is not None

 # %% ../nbs/api/01_config.ipynb #6e89fe6c
 def create_output(txt, mime):
     "Add a cell output containing `txt` of the `mime` text MIME sub-type"
-    return [
-
+    return [dict(data={f"text/{mime}": str(txt).splitlines(True)},
+                 execution_count=1, metadata={}, output_type="execute_result")]

 # %% ../nbs/api/01_config.ipynb #5a4d8e52
 def show_src(src, lang='python'): return Markdown(f'```{lang}\n{src}\n```')
@@ -229,30 +259,57 @@ build-backend = "setuptools.build_meta"
 [project]
 name = "FILL_IN"
 requires-python="FILL_IN"
-dynamic = [ "keywords", "description", "version", "dependencies", "optional-dependencies", "readme",
+dynamic = [ "keywords", "description", "version", "dependencies", "optional-dependencies", "readme",
+    "license", "authors", "classifiers", "entry-points", "scripts", "urls"]

 [tool.uv]
-cache-keys = [{ file = "pyproject.toml" }, { file = "
+cache-keys = [{ file = "pyproject.toml" }, { file = "setup.py" }]
 """

 # %% ../nbs/api/01_config.ipynb #f1c85f45
-_re_version = re.compile(r'^__version__\s
+_re_version = re.compile(r'^__version__\s*=\s*[\'"]([^\'"]+)[\'"]', re.MULTILINE)
 _re_proj = re.compile(r'^name\s*=\s*".*$', re.MULTILINE)
 _re_reqpy = re.compile(r'^requires-python\s*=\s*".*$', re.MULTILINE)
 _init = '__init__.py'
 _pyproj = 'pyproject.toml'

-
-
-
+# %% ../nbs/api/01_config.ipynb #680e62a5
+def read_version(path):
+    "Read __version__ from `path/__init__.py`, or None if not found"
+    fname = Path(path)/_init
+    if not fname.exists(): return None
+    m = _re_version.search(fname.read_text())
+    return m.group(1) if m else None
+
+# %% ../nbs/api/01_config.ipynb #cf0fc6a3
+def set_version(path, version):
+    "Set __version__ in `path/__init__.py`"
+    path = Path(path)
+    path.mkdir(exist_ok=True, parents=True)
     fname = path/_init
     if not fname.exists(): fname.touch()
-
+    ver_line = f'__version__ = "{version}"'
     code = fname.read_text()
-    if _re_version.search(code) is None: code =
-    else: code = _re_version.sub(
+    if _re_version.search(code) is None: code = ver_line + "\n" + code
+    else: code = _re_version.sub(ver_line, code)
     fname.write_text(code)

+# %% ../nbs/api/01_config.ipynb #d00889e5
+def bump_version(v, part=2, unbump=False):
+    "Bump semver string `v` at index `part` (0=major, 1=minor, 2=patch)"
+    parts = (v or '0.0.0').split('.')
+    parts += ['0'] * (3 - len(parts))
+    parts[part] = str(int(parts[part]) + (-1 if unbump else 1))
+    for i in range(part+1, 3): parts[i] = '0'
+    return '.'.join(parts[:3])
+
+# %% ../nbs/api/01_config.ipynb #e32583e6
+def update_version(path=None):
+    "Add __version__ to `path/__init__.py` if it doesn't exist"
+    path = Path(path or get_config().lib_path)
+    if read_version(path) is None: set_version(path, get_config().version)
+
+# %% ../nbs/api/01_config.ipynb #275ef862
 def _has_py(fs): return any(1 for f in fs if f.endswith('.py'))

 def update_proj(path):
@@ -260,10 +317,11 @@ def update_proj(path):
     fname = path/_pyproj
     if not fname.exists(): fname.write_text(pyproj_tmpl)
     txt = fname.read_text()
-    txt = _re_proj.sub(f'name="{get_config().lib_name}"', txt)
-    txt = _re_reqpy.sub(f'requires-python=">={get_config().min_python}"', txt)
+    txt = _re_proj.sub(f'name = "{get_config().lib_name}"', txt)
+    txt = _re_reqpy.sub(f'requires-python = ">={get_config().min_python}"', txt)
     fname.write_text(txt)

+# %% ../nbs/api/01_config.ipynb #bdf57184
 def add_init(path=None):
     "Add `__init__.py` in all subdirs of `path` containing python files if it's not there already."
     # we add the lowest-level `__init__.py` files first, which ensures _has_py succeeds for parent modules
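Version handling now lives in `nbdev/config.py`: `read_version`, `set_version`, `bump_version` and `update_version` work against `__version__` in the package's `__init__.py` rather than a `version` key in `settings.ini`. A minimal sketch of their behaviour, assuming nbdev 3.0.0 is installed with its dependencies (the package directory and version strings below are illustrative):

```python
import tempfile
from pathlib import Path
from nbdev.config import read_version, set_version, bump_version

with tempfile.TemporaryDirectory() as tmp:
    pkg = Path(tmp)/'mylib'                  # hypothetical package directory
    set_version(pkg, '0.1.0')                # writes mylib/__init__.py with __version__ = "0.1.0"
    assert read_version(pkg) == '0.1.0'

assert bump_version('0.1.0') == '0.1.1'           # default part=2 bumps the patch number
assert bump_version('0.1.0', part=1) == '0.2.0'   # bumping minor zeroes the patch
assert bump_version('0.1.1', unbump=True) == '0.1.0'
```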
nbdev/doclinks.py
CHANGED
@@ -107,8 +107,7 @@ def _build_modidx(dest=None, nbs_path=None, skip_exists=False):
     with contextlib.suppress(FileNotFoundError): idxfile.unlink()
     if idxfile.exists(): res = exec_local(idxfile.read_text(encoding='utf-8'), 'd')
     else: res = dict(syms={}, settings={})
-    res['settings'] = {k:v for k,v in get_config().d.items()
-                       if k in ('doc_host','doc_baseurl','lib_path','git_url','branch')}
+    res['settings'] = {k:v for k,v in get_config().d.items() if k in ('doc_host','doc_baseurl','lib_path','git_url','branch')}
     code_root = dest.parent.resolve()
     for file in globtastic(dest, file_glob="*.py", skip_file_re='^_', skip_folder_re=r"\.ipynb_checkpoints"):
         try: res['syms'].update(_get_modidx((dest.parent/file).resolve(), code_root, nbs_path=nbs_path))
@@ -229,12 +228,12 @@ _re_backticks = re.compile(r'`([^`\s]+?)(?:\(\))?`')
 # %% ../nbs/api/05_doclinks.ipynb #3a24b883
 @lru_cache(None)
 def _build_lookup_table(strip_libs=None, incl_libs=None, skip_mods=None):
-    cfg = get_config()
+    cfg = get_config(also_settings=True)
     if strip_libs is None:
-        try: strip_libs = cfg.get('strip_libs'
+        try: strip_libs = cfg.get('strip_libs') or cfg.lib_name
         except FileNotFoundError: strip_libs = 'nbdev'
     skip_mods = setify(skip_mods)
-    strip_libs = L(strip_libs)
+    strip_libs = L(strip_libs.split() if isinstance(strip_libs, str) else strip_libs)
     if incl_libs is not None: incl_libs = (L(incl_libs)+strip_libs).unique()
     entries = {}
     try: eps = entry_points(group='nbdev')
nbdev/export.py
CHANGED
@@ -35,10 +35,9 @@ class ExportModuleProc:

 # %% ../nbs/api/04_export.ipynb #6f524839
 def black_format(cell, # Cell to format
-                 force=False): # Turn black formatting on regardless of
+                 force=False): # Turn black formatting on regardless of pyproject.toml
     "Processor to format code with `black`"
-
-    except FileNotFoundError: return
+    cfg = get_config()
     if (not cfg.black_formatting and not force) or cell.cell_type != 'code': return
     try: import black
     except: raise ImportError("You must install black: `pip install black` if you wish to use black formatting with nbdev")
@@ -53,8 +52,7 @@ _magics_pattern = re.compile(r'^\s*(%%|%).*\n?', re.MULTILINE)

 def scrub_magics(cell): # Cell to format
     "Processor to remove cell magics from exported code"
-
-    except FileNotFoundError: return
+    cfg = get_config()
     if cell.cell_type != 'code': return
     try: cell.source = _magics_pattern.sub('', cell.source)
     except: pass
nbdev/migrate.py
CHANGED
@@ -3,14 +3,14 @@
 # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/api/16_migrate.ipynb.

 # %% auto #0
-__all__ = ['MigrateProc', 'fp_md_fm', 'migrate_nb', 'migrate_md', 'nbdev_migrate']
+__all__ = ['MigrateProc', 'fp_md_fm', 'migrate_nb', 'migrate_md', 'nbdev_migrate', 'nbdev_migrate_config']

 # %% ../nbs/api/16_migrate.ipynb #5b687fa0-dc50-48df-8bfc-e98df34e7572
 from .process import *
 from .frontmatter import *
 from .frontmatter import _fm2dict, _re_fm_md, _dict2fm, _insertfm
 from .processors import *
-from .config import get_config, read_nb
+from .config import get_config, read_nb, set_version, pyproject_tmpl, nbdev_defaults
 from .sync import write_nb
 from .showdoc import show_doc
 from fastcore.all import *
@@ -179,3 +179,97 @@ def nbdev_migrate(
             if f.name.endswith('.ipynb'): migrate_nb(f)
             if f.name.endswith('.md'): migrate_md(f)
         except Exception as e: raise Exception(f'Error in migrating file: {f}') from e
+
+# %% ../nbs/api/16_migrate.ipynb #ad76e503
+_license_map = {'apache2': 'Apache-2.0', 'mit': 'MIT', 'gpl2': 'GPL-2.0', 'gpl3': 'GPL-3.0', 'bsd3': 'BSD-3-Clause'}
+
+# %% ../nbs/api/16_migrate.ipynb #7647b587
+def _migrate_workflows(path):
+    "Update GitHub workflow files to use nbdev3 workflows"
+    wf_path = Path(path) / '.github/workflows'
+    if not wf_path.exists(): return
+    replacements = [
+        ('fastai/workflows/quarto-ghp@', 'fastai/workflows/quarto-ghp3@'),
+        ('fastai/workflows/nbdev-ci@', 'fastai/workflows/nbdev-ci3@'),
+    ]
+    for f in wf_path.glob('*.yml'):
+        txt = f.read_text()
+        for old, new in replacements: txt = txt.replace(old, new)
+        f.write_text(txt)
+
+# %% ../nbs/api/16_migrate.ipynb #1ca2d1b3
+def _toml_val(v):
+    if v.lower() in ('true','false'): return v.lower()
+    return repr(v)
+def _py_val(v):
+    try: return str2bool(v)
+    except: return v
+
+
+def _fmt_script(s):
+    name,val = s.strip().split('=')
+    return f'{name.strip()} = "{val.strip()}"'
+
+
+def _build_classifiers(d):
+    "Build list of classifier strings from config dict"
+    _status_map = {'1': 'Planning', '2': 'Pre-Alpha', '3': 'Alpha', '4': 'Beta', '5': 'Production/Stable', '6': 'Mature', '7': 'Inactive'}
+    cs = ['Programming Language :: Python :: 3', 'Programming Language :: Python :: 3 :: Only']
+    status,audience,language = d.get('status', '').strip(), d.get('audience', '').strip(), d.get('language', '').strip()
+    if status and status in _status_map: cs.insert(0, f"Development Status :: {status} - {_status_map[status]}")
+    if audience: cs.insert(0, f"Intended Audience :: {audience}")
+    if language: cs.insert(0, f"Natural Language :: {language}")
+    return cs
+
+# %% ../nbs/api/16_migrate.ipynb #796a2c4b
+def _nbdev_migrate_config(d, path): # Config dict from settings.ini
+    "Migrate settings.ini dict to pyproject.toml string"
+    repo = d.get('repo', '')
+    user = d.get('user', '')
+    lib_path = d.get('lib_path', repo.replace('-', '_'))
+    branch = d.get('branch', 'main')
+
+    git_url = d.get('git_url') or (f"https://github.com/{user}/{repo}" if user else '')
+    doc_host = d.get('doc_host', '')
+    doc_baseurl = d.get('doc_baseurl', '')
+    doc_url = (doc_host.rstrip('/') + doc_baseurl) if doc_host else (f"https://{user}.github.io/{repo}/" if user else '')
+    set_version(path/lib_path, d.get('version', '0.0.1'))
+    lib_name = d.get('lib_name', repo)
+    txt = pyproject_tmpl.format(name=lib_name, lib_path=lib_path, description=d.get('description', ''),
+        min_python=d.get('min_python', '3.10'), license=_license_map.get(d.get('license', ''), d.get('license', 'Apache-2.0')),
+        author=d.get('author', ''), author_email=d.get('author_email', ''),
+        keywords=d.get('keywords', 'nbdev').split(), git_url=git_url, doc_url=doc_url, branch=branch)
+    # Add dependencies (combine requirements + pip_requirements)
+    reqs = d.get('requirements', '').split() + d.get('pip_requirements', '').split()
+    if reqs: txt = txt.replace('dependencies = []', f'dependencies = {reqs}')
+    dev_reqs = d.get('dev_requirements', '').split()
+    if dev_reqs: txt = txt.replace('[tool.setuptools', f'[project.optional-dependencies]\ndev = {dev_reqs}\n\n[tool.setuptools', 1)
+    # Add console_scripts
+    scripts = d.get('console_scripts', '').strip()
+    if scripts:
+        scripts_lines = [_fmt_script(s) for s in scripts.split('\n') if s.strip()]
+        scripts_toml = '\n[project.scripts]\n' + '\n'.join(scripts_lines)
+        txt = txt.replace('[tool.setuptools', scripts_toml + '\n\n[tool.setuptools', 1)
+    _classifiers_default = 'classifiers = [\n "Programming Language :: Python :: 3",\n "Programming Language :: Python :: 3 :: Only",\n]'
+    txt = txt.replace(_classifiers_default, 'classifiers = ' + repr(_build_classifiers(d)).replace("'", '"'))
+
+    nbdev_settings = {k:d[k] for k in ('nbs_path','doc_path','branch','recursive','readme_nb','tst_flags',
+        'clean_ids','clear_all','put_version_in_init','jupyter_hooks','custom_sidebar','title')
+        if k in d and _py_val(d[k]) != nbdev_defaults.get(k) and not (k=='title' and d[k]==repo)}
+    if nbdev_settings:
+        nbdev_toml = '\n'.join(f'{k} = {_toml_val(v)}' for k,v in nbdev_settings.items())
+        txt = txt.rstrip() + '\n' + nbdev_toml + '\n'
+    _migrate_workflows(path)
+    return txt
+
+# %% ../nbs/api/16_migrate.ipynb #a9534478
+@call_parse
+def nbdev_migrate_config(path:str='.'): # Project root containing settings.ini
+    "Migrate settings.ini to pyproject.toml"
+    path = Path(path)
+    ini = path/'settings.ini'
+    if not ini.exists(): return print(f"No settings.ini found at {ini}")
+    cfg = Config(path, 'settings.ini')
+    txt = _nbdev_migrate_config(cfg.d, path)
+    (path/'pyproject.toml').write_text(txt)
+    print(f"Created {path/'pyproject.toml'}. You can now delete {ini} and setup.py (if present)")
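The new `nbdev_migrate_config` command (registered in `entry_points.txt` further down) turns a `settings.ini` into a `pyproject.toml`. Its helpers are plain functions, so the mapping can be checked in isolation; a small sketch assuming nbdev 3.0.0 is installed (the input dict stands in for a parsed `settings.ini` and is illustrative):

```python
# _build_classifiers and _toml_val are the private helpers added in the migrate.py hunk above.
from nbdev.migrate import _build_classifiers, _toml_val

ini = {'status': '3', 'audience': 'Developers', 'language': 'English'}  # hypothetical settings.ini values
print(_build_classifiers(ini))
# ['Natural Language :: English', 'Intended Audience :: Developers', 'Development Status :: 3 - Alpha',
#  'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3 :: Only']

print(_toml_val('True'), _toml_val('nbs'))  # booleans pass through lowercased, strings are quoted: true 'nbs'
```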
nbdev/quarto.py
CHANGED
@@ -110,7 +110,7 @@ class IndentDumper(yaml.Dumper):
 def nbdev_sidebar(
     path:str=None, # Path to notebooks
     printit:bool=False, # Print YAML for debugging
-    force:bool=False, # Create sidebar even if
+    force:bool=False, # Create sidebar even if custom_sidebar=false in pyproject.toml
     skip_folder_re:str=r'(?:^[_.]|^www$)', # Skip folders matching regex
     **kwargs):
     "Create sidebar.yml"
@@ -177,7 +177,7 @@ website:

 # %% ../nbs/api/14_quarto.ipynb #38124450
 def refresh_quarto_yml():
-    "Generate `_quarto.yml` from `
+    "Generate `_quarto.yml` from `pyproject.toml`."
     cfg = get_config()
     ny = cfg.nbs_path/'nbdev.yml'
     vals = {k:cfg[k] for k in ['title', 'description', 'branch', 'git_url', 'doc_host', 'doc_baseurl']}
@@ -185,7 +185,7 @@ def refresh_quarto_yml():
     if 'title' not in vals: vals['title'] = vals['lib_name']
     ny.write_text(_nbdev_yml.format(**vals))
     qy = cfg.nbs_path/'_quarto.yml'
-    if 'custom_quarto_yml' in cfg
+    if 'custom_quarto_yml' in cfg: print("NB: `_quarto.yml` is no longer auto-updated. Remove `custom_quarto_yml` from `pyproject.toml`")
     if qy.exists() and not str2bool(cfg.get('custom_quarto_yml', True)): qy.unlink()
     if not qy.exists(): qy.write_text(_quarto_yml)

nbdev/release.py
CHANGED
@@ -5,8 +5,7 @@
 # %% auto #0
 __all__ = ['GH_HOST', 'CONDA_WARNING', 'Release', 'changelog', 'push_release', 'release_git', 'release_gh', 'pypi_json',
            'latest_pypi', 'pypi_details', 'conda_output_path', 'write_conda_meta', 'write_requirements',
-           'anaconda_upload', 'release_conda', 'chk_conda_rel', 'release_pypi', 'release_both', '
-           'nbdev_bump_version']
+           'anaconda_upload', 'release_conda', 'chk_conda_rel', 'release_pypi', 'release_both', 'nbdev_bump_version']

 # %% ../nbs/api/18_release.ipynb #c35cc2b8
 from fastcore.all import *
@@ -22,10 +21,7 @@ from .doclinks import *
 GH_HOST = "https://api.github.com"

 # %% ../nbs/api/18_release.ipynb #e220cefa
-def _find_config(
-    cfg_path = Path().absolute()
-    while cfg_path != cfg_path.parent and not (cfg_path/cfg_name).exists(): cfg_path = cfg_path.parent
-    return Config(cfg_path, cfg_name)
+def _find_config(): return get_config()

 # %% ../nbs/api/18_release.ipynb #1972609a
 def _issue_txt(issue):
@@ -109,7 +105,7 @@ def latest_notes(self:Release):
 @call_parse
 def changelog(
     debug:store_true=False, # Print info to be added to CHANGELOG, instead of updating file
-    repo:str=None, # repo to use instead of `lib_name` from
+    repo:str=None, # repo to use instead of `lib_name` from pyproject.toml
 ):
     "Create a CHANGELOG.md file from closed and labeled GitHub issues"
     res = Release(repo=repo).changelog(debug=debug)
@@ -211,8 +207,8 @@ def _get_conda_meta():

     hostreqs = ['packaging', f'python >={cfg.min_python}']
     reqs = hostreqs+[]
-    if cfg.get('requirements'): reqs += cfg.requirements
-    if cfg.get('conda_requirements'): reqs += cfg.conda_requirements
+    if cfg.get('requirements'): reqs += cfg.requirements
+    if cfg.get('conda_requirements'): reqs += cfg.conda_requirements

     pypi = pypi_json(f'{name}/{ver}')
     rel = [o for o in pypi['urls'] if o['packagetype']=='sdist'][0]
@@ -250,10 +246,10 @@ def write_conda_meta(path='conda'):
 # %% ../nbs/api/18_release.ipynb #7550557f
 @call_parse
 def write_requirements(path:str=''):
-    "Writes a `requirements.txt` file to `directory` based on
+    "Writes a `requirements.txt` file to `directory` based on pyproject.toml."
     cfg = get_config()
     d = Path(path) if path else cfg.config_path
-    req = '\n'.join([cfg.get(k
+    req = '\n'.join(['\n'.join(cfg.get(k) or []) for k in ['requirements', 'pip_requirements']])
     (d/'requirements.txt').mk_write(req)

 # %% ../nbs/api/18_release.ipynb #715ae3ac
@@ -297,7 +293,7 @@ def release_conda(
     loc = outs[0]
     if skip_upload: return print(loc)
     if not upload_user: upload_user = get_config().conda_user
-    if not upload_user: return print("`conda_user` not in
+    if not upload_user: return print("`conda_user` not in pyproject.toml and no `upload_user` passed. Cannot upload")
     if 'anaconda upload' not in res: return print(f"{res}\n\nFailed. Check auto-upload not set in .condarc. Try `--do_build False`.")
     return anaconda_upload(name, loc)

@@ -344,24 +340,16 @@ def release_both(
     release_conda.__wrapped__(path, do_build=do_build, build_args=build_args, skip_upload=skip_upload, mambabuild=mambabuild, upload_user=upload_user)
     nbdev_bump_version.__wrapped__()

-# %% ../nbs/api/18_release.ipynb #6380dd5a
-def bump_version(version, part=2, unbump=False):
-    version = version.split('.')
-    incr = -1 if unbump else 1
-    version[part] = str(int(version[part]) + incr)
-    for i in range(part+1, 3): version[i] = '0'
-    return '.'.join(version)
-
 # %% ../nbs/api/18_release.ipynb #c0f64b2c
 @call_parse
 def nbdev_bump_version(
     part:int=2, # Part of version to bump
     unbump:bool=False): # Reduce version instead of increasing it
-    "Increment version in
+    "Increment version in __init__.py by one"
     cfg = get_config()
-
-
-
-
+    old_ver = read_version(cfg.lib_path)
+    print(f'Old version: {old_ver}')
+    new_ver = bump_version(old_ver, part, unbump=unbump)
+    set_version(cfg.lib_path, new_ver)
     nbdev_export.__wrapped__()
-    print(f'New version: {
+    print(f'New version: {new_ver}')
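With `bump_version` removed from `nbdev/release.py`, `nbdev_bump_version` now delegates to the helpers in `nbdev.config` and rewrites `__version__` in the package's `__init__.py`. A minimal sketch of the same flow outside the CLI, assuming an nbdev 3.0.0 project whose `pyproject.toml` carries a `[tool.nbdev]` section:

```python
from nbdev.config import get_config, read_version, bump_version, set_version

cfg = get_config()                  # finds the nearest pyproject.toml containing [tool.nbdev]
old = read_version(cfg.lib_path)    # __version__ from <lib_path>/__init__.py
new = bump_version(old, part=2)     # 0=major, 1=minor, 2=patch
set_version(cfg.lib_path, new)
print(f'{old} -> {new}')
```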
nbdev/serve.py
CHANGED
@@ -36,7 +36,7 @@ def _is_qpy(path:Path):

 # %% ../nbs/api/17_serve.ipynb #abc3835a
 def _proc_file(s, cache, path, mtime=None):
-    skips = ('_proc', '_docs', '_site', '
+    skips = ('_proc', '_docs', '_site', 'pyproject.toml')
     if not s.is_file() or any(o[0]=='.' or o in skips for o in s.parts): return
     d = cache/s.relative_to(path)
     if s.suffix=='.py': d = d.with_suffix('')
@@ -69,7 +69,7 @@ def proc_nbs(
     if (path/'_brand.yml').exists(): files.append(path/'_brand.yml')
     if (path/'_extensions').exists(): files.extend(nbglob(path/'_extensions', func=Path, file_glob='', file_re='', skip_file_re='^[.]'))

-    # If
+    # If pyproject.toml or filter script newer than cache folder modified, delete cache
     chk_mtime = max(cfg.config_file.stat().st_mtime, Path(__file__).stat().st_mtime)
     cache.mkdir(parents=True, exist_ok=True)
     cache_mtime = cache.stat().st_mtime
nbdev/test.py
CHANGED
@@ -78,7 +78,7 @@ def nbdev_test(
     ignore_fname:str='.notest', # Filename that will result in siblings being ignored
     **kwargs):
     "Test in parallel notebooks matching `path`, passing along `flags`"
-    skip_flags = get_config().tst_flags
+    skip_flags = get_config().tst_flags
     force_flags = flags.split()
     files = nbglob(path, as_path=True, **kwargs)
     files = [f.absolute() for f in sorted(files) if _keep_file(f, ignore_fname)]
@@ -86,7 +86,7 @@ def nbdev_test(

     if n_workers is None: n_workers = 0 if len(files)==1 else min(num_cpus(), 8)
     if IN_NOTEBOOK: kw = {'method':'spawn'} if os.name=='nt' else {'method':'forkserver'}
-    else: kw = {}
+    else: kw = {'method':'forkserver'} if sys.platform=='darwin' else {}
     wd_pth = get_config().nbs_path
     with working_directory(wd_pth if (wd_pth and wd_pth.exists()) else os.getcwd()):
         results = parallel(test_nb, files, skip_flags=skip_flags, force_flags=force_flags, n_workers=n_workers,
{nbdev-2.4.14.dist-info → nbdev-3.0.0.dist-info}/METADATA
CHANGED
@@ -1,25 +1,21 @@
 Metadata-Version: 2.4
 Name: nbdev
-Version: 2.4.14
+Version: 3.0.0
 Summary: Create delightful software with Jupyter Notebooks
-
-
-
-License: Apache Software License 2.0
+Author-email: "Jeremy Howard and the fast.ai community" <j@fast.ai>
+License: Apache-2.0
+Project-URL: Repository, https://github.com/AnswerDotAI/nbdev
 Project-URL: Documentation, https://nbdev.fast.ai/
-Keywords: nbdev
+Keywords: nbdev,fastai,jupyter,notebook,export
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: Natural Language :: English
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: License :: OSI Approved :: Apache Software License
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: packaging
 Requires-Dist: fastcore>=1.12.3
 Requires-Dist: execnb>=0.1.12
 Requires-Dist: astunparse
@@ -29,7 +25,8 @@ Requires-Dist: asttokens
 Requires-Dist: setuptools
 Requires-Dist: build
 Requires-Dist: fastgit
-Requires-Dist:
+Requires-Dist: pyyaml
+Requires-Dist: tomli; python_version < "3.11"
 Provides-Extra: dev
 Requires-Dist: ipywidgets; extra == "dev"
 Requires-Dist: nbdev-numpy; extra == "dev"
@@ -43,20 +40,7 @@ Requires-Dist: pysymbol_llm; extra == "dev"
 Requires-Dist: llms-txt; extra == "dev"
 Requires-Dist: sphinx; extra == "dev"
 Requires-Dist: plum-dispatch; extra == "dev"
-Dynamic: author
-Dynamic: author-email
-Dynamic: classifier
-Dynamic: description
-Dynamic: description-content-type
-Dynamic: home-page
-Dynamic: keywords
-Dynamic: license
 Dynamic: license-file
-Dynamic: project-url
-Dynamic: provides-extra
-Dynamic: requires-dist
-Dynamic: requires-python
-Dynamic: summary

 # Getting Started

nbdev-3.0.0.dist-info/RECORD
ADDED
@@ -0,0 +1,30 @@
+nbdev/__init__.py,sha256=FZcrF2UcGvIIv-NsgjStcZfMWxygrZaRyFodnwjqpcU,89
+nbdev/_modidx.py,sha256=7jB1wx0EmQtah_L-jwRW360KwP02VB-TOuK9qY2o3ZI,40082
+nbdev/clean.py,sha256=817PYPBGh0Pn-rmhzOFNVfKtaLJGkMF7Vzqb2Q5PCeI,9744
+nbdev/cli.py,sha256=ZYJdR5npjv1kbyJrsWLycgzaAES8tR41mCHhDqdoY54,7772
+nbdev/config.py,sha256=l7WJVg2BP5fm4DShYDlIea81pTMs4XBHgqAOwlobyPI,14821
+nbdev/diff.py,sha256=8vzFbmk8jmn4LW66wXEDJVxsJTf4eegNMbBVV4jI708,3794
+nbdev/doclinks.py,sha256=oSvmFibewmC3vNOrNLNzcOU7Fv6ZaJPTcQx0h9AMWeM,12829
+nbdev/export.py,sha256=7Lg9MZXlp2YVxpODpe8EuNtDJcz7B_8U0lxEGbq1B3k,4040
+nbdev/extract_attachments.py,sha256=O4mS4EFIOXL_yQ3jmsnBStrWxGR_nPNvxLYXHtLeimw,2208
+nbdev/frontmatter.py,sha256=CdJj6B1NyodH--uOCfWhGK0vI7gnKp-i2EzKce47kQQ,2820
+nbdev/imports.py,sha256=f5Ynco14hsJyFCf43-uP_YARMhHADe6lM-20Mc_vXhw,95
+nbdev/maker.py,sha256=OG0l-L-_zzErA5boWpt-0Ozi8eZU_2SSbY_-peo13_E,10011
+nbdev/merge.py,sha256=WOjbqKwDfNK16L9P-eZ-IPUTgC1brOWg6jaKfDziDIc,4432
+nbdev/migrate.py,sha256=-GuFzUAZJivPPCI6tlQI_ZpSWCKsBFQyqkTo23p-AGI,12860
+nbdev/process.py,sha256=cSKAdQ-KDPv6UNLqii61SnmFsw8qmcY-YwBPUbwRd18,6163
+nbdev/processors.py,sha256=SL4Y4ha1ZQU-dV1nZyyb68viq0X2tE2PfBX26_LEzYY,12364
+nbdev/qmd.py,sha256=4_paV81SjRh2TmDS5tyu8Y2TW1X_yg0PYGAi6fOw3ek,3129
+nbdev/quarto.py,sha256=2uxekOFtFJ47NalegwLoZw6Jd-C3gf5BJWhHsraf2J0,14126
+nbdev/release.py,sha256=KbnwAeNer9zVCFXR6C3ZmO-Ggb8PTOMcNazzbGLDkIk,14632
+nbdev/serve.py,sha256=UQD30QsO3KFhN95kcLTwXyXcg6GrrpFP8Ek5OCcKIRs,3191
+nbdev/serve_drv.py,sha256=IZ2acem_KKsXYYe0iUECiR_orkYLBkT1ZG_258ZS7SQ,657
+nbdev/showdoc.py,sha256=rgVpHGVQi14RBdLYnh3ukPPG9EEKGyTMW_zGAH7HM7o,2657
+nbdev/sync.py,sha256=eMYZYzu3RMDWjY-apr2TyzB7cTEB9v5-svF_Z--xyj8,3391
+nbdev/test.py,sha256=KBJL4dJjxaGGqKqaQONnLP-ugX79RV4B0TDUG3MnHnw,4579
+nbdev-3.0.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+nbdev-3.0.0.dist-info/METADATA,sha256=for9KQEuuTdd4LJthmMGNn3J4WGeeonVhFK0h49r6sc,10475
+nbdev-3.0.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+nbdev-3.0.0.dist-info/entry_points.txt,sha256=N55SDYwtagnT3kgSUdZigVdm5yp_mYgLZzd0GelV23s,1511
+nbdev-3.0.0.dist-info/top_level.txt,sha256=3cWYLMuaXsZjz3TQRGEkWGs9Z8ieEDmYcq8TZS3y3vU,6
+nbdev-3.0.0.dist-info/RECORD,,
{nbdev-2.4.14.dist-info → nbdev-3.0.0.dist-info}/entry_points.txt
CHANGED
@@ -16,6 +16,7 @@ nbdev_install_hooks = nbdev.clean:nbdev_install_hooks
 nbdev_install_quarto = nbdev.quarto:install_quarto
 nbdev_merge = nbdev.merge:nbdev_merge
 nbdev_migrate = nbdev.migrate:nbdev_migrate
+nbdev_migrate_config = nbdev.migrate:nbdev_migrate_config
 nbdev_new = nbdev.cli:nbdev_new
 nbdev_prepare = nbdev.quarto:prepare
 nbdev_preview = nbdev.quarto:nbdev_preview
nbdev-2.4.14.dist-info/RECORD
DELETED
@@ -1,30 +0,0 @@
-nbdev/__init__.py,sha256=zJpD-99JRntw637921eGhSPcki1g8j7Ugz-MUxnPi-8,90
-nbdev/_modidx.py,sha256=9Ha0XafZNcnsUIju5aJx7GVqkgwX35vVGanoCgfbN-s,38372
-nbdev/clean.py,sha256=rTLJsz_gNqd9TdTBLmEuexXN6lZPCq6PVS_Xd4JGMZ0,9748
-nbdev/cli.py,sha256=csOPIFIz8fouyjTx-OX_0it1Fts-Z_c72dnIOxXxGDc,7588
-nbdev/config.py,sha256=01DWLwkfdsn7TkYklO52LagvzI8UenfEga1GwyAxc-o,13760
-nbdev/diff.py,sha256=8vzFbmk8jmn4LW66wXEDJVxsJTf4eegNMbBVV4jI708,3794
-nbdev/doclinks.py,sha256=_t7frF7hdboJLJmYoPfjOiSvkSBtTKlh4AcjyTeD3XQ,12806
-nbdev/export.py,sha256=CVJGQ8suzO2LZHqVCzJKjmoR42R5yTRWA76KY1sWnQA,4122
-nbdev/extract_attachments.py,sha256=O4mS4EFIOXL_yQ3jmsnBStrWxGR_nPNvxLYXHtLeimw,2208
-nbdev/frontmatter.py,sha256=CdJj6B1NyodH--uOCfWhGK0vI7gnKp-i2EzKce47kQQ,2820
-nbdev/imports.py,sha256=f5Ynco14hsJyFCf43-uP_YARMhHADe6lM-20Mc_vXhw,95
-nbdev/maker.py,sha256=OG0l-L-_zzErA5boWpt-0Ozi8eZU_2SSbY_-peo13_E,10011
-nbdev/merge.py,sha256=WOjbqKwDfNK16L9P-eZ-IPUTgC1brOWg6jaKfDziDIc,4432
-nbdev/migrate.py,sha256=uhmkStmHPb082VDKnAPlD2vOVUi1iYOYdKzqHwF12cc,7902
-nbdev/process.py,sha256=cSKAdQ-KDPv6UNLqii61SnmFsw8qmcY-YwBPUbwRd18,6163
-nbdev/processors.py,sha256=SL4Y4ha1ZQU-dV1nZyyb68viq0X2tE2PfBX26_LEzYY,12364
-nbdev/qmd.py,sha256=4_paV81SjRh2TmDS5tyu8Y2TW1X_yg0PYGAi6fOw3ek,3129
-nbdev/quarto.py,sha256=Hfxj_bCrm0OlKGh6S954d1E2BCfPL1q0901RRvC4bCA,14119
-nbdev/release.py,sha256=8PuCYqLb2XpPyrCPo70fVWIRlua_kbRWUtEJH68Cics,15126
-nbdev/serve.py,sha256=XqTB9Mjgs_X3U5VNJ-vPbx2rs8ayBog5cfPiFumBPt4,3187
-nbdev/serve_drv.py,sha256=IZ2acem_KKsXYYe0iUECiR_orkYLBkT1ZG_258ZS7SQ,657
-nbdev/showdoc.py,sha256=rgVpHGVQi14RBdLYnh3ukPPG9EEKGyTMW_zGAH7HM7o,2657
-nbdev/sync.py,sha256=eMYZYzu3RMDWjY-apr2TyzB7cTEB9v5-svF_Z--xyj8,3391
-nbdev/test.py,sha256=74rkaOEY4KOeosB84V3DYvUsv-klVDdubuw_SWL7-vo,4532
-nbdev-2.4.14.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-nbdev-2.4.14.dist-info/METADATA,sha256=inKYYMmfF-Myy0ELMNLm80NJRYR0JIFTvmpccXX60CQ,10829
-nbdev-2.4.14.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-nbdev-2.4.14.dist-info/entry_points.txt,sha256=1ADLbIIJxZeLgOD8NpizkPULSsd_fgUQxwAkbGk45b8,1453
-nbdev-2.4.14.dist-info/top_level.txt,sha256=3cWYLMuaXsZjz3TQRGEkWGs9Z8ieEDmYcq8TZS3y3vU,6
-nbdev-2.4.14.dist-info/RECORD,,
{nbdev-2.4.14.dist-info → nbdev-3.0.0.dist-info}/licenses/LICENSE
File without changes
{nbdev-2.4.14.dist-info → nbdev-3.0.0.dist-info}/top_level.txt
File without changes