ipykernel-helper 0.0.14__py3-none-any.whl → 0.0.20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ipykernel_helper/__init__.py +1 -1
- ipykernel_helper/_modidx.py +10 -0
- ipykernel_helper/core.py +114 -44
- {ipykernel_helper-0.0.14.dist-info → ipykernel_helper-0.0.20.dist-info}/METADATA +4 -2
- ipykernel_helper-0.0.20.dist-info/RECORD +9 -0
- ipykernel_helper-0.0.14.dist-info/RECORD +0 -9
- {ipykernel_helper-0.0.14.dist-info → ipykernel_helper-0.0.20.dist-info}/WHEEL +0 -0
- {ipykernel_helper-0.0.14.dist-info → ipykernel_helper-0.0.20.dist-info}/entry_points.txt +0 -0
- {ipykernel_helper-0.0.14.dist-info → ipykernel_helper-0.0.20.dist-info}/licenses/LICENSE +0 -0
- {ipykernel_helper-0.0.14.dist-info → ipykernel_helper-0.0.20.dist-info}/top_level.txt +0 -0
ipykernel_helper/__init__.py
CHANGED
@@ -1,2 +1,2 @@
-__version__ = "0.0.14"
+__version__ = "0.0.20"
 from .core import *
ipykernel_helper/_modidx.py
CHANGED
@@ -21,13 +21,23 @@ d = { 'settings': { 'branch': 'main',
                     'ipykernel_helper/core.py'),
     'ipykernel_helper.core.InteractiveShell.xpush': ( 'core.html#interactiveshell.xpush',
                     'ipykernel_helper/core.py'),
+    'ipykernel_helper.core._absolutify_imgs': ('core.html#_absolutify_imgs', 'ipykernel_helper/core.py'),
+    'ipykernel_helper.core._aify_imgs': ('core.html#_aify_imgs', 'ipykernel_helper/core.py'),
+    'ipykernel_helper.core._convert_math': ('core.html#_convert_math', 'ipykernel_helper/core.py'),
+    'ipykernel_helper.core._extract_section': ('core.html#_extract_section', 'ipykernel_helper/core.py'),
+    'ipykernel_helper.core._get_math_mode': ('core.html#_get_math_mode', 'ipykernel_helper/core.py'),
     'ipykernel_helper.core._get_schema': ('core.html#_get_schema', 'ipykernel_helper/core.py'),
     'ipykernel_helper.core._rank': ('core.html#_rank', 'ipykernel_helper/core.py'),
     'ipykernel_helper.core._safe_repr': ('core.html#_safe_repr', 'ipykernel_helper/core.py'),
     'ipykernel_helper.core._signatures': ('core.html#_signatures', 'ipykernel_helper/core.py'),
+    'ipykernel_helper.core.fix_editable_priority': ( 'core.html#fix_editable_priority',
+                    'ipykernel_helper/core.py'),
     'ipykernel_helper.core.get_md': ('core.html#get_md', 'ipykernel_helper/core.py'),
+    'ipykernel_helper.core.gh_blob_to_raw': ('core.html#gh_blob_to_raw', 'ipykernel_helper/core.py'),
     'ipykernel_helper.core.load_ipython_extension': ( 'core.html#load_ipython_extension',
                     'ipykernel_helper/core.py'),
+    'ipykernel_helper.core.parse_gh_url': ('core.html#parse_gh_url', 'ipykernel_helper/core.py'),
+    'ipykernel_helper.core.read_gh_repo': ('core.html#read_gh_repo', 'ipykernel_helper/core.py'),
     'ipykernel_helper.core.read_url': ('core.html#read_url', 'ipykernel_helper/core.py'),
     'ipykernel_helper.core.run_cmd': ('core.html#run_cmd', 'ipykernel_helper/core.py'),
     'ipykernel_helper.core.scrape_url': ('core.html#scrape_url', 'ipykernel_helper/core.py'),
ipykernel_helper/core.py
CHANGED
@@ -3,7 +3,8 @@
 # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/00_core.ipynb.

 # %% auto 0
-__all__ = ['transient', 'run_cmd', 'get_md', 'scrape_url', '
+__all__ = ['transient', 'run_cmd', 'get_md', 'scrape_url', 'parse_gh_url', 'gh_blob_to_raw', 'read_gh_repo', 'read_url',
+           'fix_editable_priority', 'load_ipython_extension']

 # %% ../nbs/00_core.ipynb
 from fastcore.meta import delegates
@@ -16,8 +17,11 @@ from textwrap import dedent
 from cloudscraper import create_scraper
 from toolslm.funccall import *
 from ast import literal_eval
+from urllib.parse import urlparse, urljoin
+from fastcore.net import HTTP404NotFoundError
+from ghapi.all import GhApi

-import typing,warnings,re
+import typing,warnings,re,os,html2text,base64

 from IPython.core.interactiveshell import InteractiveShell
 from IPython.core.completer import ProvisionalCompleterWarning
@@ -115,7 +119,7 @@ def get_vars(self:InteractiveShell, vs:list, literal=True):
 def _get_schema(ns: dict, t):
     "Check if tool `t` has errors."
     if t not in ns: return f"`{t}` not found. Did you run it?"
-    try: return get_schema(ns[t])
+    try: return {'type':'function', 'function':get_schema(ns[t], pname='parameters', evalable=True, skip_hidden=True)}
     except Exception as e: return f"`{t}`: {e}."

 @patch
@@ -146,56 +150,122 @@ def run_cmd(cmd, data='', meta=None, update=False, **kw):
     transient(data, meta=meta, update=update, cmd=cmd, **kw)

 # %% ../nbs/00_core.ipynb
-def
-
-
-
-
-
-
-
-
+def _absolutify_imgs(md, base_url):
+    def fix(m):
+        alt,img_url = m.group(1),m.group(2)
+        if not img_url.startswith('http'): img_url = urljoin(base_url, img_url)
+        alt = alt.replace('\\','')
+        return f'![{alt}]({img_url})'
+    return re.sub(r'!\[(.*?)\]\((.*?)\)', fix, md)
+
+# %% ../nbs/00_core.ipynb
+def get_md(html, url='', mmode=None, ignore_links=False, ignore_images=False, mark_code=True):
+    "Convert HTML to markdown with absolute image URLs and optional math mode"
+    h = html2text.HTML2Text()
+    h.body_width = 0
+    h.ignore_links, h.ignore_images, h.mark_code = ignore_links, ignore_images, mark_code
+    res = _absolutify_imgs(h.handle(str(html)), url)
+    if mmode == 'safe': res = res.replace(r'\\(',r'\(').replace(r'\\)',r'\)')
+    return re.sub(r'\[code]\s*\n(.*?)\n\[/code]', lambda m: f'```\n{dedent(m.group(1))}\n```', res, flags=re.DOTALL).strip()

 # %% ../nbs/00_core.ipynb
 def scrape_url(url): return create_scraper().get(url)

 # %% ../nbs/00_core.ipynb
-def
-
-
-
-
-
-
-
+def _get_math_mode():
+    v = os.getenv('USE_KATEX', '')
+    if v.lower() in {'0', 'false', 'none', ''}: return None
+    return 'dollar' if v.lower().startswith('d') else 'safe'
+
+# %% ../nbs/00_core.ipynb
+def _aify_imgs(md): return re.sub(r'!\[(.*?)\]\((.*?)\)', r'', md)
+
+# %% ../nbs/00_core.ipynb
+def parse_gh_url(url):
+    "Parse GitHub URL into (owner, repo, type, ref, path) or None"
+    m = re.match(r'https?://(?:www\.)?github\.com/([^/]+)/([^/]+)(?:/([^/]+)(?:/([^/]+)(?:/(.+))?)?)?', url)
+    return dict(zip('owner repo typ ref path'.split(), m.groups())) if m else None
+
+# %% ../nbs/00_core.ipynb
+def gh_blob_to_raw(url):
+    "Convert github.com/user/repo/blob/... URL to raw.githubusercontent.com URL"
+    m = re.match(r'https?://(?:www\.)?github\.com/([^/]+)/([^/]+)/blob/([^/]+)/(.+)', url)
+    if not m: return url
+    owner, repo, ref, path = m.groups()
+    return f'https://raw.githubusercontent.com/{owner}/{repo}/{ref}/{path}'
+
+# %% ../nbs/00_core.ipynb
+def _extract_section(soup, url, selector=None):
+    "Extract a specific section from soup, or the whole thing"
+    if selector: return '\n\n'.join(str(s) for s in soup.select(selector))
+    parsed = urlparse(url)
+    if not parsed.fragment: return str(soup)
+    section = soup.find(id=parsed.fragment)
+    if not section: return ''
+    elements = [section]
+    current = section.next_sibling
+    while current:
+        if hasattr(current, 'name') and current.name == section.name: break
+        elements.append(current)
+        current = current.next_sibling
+    return ''.join(str(el) for el in elements)
+
+# %% ../nbs/00_core.ipynb
+def _convert_math(soup, mode):
+    for math in soup.find_all('math'):
+        annot = math.find('annotation', {'encoding': 'application/x-tex'})
+        if not annot: continue
+        tex,display = annot.text.strip(), math.get('display') == 'block'
+        if mode == 'dollar': wrap = f'$${tex}$$' if display else f'${tex}$'
+        else: wrap = f'$${tex}$$' if display else f'\({tex}\)'
+        math.replace_with(wrap)
+
+# %% ../nbs/00_core.ipynb
+def read_gh_repo(owner, repo, ref=None, path=''):
+    "Read GitHub repo info: description, file list, and README"
+    api = GhApi()
+    info = api.repos.get(owner, repo)
+    res = [f"# {info.full_name}", info.description or '']
+    ref = ref or info.default_branch
+    contents = api.repos.get_content(owner, repo, path or '', ref=ref)
+    files = [f"- {'📁 ' if c.type=='dir' else ''}{c.name}" for c in contents]
+    res.append(f'\n## /{path or ""} Files\n' + '\n'.join(files))
+    if not path:
+        try:
+            readme = api.repos.get_readme(owner, repo, ref=ref)
+            res.append('\n## README\n' + base64.b64decode(readme.content).decode())
+        except HTTP404NotFoundError: pass
+    return '\n'.join(res)
+
+# %% ../nbs/00_core.ipynb
+def read_url(url:str, as_md:bool=True, extract_section:bool=True, selector:str=None, ai_img:bool=False):
+    "Read url from web"
     from bs4 import BeautifulSoup
-
+    gh = parse_gh_url(url)
+    if gh:
+        if gh['typ']=='blob': url = gh_blob_to_raw(url)
+        elif gh['typ'] in (None, 'tree'): return read_gh_repo(gh['owner'], gh['repo'], gh['ref'], gh['path'])
     o = scrape_url(url)
-
-
-
-
-
-
-    if
-
-
-
-
-    section = soup.find(id=parsed.fragment)
-    if section:
-        tag_name = section.name
-        elements = [section]
-        current = section.next_sibling
-        while current:
-            if hasattr(current, 'name') and current.name == tag_name: break
-            elements.append(current)
-            current = current.next_sibling
-        res = ''.join(str(el) for el in elements)
-    else: res = ''
-    if as_md and ctype == 'text/html': return get_md(res)
+    ctype = (o.headers.get('content-type') or 'text/plain').split(';')[0]
+    res = o.text
+    if ctype == 'text/html':
+        soup = BeautifulSoup(res, 'lxml')
+        if ('#' in url and extract_section) or selector: soup = BeautifulSoup(_extract_section(soup, url, selector), 'lxml')
+        mmode = _get_math_mode()
+        if mmode: _convert_math(soup, mmode)
+        base = soup.find('base')
+        base_url = urljoin(url, base['href'] if base else '')
+        res = get_md(soup, base_url, mmode) if as_md else str(soup)
+    if ai_img: res = _aify_imgs(res)
     return res

+# %% ../nbs/00_core.ipynb
+def fix_editable_priority():
+    import sys
+    from importlib.machinery import PathFinder
+    try: sys.meta_path.append(sys.meta_path.pop(sys.meta_path.index(PathFinder)))
+    except ValueError: pass
+
 # %% ../nbs/00_core.ipynb
 @patch
 def _get_info(self:Inspector, obj, oname='', formatter=None, info=None, detail_level=0, omit_sections=()):
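A minimal usage sketch of the GitHub helpers added above, based only on the code shown in this diff (the example URL and the expected values in comments are illustrative, derived from the regexes in the hunk, not taken from the package's own docs):

from ipykernel_helper.core import parse_gh_url, gh_blob_to_raw

url = 'https://github.com/AnswerDotAI/ipykernel-helper/blob/main/README.md'

# parse_gh_url splits a github.com URL into owner/repo/typ/ref/path parts.
parse_gh_url(url)
# {'owner': 'AnswerDotAI', 'repo': 'ipykernel-helper', 'typ': 'blob', 'ref': 'main', 'path': 'README.md'}

# gh_blob_to_raw rewrites a blob URL to its raw.githubusercontent.com form,
# which read_url then fetches directly instead of scraping the rendered HTML page.
gh_blob_to_raw(url)
# 'https://raw.githubusercontent.com/AnswerDotAI/ipykernel-helper/main/README.md'

# A bare repo or tree URL is routed to read_gh_repo, which uses ghapi to return the
# repo description, file listing, and README (network access required):
# read_url('https://github.com/AnswerDotAI/ipykernel-helper')

Math conversion is opt-in via the USE_KATEX environment variable read by _get_math_mode: unset, '0', 'false', or 'none' disables it, values starting with 'd' select 'dollar' delimiters, and anything else selects the 'safe' mode. The new fix_editable_priority helper moves the standard PathFinder to the end of sys.meta_path so that other import finders (such as those installed by editable installs) are consulted first.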
{ipykernel_helper-0.0.14.dist-info → ipykernel_helper-0.0.20.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ipykernel-helper
-Version: 0.0.14
+Version: 0.0.20
 Summary: Helpers for ipykernel and friends
 Home-page: https://github.com/AnswerDotAI/ipykernel-helper
 Author: Jeremy Howard
@@ -19,13 +19,15 @@ Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: fastcore
-Requires-Dist: toolslm>=0.
+Requires-Dist: toolslm>=0.3.11
 Requires-Dist: jedi
 Requires-Dist: ipython
 Requires-Dist: ipykernel
 Requires-Dist: beautifulsoup4
+Requires-Dist: lxml
 Requires-Dist: html2text
 Requires-Dist: cloudscraper
+Requires-Dist: ghapi
 Provides-Extra: dev
 Dynamic: author
 Dynamic: author-email
ipykernel_helper-0.0.20.dist-info/RECORD
ADDED
@@ -0,0 +1,9 @@
+ipykernel_helper/__init__.py,sha256=mkmmPV_a7e3EgojncsapZLZfAn0jyTfnZIQiO1GWA0M,43
+ipykernel_helper/_modidx.py,sha256=QxDoY2rsZgiO12nSsD7YkAqIri_1_Y38uxVGxLJkd2Y,5032
+ipykernel_helper/core.py,sha256=0BEYTAdT1Wohip_glNJFmpumhCcOI22vfuCoOeheQE4,11961
+ipykernel_helper-0.0.20.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ipykernel_helper-0.0.20.dist-info/METADATA,sha256=dIl05mep_Des63g4tVx0cD9ZP7VzPFDbDy4SzaVT2ts,2751
+ipykernel_helper-0.0.20.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ipykernel_helper-0.0.20.dist-info/entry_points.txt,sha256=HWiK9xz75QtZUaPaYrwpyH5B8MbW0Ea_vi11UmwBImM,54
+ipykernel_helper-0.0.20.dist-info/top_level.txt,sha256=_diD--64d9MauLE0pTxzZ58lkI8DvCrVc1hVAJsyc_Q,17
+ipykernel_helper-0.0.20.dist-info/RECORD,,
ipykernel_helper-0.0.14.dist-info/RECORD
DELETED
@@ -1,9 +0,0 @@
-ipykernel_helper/__init__.py,sha256=z-0Rh9e-quhc6YCgBCQPxO9WBChy0UBbaCcAmIPEqfE,43
-ipykernel_helper/_modidx.py,sha256=AEG4MccZfb25rQi8UWrOpUrs1VIYHbexKEQGWnwYkRg,3706
-ipykernel_helper/core.py,sha256=yPDNnVGPURLVcNb7NcikqKvbdifxybduIdkyJdzHz1w,8420
-ipykernel_helper-0.0.14.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-ipykernel_helper-0.0.14.dist-info/METADATA,sha256=9kObKdTx58-Kw5Ecll9WelZw8LfYBViyzJ0rhAfLGb4,2709
-ipykernel_helper-0.0.14.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-ipykernel_helper-0.0.14.dist-info/entry_points.txt,sha256=HWiK9xz75QtZUaPaYrwpyH5B8MbW0Ea_vi11UmwBImM,54
-ipykernel_helper-0.0.14.dist-info/top_level.txt,sha256=_diD--64d9MauLE0pTxzZ58lkI8DvCrVc1hVAJsyc_Q,17
-ipykernel_helper-0.0.14.dist-info/RECORD,,
{ipykernel_helper-0.0.14.dist-info → ipykernel_helper-0.0.20.dist-info}/WHEEL
File without changes
{ipykernel_helper-0.0.14.dist-info → ipykernel_helper-0.0.20.dist-info}/entry_points.txt
File without changes
{ipykernel_helper-0.0.14.dist-info → ipykernel_helper-0.0.20.dist-info}/licenses/LICENSE
File without changes
{ipykernel_helper-0.0.14.dist-info → ipykernel_helper-0.0.20.dist-info}/top_level.txt
File without changes