scholarinboxcli 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- scholarinboxcli/__init__.py +1 -0
- scholarinboxcli/api/client.py +355 -0
- scholarinboxcli/cli.py +524 -0
- scholarinboxcli/config.py +52 -0
- scholarinboxcli/formatters/json_fmt.py +10 -0
- scholarinboxcli/formatters/table.py +66 -0
- scholarinboxcli-0.1.0.dist-info/METADATA +236 -0
- scholarinboxcli-0.1.0.dist-info/RECORD +10 -0
- scholarinboxcli-0.1.0.dist-info/WHEEL +4 -0
- scholarinboxcli-0.1.0.dist-info/entry_points.txt +2 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__version__ = "0.1.0"
|
|
@@ -0,0 +1,355 @@
|
|
|
1
|
+
"""HTTP client for Scholar Inbox web API."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import os
|
|
7
|
+
import time
|
|
8
|
+
from urllib.parse import urlparse, parse_qs
|
|
9
|
+
from dataclasses import dataclass
|
|
10
|
+
from http.cookiejar import Cookie
|
|
11
|
+
from typing import Any
|
|
12
|
+
|
|
13
|
+
import httpx
|
|
14
|
+
|
|
15
|
+
from scholarinboxcli.config import Config, load_config, save_config
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@dataclass
class ApiError(Exception):
    """Error raised for failed Scholar Inbox API calls.

    Attributes:
        message: Human-readable error description.
        status_code: HTTP status code, when the failure came from a response.
        detail: Raw response body or other diagnostic payload.
    """

    message: str
    status_code: int | None = None
    detail: Any | None = None

    def __post_init__(self) -> None:
        # The dataclass-generated __init__ bypasses Exception.__init__,
        # leaving Exception.args empty so str(exc) would be "".  Populate
        # args so tracebacks and str() show the message.
        super().__init__(self.message)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _cookie_to_dict(cookie: Cookie) -> dict[str, Any]:
|
|
26
|
+
return {
|
|
27
|
+
"name": cookie.name,
|
|
28
|
+
"value": cookie.value,
|
|
29
|
+
"domain": cookie.domain,
|
|
30
|
+
"path": cookie.path,
|
|
31
|
+
"expires": cookie.expires,
|
|
32
|
+
"secure": cookie.secure,
|
|
33
|
+
"httponly": cookie.has_nonstandard_attr("HttpOnly"),
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def _cookies_to_list(cookies: httpx.Cookies) -> list[dict[str, Any]]:
    """Serialize every cookie in the jar for persistence in the config file."""
    return list(map(_cookie_to_dict, cookies.jar))
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def _cookies_from_list(items: list[dict[str, Any]] | None) -> httpx.Cookies:
    """Rebuild an httpx cookie jar from entries persisted in the config."""
    jar = httpx.Cookies()
    for entry in items or []:
        jar.set(
            entry.get("name"),
            entry.get("value"),
            domain=entry.get("domain"),
            # Older configs may lack a path; default to site root.
            path=entry.get("path") or "/",
        )
    return jar
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def _normalize_cookie_domains(cookies: httpx.Cookies, api_base: str) -> httpx.Cookies:
    """Duplicate cookies onto domain variants so they always reach the API host.

    Cookies may come back scoped to e.g. "www.example.com" while requests go
    to "example.com" (or the reverse).  Each cookie is mirrored onto the API
    host and onto the dot-prefixed base domain where applicable.
    """
    host = api_base.replace("https://", "").replace("http://", "").split("/")[0]
    to_add: list[tuple[str, str, str]] = []
    for cookie in cookies.jar:
        domain = cookie.domain or ""
        base = domain.removeprefix("www.")
        if base and host.endswith(base) and domain != host:
            to_add.append((cookie.name, cookie.value, host))
        if base and not base.startswith("."):
            to_add.append((cookie.name, cookie.value, f".{base}"))
    # Mutate after iterating so the jar is not changed mid-iteration.
    for name, value, domain in to_add:
        cookies.set(name, value, domain=domain, path="/")
    return cookies
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def _is_paper_list(data: Any) -> bool:
|
|
74
|
+
if isinstance(data, dict):
|
|
75
|
+
for key in ("papers", "results", "items", "data", "digest_df"):
|
|
76
|
+
val = data.get(key)
|
|
77
|
+
if isinstance(val, list):
|
|
78
|
+
return True
|
|
79
|
+
if isinstance(data, list):
|
|
80
|
+
return True
|
|
81
|
+
return False
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
class ScholarInboxClient:
    """HTTP client for the Scholar Inbox web API.

    Loads the persisted config (cookies, API base) on construction and talks
    to the API host through a shared ``httpx.Client``.  The web API is
    unversioned, so several operations probe multiple endpoint/payload
    shapes and fall back on failure (see ``_post_first`` and
    ``collections_similar``).
    """

    def __init__(self, api_base: str | None = None, no_retry: bool = False):
        # no_retry disables the automatic backoff on HTTP 429 in _request.
        self.no_retry = no_retry
        self.cfg = load_config()
        # Precedence: explicit argument > environment variable > saved config.
        self.api_base = api_base or os.environ.get("SCHOLAR_INBOX_API_BASE") or self.cfg.api_base
        cookies = _cookies_from_list(self.cfg.cookies)
        self.client = httpx.Client(base_url=self.api_base, timeout=30.0, cookies=_normalize_cookie_domains(cookies, self.api_base))

    def close(self) -> None:
        """Release the underlying HTTP connection pool."""
        self.client.close()

    def save_cookies(self) -> None:
        """Persist the current cookie jar and API base to the config file."""
        self.cfg.cookies = _cookies_to_list(self.client.cookies)
        self.cfg.api_base = self.api_base
        save_config(self.cfg)

    def login_with_magic_link(self, login_url: str) -> None:
        """Authenticate using a magic-link URL and persist the session.

        If the URL carries a ``sha_key`` query parameter, the key is
        exchanged directly against the API.  Otherwise the full URL is
        followed (with redirects) in a throwaway client and whatever cookies
        it sets are adopted.

        Raises:
            ApiError: when the login request returns an HTTP error status.
        """
        sha_key = None
        try:
            qs = parse_qs(urlparse(login_url).query)
            sha_key = qs.get("sha_key", [None])[0]
        except Exception:
            # Unparseable URL: fall through to following the link directly.
            sha_key = None

        if sha_key:
            resp = self.client.get(f"/api/login/{sha_key}/")
            if resp.status_code >= 400:
                raise ApiError("Login failed", resp.status_code, resp.text)
            self.save_cookies()
            return

        # No sha_key: follow the link and copy the resulting cookies over.
        with httpx.Client(follow_redirects=True, timeout=30.0) as client:
            resp = client.get(login_url)
            if resp.status_code >= 400:
                raise ApiError("Login failed", resp.status_code, resp.text)
            self.client.cookies = _normalize_cookie_domains(client.cookies, self.api_base)
            self.save_cookies()

    def _request(self, method: str, url: str, **kwargs: Any) -> Any:
        """Issue a request, retrying 429s and decoding JSON when possible.

        Returns the decoded JSON body when available, otherwise response text.

        Raises:
            ApiError: on HTTP errors, and on redirects to the logout page
                (which indicate an expired or missing session).
        """
        retries = 0
        while True:
            resp = self.client.request(method, url, **kwargs)
            # A redirect to /api/logout means the session is invalid.
            if 300 <= resp.status_code < 400 and "/api/logout" in resp.headers.get("location", ""):
                raise ApiError("Not authenticated (redirected to logout)", resp.status_code, resp.text)
            # Exponential backoff on rate limiting: 1.5s, 3s, 6s (3 retries max).
            if resp.status_code == 429 and not self.no_retry and retries < 3:
                time.sleep(1.5 * (2**retries))
                retries += 1
                continue
            if resp.status_code >= 400:
                raise ApiError("Request failed", resp.status_code, resp.text)
            if "application/json" in resp.headers.get("content-type", ""):
                return resp.json()
            # try json anyway
            try:
                return resp.json()
            except Exception:
                return resp.text

    def _post_first(self, endpoints: list[str], payload: dict[str, Any]) -> Any:
        """POST *payload* to each endpoint (as JSON, then form data) until one succeeds.

        Raises the last ApiError seen when every attempt fails.
        """
        last_error: ApiError | None = None
        for ep in endpoints:
            try:
                return self._request("POST", ep, json=payload)
            except ApiError as e:
                last_error = e
            try:
                return self._request("POST", ep, data=payload)
            except ApiError as e:
                last_error = e
        if last_error:
            raise last_error
        raise ApiError("No endpoints tried")

    def session_info(self) -> Any:
        """Return the current session/user info."""
        return self._request("GET", "/api/session_info")

    def get_digest(self, date: str | None = None) -> Any:
        """Fetch the personal digest, optionally for a specific date (MM-DD-YYYY)."""
        if date:
            return self._request("GET", f"/api/?date={date}")
        return self._request("GET", "/api/")

    def get_trending(self, category: str = "ALL", days: int = 7, sort: str = "hype", asc: bool = False) -> Any:
        """Fetch trending papers for *category* over the last *days* days."""
        asc_val = "1" if asc else "0"
        return self._request(
            "GET",
            f"/api/trending?column={sort}&category={category}&ascending={asc_val}&dates={days}",
        )

    def search(self, query: str, sort: str | None = None, limit: int | None = None, offset: int | None = None) -> Any:
        """Full-text search.  limit defaults to 10 results, offset to page 0."""
        payload: dict[str, Any] = {
            "search_prompt": query,
            "n_results": limit if limit is not None else 10,
            "p": offset if offset is not None else 0,
            "correct_search_prompt": True,
        }
        if sort:
            payload["orderBy"] = sort
        return self._request("POST", "/api/get_search_results/", json=payload)

    def semantic_search(self, text: str, limit: int | None = None, offset: int | None = None) -> Any:
        """Semantic search over *text* using the tf-idf embedding."""
        payload: dict[str, Any] = {
            "text_input": text,
            "embedding": "tfidf",
            "p": offset if offset is not None else 0,
        }
        if limit is not None:
            payload["n_results"] = limit
        return self._request("POST", "/api/semantic-search", json=payload)

    def interactions(self, type_: str = "all", sort: str = "ranking_score", asc: bool = False) -> Any:
        """List papers the user interacted with, filtered by *type_*."""
        asc_val = "1" if asc else "0"
        return self._request(
            "GET",
            f"/api/interactions?column={sort}&type={type_}&ascending={asc_val}",
        )

    def bookmarks(self) -> Any:
        """List bookmarked papers."""
        return self._request("GET", "/api/bookmarks")

    def bookmark_add(self, paper_id: str) -> Any:
        """Bookmark a paper; falls back to form encoding if JSON is rejected."""
        payload = {"bookmarked": True, "id": paper_id}
        try:
            return self._request("POST", "/api/bookmark_paper/", json=payload)
        except ApiError:
            return self._request("POST", "/api/bookmark_paper/", data=payload)

    def bookmark_remove(self, paper_id: str) -> Any:
        """Remove a bookmark; falls back to form encoding if JSON is rejected."""
        payload = {"bookmarked": False, "id": paper_id}
        try:
            return self._request("POST", "/api/bookmark_paper/", json=payload)
        except ApiError:
            return self._request("POST", "/api/bookmark_paper/", data=payload)

    def collections_list(self) -> Any:
        """List collections, preferring the richer endpoint with a fallback."""
        try:
            return self._request("GET", "/api/get_all_user_collections")
        except ApiError:
            return self._request("GET", "/api/collections")

    def collections_expanded(self) -> Any:
        """List collections with expanded metadata."""
        return self._request("GET", "/api/get_expanded_collections")

    def collections_map(self) -> Any:
        """Fetch the raw collections response (includes the name->id mapping)."""
        return self._request("GET", "/api/collections")

    def collection_create(self, name: str) -> Any:
        """Create a collection, probing several candidate endpoints."""
        # Both key spellings are sent so any endpoint variant accepts one.
        payload = {"name": name, "collection_name": name}
        endpoints = [
            "/api/create_collection/",
            "/api/collections",
            "/api/collection-create/",
        ]
        return self._post_first(endpoints, payload)

    def collection_rename(self, collection_id: str, new_name: str) -> Any:
        """Rename a collection, probing several candidate endpoints."""
        # Redundant key spellings for endpoint-shape tolerance.
        payload = {
            "collection_id": collection_id,
            "id": collection_id,
            "name": new_name,
            "new_name": new_name,
        }
        endpoints = [
            "/api/rename_collection/",
            "/api/collection-rename/",
            "/api/collections/rename",
        ]
        return self._post_first(endpoints, payload)

    def collection_delete(self, collection_id: str) -> Any:
        """Delete a collection, probing several candidate endpoints."""
        payload = {"collection_id": collection_id, "id": collection_id}
        endpoints = [
            "/api/delete_collection/",
            "/api/collection-delete/",
            "/api/collections/delete",
        ]
        return self._post_first(endpoints, payload)

    def collection_add_paper(self, collection_id: str, paper_id: str) -> Any:
        """Add a paper to a collection, probing several candidate endpoints."""
        payload = {"collection_id": collection_id, "paper_id": paper_id}
        endpoints = [
            "/api/add_paper_to_collection/",
            "/api/collection-add-paper/",
            "/api/add_to_collection/",
        ]
        return self._post_first(endpoints, payload)

    def collection_remove_paper(self, collection_id: str, paper_id: str) -> Any:
        """Remove a paper from a collection, probing several candidate endpoints."""
        payload = {"collection_id": collection_id, "paper_id": paper_id}
        endpoints = [
            "/api/remove_paper_from_collection/",
            "/api/collection-remove-paper/",
            "/api/remove_from_collection/",
        ]
        return self._post_first(endpoints, payload)

    def collection_papers(self, collection_id: str, limit: int | None = None, offset: int | None = None) -> Any:
        """List papers in a collection, retrying without paging params on error."""
        params: dict[str, Any] = {"collection_id": collection_id}
        if limit is not None:
            params["limit"] = limit
        if offset is not None:
            params["offset"] = offset
        try:
            return self._request("GET", "/api/collection-papers", params=params)
        except ApiError:
            # fallback without paging
            return self._request("GET", "/api/collection-papers", params={"collection_id": collection_id})

    def collections_similar(self, collection_ids: list[str], limit: int | None = None, offset: int | None = None) -> Any:
        """Fetch papers similar to the given collections.

        The server's expected request shape is unknown, so several payload
        schemas are tried in order; the first schema that yields a paper
        list is cached in the config and tried first on subsequent calls.

        Raises:
            ApiError: when no schema produces a usable response.
        """
        schemas = [
            "json_collection_ids_p",
            "json_collection_ids",
            "json_collection_id",
            "form_collection_ids",
            "get_params",
        ]
        if self.cfg.collections_similar_schema:
            # Move the previously successful schema to the front.
            schemas = [self.cfg.collections_similar_schema] + [s for s in schemas if s != self.cfg.collections_similar_schema]

        for schema in schemas:
            try:
                data = self._collections_similar_with_schema(schema, collection_ids, limit, offset)
                if _is_paper_list(data):
                    # Remember the working schema for future calls.
                    self.cfg.collections_similar_schema = schema
                    save_config(self.cfg)
                    return data
            except ApiError:
                continue
        raise ApiError("Unable to fetch similar papers for collections")

    def _collections_similar_with_schema(
        self, schema: str, collection_ids: list[str], limit: int | None, offset: int | None
    ) -> Any:
        """Issue one similar-papers request using a specific payload schema.

        Schemas (tried by collections_similar): camelCase JSON with paging,
        snake_case JSON list, single-id JSON (only when exactly one id),
        comma-joined form data, and GET query params.
        """
        if schema == "json_collection_ids_p":
            payload: dict[str, Any] = {"collectionIds": collection_ids, "p": offset if offset is not None else 0}
            if limit is not None:
                payload["n_results"] = limit
            return self._request("POST", "/api/get_collections_similar_papers/", json=payload)
        if schema == "json_collection_ids":
            payload: dict[str, Any] = {"collection_ids": collection_ids}
            if limit is not None:
                payload["limit"] = limit
            if offset is not None:
                payload["offset"] = offset
            return self._request("POST", "/api/get_collections_similar_papers/", json=payload)
        if schema == "json_collection_id" and len(collection_ids) == 1:
            payload = {"collection_id": collection_ids[0]}
            if limit is not None:
                payload["limit"] = limit
            if offset is not None:
                payload["offset"] = offset
            return self._request("POST", "/api/get_collections_similar_papers/", json=payload)
        if schema == "form_collection_ids":
            payload = {"collection_ids": ",".join(collection_ids)}
            if limit is not None:
                payload["limit"] = limit
            if offset is not None:
                payload["offset"] = offset
            return self._request("POST", "/api/get_collections_similar_papers/", data=payload)
        if schema == "get_params":
            params = {"collection_id": ",".join(collection_ids)}
            if limit is not None:
                params["limit"] = limit
            if offset is not None:
                params["offset"] = offset
            return self._request("GET", "/api/get_collections_similar_papers/", params=params)
        raise ApiError("Unknown schema")

    def conference_list(self) -> Any:
        """List known conferences."""
        return self._request("GET", "/api/conference_list")

    def conference_explorer(self) -> Any:
        """Fetch the conference-explorer dataset."""
        return self._request("GET", "/api/conference-explorer")
|
scholarinboxcli/cli.py
ADDED
|
@@ -0,0 +1,524 @@
|
|
|
1
|
+
"""Scholar Inbox CLI."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import sys
|
|
6
|
+
from typing import Optional
|
|
7
|
+
|
|
8
|
+
import typer
|
|
9
|
+
|
|
10
|
+
from scholarinboxcli.api.client import ApiError, ScholarInboxClient
|
|
11
|
+
from scholarinboxcli.formatters.json_fmt import format_json
|
|
12
|
+
from scholarinboxcli.formatters.table import format_table
|
|
13
|
+
|
|
14
|
+
# Root CLI application; the help text doubles as quick-start examples.
app = typer.Typer(
    help=(
        "Scholar Inbox CLI.\n\n"
        "Examples:\n"
        " scholarinboxcli auth login --url \"https://www.scholar-inbox.com/login?sha_key=...&date=MM-DD-YYYY\"\n"
        " scholarinboxcli digest --date 01-30-2026 --json\n"
        " scholarinboxcli search \"transformers\" --limit 5 --json\n"
        " scholarinboxcli collection papers \"AIAgents\" --json\n"
        " scholarinboxcli conference explore --json\n"
    )
)


# Sub-command groups; no_args_is_help prints usage when invoked bare.
auth_app = typer.Typer(help="Authentication commands", no_args_is_help=True)
collection_app = typer.Typer(help="Collection commands", no_args_is_help=True)
bookmark_app = typer.Typer(help="Bookmark commands", no_args_is_help=True)
conference_app = typer.Typer(help="Conference commands", no_args_is_help=True)

app.add_typer(auth_app, name="auth")
app.add_typer(collection_app, name="collection")
app.add_typer(bookmark_app, name="bookmark")
app.add_typer(conference_app, name="conference")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _print_output(data, use_json: bool, title: str | None = None) -> None:
    """Print *data* as JSON (forced when stdout is not a TTY) or as a table.

    Args:
        data: Decoded API response of any shape.
        use_json: Force JSON output even on a TTY.
        title: Optional table title for human-readable output.
    """
    if use_json or not sys.stdout.isatty():
        typer.echo(format_json(data))
        return
    # format_table returns "(no results)" for empty data; it is echoed the
    # same way as any table, so the previous special-case branch (which did
    # an identical echo-and-return) was redundant and has been removed.
    typer.echo(format_table(data, title=title))
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def _handle_error(err: ApiError) -> None:
    """Report an API error and exit with status 1.

    Humans (TTY stdout) get short messages on stderr; machine consumers
    get a structured JSON error object on stdout.
    """
    if sys.stdout.isatty():
        typer.echo(f"Error: {err.message}", err=True)
        if err.status_code:
            typer.echo(f"Status: {err.status_code}", err=True)
    else:
        typer.echo(format_json({"error": err.message, "status_code": err.status_code, "detail": err.detail}))
    raise typer.Exit(1)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def _normalize_name(name: str) -> str:
|
|
60
|
+
return name.strip().lower()
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def _resolve_collection_id(client: ScholarInboxClient, identifier: str) -> str:
    """Resolve a collection ID or name to a collection ID.

    Numeric identifiers are returned unchanged.  Names are matched (exact,
    then unique prefix, then unique substring) against the account's
    collections, trying progressively richer endpoints until IDs appear.

    Raises:
        ApiError: when the name is unknown, ambiguous, or no IDs can be
            obtained from any endpoint.
    """
    if identifier.isdigit():
        return identifier
    data = client.collections_list()
    items = _collection_items_from_response(data)
    candidates = _collection_candidates(items)
    # The plain listing may only contain names; try the expanded endpoint.
    if not _candidates_have_ids(candidates):
        try:
            data = client.collections_expanded()
            items = _collection_items_from_response(data)
            candidates = _collection_candidates(items)
        except ApiError:
            pass
    # Last resort for IDs: the explicit name->id mapping endpoint.
    if not _candidates_have_ids(candidates):
        try:
            data = client.collections_map()
            mapped = _collection_candidates_from_map(data)
            if mapped:
                candidates = mapped
        except ApiError:
            pass
    # Still no IDs anywhere: resolve to a bare name if it matches uniquely.
    if not _candidates_have_ids(candidates):
        matched = _match_collection_name(candidates, identifier)
        if matched:
            # Only names are available; fall back to name as identifier.
            return matched
        raise ApiError("Unable to resolve collection name (no IDs available)")
    candidates = [(name, cid) for name, cid in candidates if cid]
    target = _normalize_name(identifier)
    for name, cid in candidates:
        if _normalize_name(name) == target:
            return cid
    # prefix match
    prefix = [c for c in candidates if _normalize_name(c[0]).startswith(target)]
    if len(prefix) == 1:
        return prefix[0][1]
    if len(prefix) > 1:
        names = ", ".join([f"{n}({cid})" for n, cid in prefix[:10]])
        raise ApiError(f"Ambiguous collection name. Matches: {names}")
    # contains match
    contains = [c for c in candidates if target in _normalize_name(c[0])]
    if len(contains) == 1:
        return contains[0][1]
    if len(contains) > 1:
        names = ", ".join([f"{n}({cid})" for n, cid in contains[:10]])
        raise ApiError(f"Ambiguous collection name. Matches: {names}")
    raise ApiError("Collection name not found")
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def _collection_candidates(items: object) -> list[tuple[str, str]]:
|
|
113
|
+
if not isinstance(items, list):
|
|
114
|
+
return []
|
|
115
|
+
candidates: list[tuple[str, str]] = []
|
|
116
|
+
for item in items:
|
|
117
|
+
if isinstance(item, dict):
|
|
118
|
+
name = item.get("name") or item.get("collection_name") or ""
|
|
119
|
+
cid = str(item.get("id") or item.get("collection_id") or "")
|
|
120
|
+
elif isinstance(item, str):
|
|
121
|
+
name = item
|
|
122
|
+
cid = ""
|
|
123
|
+
else:
|
|
124
|
+
continue
|
|
125
|
+
if name:
|
|
126
|
+
candidates.append((name, cid))
|
|
127
|
+
return candidates
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def _collection_items_from_response(data: object) -> object:
|
|
131
|
+
if isinstance(data, dict):
|
|
132
|
+
for key in ("collections", "expanded_collections", "collection_names"):
|
|
133
|
+
if key in data:
|
|
134
|
+
return data.get(key)
|
|
135
|
+
return data
|
|
136
|
+
return data
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def _collection_candidates_from_map(data: object) -> list[tuple[str, str]]:
|
|
140
|
+
if not isinstance(data, dict):
|
|
141
|
+
return []
|
|
142
|
+
mapping = data.get("collection_names_to_ids_dict")
|
|
143
|
+
if not isinstance(mapping, dict):
|
|
144
|
+
return []
|
|
145
|
+
candidates: list[tuple[str, str]] = []
|
|
146
|
+
for name, cid in mapping.items():
|
|
147
|
+
if name and cid is not None:
|
|
148
|
+
candidates.append((str(name), str(cid)))
|
|
149
|
+
return candidates
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
def _candidates_have_ids(candidates: list[tuple[str, str]]) -> bool:
|
|
153
|
+
for _, cid in candidates:
|
|
154
|
+
if cid:
|
|
155
|
+
return True
|
|
156
|
+
return False
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def _match_collection_name(candidates: list[tuple[str, str]], identifier: str) -> str | None:
    """Resolve *identifier* to a known collection name.

    Tries an exact (normalized) match, then a unique prefix match, then a
    unique substring match.  Returns None when nothing matches.

    Raises:
        ApiError: when a prefix or substring match is ambiguous.
    """
    target = _normalize_name(identifier)
    named = [(name, cid) for name, cid in candidates if name]
    for name, _ in named:
        if _normalize_name(name) == target:
            return name
    # Prefix match first, then substring; each must be unique to resolve.
    for matches in (
        [c for c in named if _normalize_name(c[0]).startswith(target)],
        [c for c in named if target in _normalize_name(c[0])],
    ):
        if len(matches) == 1:
            return matches[0][0]
        if len(matches) > 1:
            names_str = ", ".join([n for n, _ in matches[:10]])
            raise ApiError(f"Ambiguous collection name. Matches: {names_str}")
    return None
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
@auth_app.command("login")
def auth_login(
    url: str = typer.Option(..., "--url", help="Magic login URL with sha_key"),
):
    """Log in via a magic-link URL and store the session cookies."""
    api = ScholarInboxClient()
    try:
        api.login_with_magic_link(url)
        typer.echo("Login successful")
    except ApiError as exc:
        _handle_error(exc)
    finally:
        api.close()
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
@auth_app.command("status")
def auth_status(json_output: bool = typer.Option(False, "--json", help="Output as JSON")):
    """Show current session information (verifies the stored login)."""
    api = ScholarInboxClient()
    try:
        _print_output(api.session_info(), json_output, title="Session")
    except ApiError as exc:
        _handle_error(exc)
    finally:
        api.close()
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
@auth_app.command("logout")
def auth_logout():
    """Drop the stored session by overwriting the config with defaults."""
    # Local import keeps module import time down and avoids a cycle.
    from scholarinboxcli.config import Config, save_config

    save_config(Config())
    typer.echo("Logged out")
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
@app.command("digest")
def digest(
    date: Optional[str] = typer.Option(None, "--date", help="Digest date (MM-DD-YYYY)"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    """Show the personal paper digest, optionally for a specific date."""
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        _print_output(api.get_digest(date), json_output, title="Digest")
    except ApiError as exc:
        _handle_error(exc)
    finally:
        api.close()
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
@app.command("trending")
def trending(
    category: str = typer.Option("ALL", "--category", help="Category filter"),
    days: int = typer.Option(7, "--days", help="Lookback window in days"),
    sort: str = typer.Option("hype", "--sort", help="Sort column"),
    asc: bool = typer.Option(False, "--asc", help="Sort ascending"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    """List trending papers for a category over a lookback window."""
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        result = api.get_trending(category=category, days=days, sort=sort, asc=asc)
        _print_output(result, json_output, title="Trending")
    except ApiError as exc:
        _handle_error(exc)
    finally:
        api.close()
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
@app.command("search")
def search(
    query: str = typer.Argument(..., help="Search query"),
    sort: Optional[str] = typer.Option(None, "--sort", help="Sort option"),
    limit: Optional[int] = typer.Option(None, "--limit", "-n", help="Limit results"),
    offset: Optional[int] = typer.Option(None, "--offset", help="Pagination offset"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    """Full-text search over papers."""
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        result = api.search(query=query, sort=sort, limit=limit, offset=offset)
        _print_output(result, json_output, title="Search")
    except ApiError as exc:
        _handle_error(exc)
    finally:
        api.close()
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
@app.command("semantic")
def semantic_search(
    text: Optional[str] = typer.Argument(None, help="Semantic search text"),
    file: Optional[str] = typer.Option(None, "--file", help="Read query text from file"),
    limit: Optional[int] = typer.Option(None, "--limit", "-n", help="Limit results"),
    offset: Optional[int] = typer.Option(None, "--offset", help="Pagination offset"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    """Run a semantic (tf-idf) search from inline text or a file.

    Exactly one of the text argument or --file is required; --file wins
    when both are given (matching the original behavior).
    """
    if not text and not file:
        typer.echo("Provide text or --file", err=True)
        raise typer.Exit(1)
    if file:
        # Use a context manager so the handle is closed promptly; the
        # original `open(file).read()` leaked the file object.
        with open(file, "r", encoding="utf-8") as fh:
            text = fh.read()
    client = ScholarInboxClient(no_retry=no_retry)
    try:
        data = client.semantic_search(text=text or "", limit=limit, offset=offset)
        _print_output(data, json_output, title="Semantic Search")
    except ApiError as e:
        _handle_error(e)
    finally:
        client.close()
|
|
290
|
+
|
|
291
|
+
|
|
292
|
+
@app.command("interactions")
def interactions(
    type_: str = typer.Option("all", "--type", help="Interaction type (all/up/down)"),
    sort: str = typer.Option("ranking_score", "--sort", help="Sort column"),
    asc: bool = typer.Option(False, "--asc", help="Sort ascending"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    """List papers you have interacted with (votes, reads, etc.)."""
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        result = api.interactions(type_=type_, sort=sort, asc=asc)
        _print_output(result, json_output, title="Interactions")
    except ApiError as exc:
        _handle_error(exc)
    finally:
        api.close()
|
|
308
|
+
|
|
309
|
+
|
|
310
|
+
@bookmark_app.command("list")
def bookmark_list(
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    """List all bookmarked papers."""
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        _print_output(api.bookmarks(), json_output, title="Bookmarks")
    except ApiError as exc:
        _handle_error(exc)
    finally:
        api.close()
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
@bookmark_app.command("add")
def bookmark_add(
    paper_id: str = typer.Argument(..., help="Paper ID"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    """Bookmark a paper by its ID."""
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        _print_output(api.bookmark_add(paper_id), json_output, title="Bookmark added")
    except ApiError as exc:
        _handle_error(exc)
    finally:
        api.close()
|
|
339
|
+
|
|
340
|
+
|
|
341
|
+
@bookmark_app.command("remove")
def bookmark_remove(
    paper_id: str = typer.Argument(..., help="Paper ID"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    """Remove a bookmark by paper ID."""
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        _print_output(api.bookmark_remove(paper_id), json_output, title="Bookmark removed")
    except ApiError as exc:
        _handle_error(exc)
    finally:
        api.close()
|
|
355
|
+
|
|
356
|
+
|
|
357
|
+
@collection_app.command("list")
def collection_list(
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    expanded: bool = typer.Option(False, "--expanded", help="Use expanded collection metadata"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    """List your collections, optionally with expanded metadata."""
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        if expanded:
            result = api.collections_expanded()
        else:
            result = api.collections_list()
        _print_output(result, json_output, title="Collections")
    except ApiError as exc:
        _handle_error(exc)
    finally:
        api.close()
|
|
371
|
+
|
|
372
|
+
@collection_app.command("create")
def collection_create(
    name: str = typer.Argument(..., help="Collection name"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    """Create a new collection."""
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        _print_output(api.collection_create(name), json_output, title="Collection created")
    except ApiError as exc:
        _handle_error(exc)
    finally:
        api.close()
|
|
386
|
+
|
|
387
|
+
|
|
388
|
+
@collection_app.command("rename")
def collection_rename(
    collection_id: str = typer.Argument(..., help="Collection ID or name"),
    new_name: str = typer.Argument(..., help="New collection name"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    # Each command owns its own client so the session is closed on every exit path.
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        try:
            # Accepts either a numeric ID or a collection name; resolve first.
            target = _resolve_collection_id(api, collection_id)
            payload = api.collection_rename(target, new_name)
            _print_output(payload, json_output, title="Collection renamed")
        except ApiError as err:
            _handle_error(err)
    finally:
        api.close()
|
|
404
|
+
|
|
405
|
+
|
|
406
|
+
@collection_app.command("delete")
def collection_delete(
    collection_id: str = typer.Argument(..., help="Collection ID or name"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    # Each command owns its own client so the session is closed on every exit path.
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        try:
            # Accepts either a numeric ID or a collection name; resolve first.
            target = _resolve_collection_id(api, collection_id)
            payload = api.collection_delete(target)
            _print_output(payload, json_output, title="Collection deleted")
        except ApiError as err:
            _handle_error(err)
    finally:
        api.close()
|
|
421
|
+
|
|
422
|
+
|
|
423
|
+
@collection_app.command("add")
def collection_add(
    collection_id: str = typer.Argument(..., help="Collection ID or name"),
    paper_id: str = typer.Argument(..., help="Paper ID"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    # Each command owns its own client so the session is closed on every exit path.
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        try:
            # Accepts either a numeric ID or a collection name; resolve first.
            target = _resolve_collection_id(api, collection_id)
            payload = api.collection_add_paper(target, paper_id)
            _print_output(payload, json_output, title="Collection add paper")
        except ApiError as err:
            _handle_error(err)
    finally:
        api.close()
|
|
439
|
+
|
|
440
|
+
|
|
441
|
+
@collection_app.command("remove")
def collection_remove(
    collection_id: str = typer.Argument(..., help="Collection ID or name"),
    paper_id: str = typer.Argument(..., help="Paper ID"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    # Each command owns its own client so the session is closed on every exit path.
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        try:
            # Accepts either a numeric ID or a collection name; resolve first.
            target = _resolve_collection_id(api, collection_id)
            payload = api.collection_remove_paper(target, paper_id)
            _print_output(payload, json_output, title="Collection remove paper")
        except ApiError as err:
            _handle_error(err)
    finally:
        api.close()
|
|
457
|
+
|
|
458
|
+
|
|
459
|
+
@collection_app.command("papers")
def collection_papers(
    collection_id: str = typer.Argument(..., help="Collection ID or name"),
    limit: Optional[int] = typer.Option(None, "--limit", "-n", help="Limit results"),
    offset: Optional[int] = typer.Option(None, "--offset", help="Pagination offset"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    # Each command owns its own client so the session is closed on every exit path.
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        try:
            # Accepts either a numeric ID or a collection name; resolve first.
            cid = _resolve_collection_id(api, collection_id)
            payload = api.collection_papers(cid, limit=limit, offset=offset)
            _print_output(payload, json_output, title=f"Collection {cid}")
        except ApiError as err:
            _handle_error(err)
    finally:
        api.close()
|
|
476
|
+
|
|
477
|
+
|
|
478
|
+
@collection_app.command("similar")
def collection_similar(
    collection_ids: list[str] = typer.Argument(..., help="Collection ID(s) or names"),
    limit: Optional[int] = typer.Option(None, "--limit", "-n", help="Limit results"),
    offset: Optional[int] = typer.Option(None, "--offset", help="Pagination offset"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    # Each command owns its own client so the session is closed on every exit path.
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        try:
            # Every argument may be an ID or a name; resolve each before the call.
            targets = [_resolve_collection_id(api, raw) for raw in collection_ids]
            payload = api.collections_similar(targets, limit=limit, offset=offset)
            _print_output(payload, json_output, title="Similar Papers")
        except ApiError as err:
            _handle_error(err)
    finally:
        api.close()
|
|
495
|
+
|
|
496
|
+
|
|
497
|
+
@conference_app.command("list")
def conference_list(
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    # Each command owns its own client so the session is closed on every exit path.
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        try:
            payload = api.conference_list()
            _print_output(payload, json_output, title="Conferences")
        except ApiError as err:
            _handle_error(err)
    finally:
        api.close()
|
|
510
|
+
|
|
511
|
+
|
|
512
|
+
@conference_app.command("explore")
def conference_explore(
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
    no_retry: bool = typer.Option(False, "--no-retry", help="Disable retry on rate limits"),
):
    # Each command owns its own client so the session is closed on every exit path.
    api = ScholarInboxClient(no_retry=no_retry)
    try:
        try:
            payload = api.conference_explorer()
            _print_output(payload, json_output, title="Conference Explorer")
        except ApiError as err:
            _handle_error(err)
    finally:
        api.close()
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
"""Config and session storage for scholarinboxcli."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import os
|
|
7
|
+
from dataclasses import dataclass
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Any
|
|
10
|
+
|
|
11
|
+
CONFIG_DIR = Path(os.environ.get("XDG_CONFIG_HOME", Path.home() / ".config")) / "scholarinboxcli"
|
|
12
|
+
CONFIG_PATH = CONFIG_DIR / "config.json"
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@dataclass
class Config:
    """Persisted CLI settings: endpoint URLs, saved session cookies, and
    the cached schema hint for the collections-similar endpoint."""

    api_base: str = "https://api.scholar-inbox.com"
    base_url: str = "https://www.scholar-inbox.com"
    cookies: list[dict[str, Any]] | None = None
    collections_similar_schema: str | None = None

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-compatible dict (inverse of :meth:`from_dict`)."""
        return dict(
            api_base=self.api_base,
            base_url=self.base_url,
            cookies=self.cookies,
            collections_similar_schema=self.collections_similar_schema,
        )

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "Config":
        """Build a Config from a dict, falling back to field defaults for
        missing URL keys."""
        defaults = cls()
        return cls(
            api_base=data.get("api_base", defaults.api_base),
            base_url=data.get("base_url", defaults.base_url),
            cookies=data.get("cookies"),
            collections_similar_schema=data.get("collections_similar_schema"),
        )
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def load_config() -> Config:
    """Load the on-disk config, returning defaults when the file is absent
    or unreadable/corrupt (best-effort: never raises for a bad file)."""
    if CONFIG_PATH.exists():
        try:
            raw = json.loads(CONFIG_PATH.read_text())
        except Exception:
            # Corrupt or unreadable file — treat it like a missing config.
            pass
        else:
            return Config.from_dict(raw)
    return Config()
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def save_config(cfg: Config) -> None:
    """Persist *cfg* to CONFIG_PATH as pretty-printed JSON.

    The config holds session cookies (credentials), so the file is kept
    owner-readable only; the previous implementation left it at the
    umask-default mode, typically world-readable.
    """
    CONFIG_DIR.mkdir(parents=True, exist_ok=True)
    # Create the file with 0o600 before writing secrets into it (touch's
    # mode only applies on creation, so also chmod pre-existing files below).
    CONFIG_PATH.touch(mode=0o600, exist_ok=True)
    CONFIG_PATH.write_text(json.dumps(cfg.to_dict(), indent=2))
    CONFIG_PATH.chmod(0o600)
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
"""Table output formatting using Rich."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from rich.console import Console
|
|
8
|
+
from rich.table import Table
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def _get_authors(paper: dict[str, Any]) -> str:
|
|
12
|
+
authors = paper.get("authors") or paper.get("author") or paper.get("authorNames")
|
|
13
|
+
if isinstance(authors, list):
|
|
14
|
+
names = []
|
|
15
|
+
for a in authors:
|
|
16
|
+
if isinstance(a, str):
|
|
17
|
+
names.append(a)
|
|
18
|
+
elif isinstance(a, dict):
|
|
19
|
+
names.append(a.get("name") or a.get("author") or "")
|
|
20
|
+
return ", ".join([n for n in names if n])
|
|
21
|
+
if isinstance(authors, str):
|
|
22
|
+
return authors
|
|
23
|
+
return ""
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def _extract_papers(data: Any) -> list[dict[str, Any]]:
|
|
27
|
+
if isinstance(data, list):
|
|
28
|
+
return [d for d in data if isinstance(d, dict)]
|
|
29
|
+
if isinstance(data, dict):
|
|
30
|
+
for key in ("papers", "results", "items", "data", "digest_df"):
|
|
31
|
+
val = data.get(key)
|
|
32
|
+
if isinstance(val, list):
|
|
33
|
+
return [d for d in val if isinstance(d, dict)]
|
|
34
|
+
return []
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def format_table(data: Any, title: str | None = None) -> str:
    """Render the papers found in *data* as a Rich table, returned as a
    plain string. Returns "(no results)" when no papers can be extracted."""
    entries = _extract_papers(data)
    if not entries:
        return "(no results)"

    table = Table(title=title)
    table.add_column("Title", overflow="fold")
    table.add_column("Authors", overflow="fold")
    table.add_column("Year", justify="right")
    table.add_column("Venue", overflow="fold")
    table.add_column("ID", overflow="fold")

    for entry in entries:
        # Responses use several ID field names; take the first non-empty one.
        identifier = (
            entry.get("paper_id")
            or entry.get("paperId")
            or entry.get("id")
            or entry.get("corpusid")
            or ""
        )
        table.add_row(
            str(entry.get("title") or entry.get("paper_title") or ""),
            _get_authors(entry),
            str(entry.get("year") or entry.get("publication_year") or ""),
            str(entry.get("venue") or entry.get("conference") or entry.get("journal") or ""),
            str(identifier),
        )

    # Render into a capture buffer instead of stdout so callers get a string.
    console = Console()
    with console.capture() as sink:
        console.print(table)
    return sink.get()
|
|
@@ -0,0 +1,236 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: scholarinboxcli
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: CLI for Scholar Inbox (authenticated web API)
|
|
5
|
+
License-Expression: MIT
|
|
6
|
+
Keywords: bibliography,cli,research,scholar
|
|
7
|
+
Classifier: Development Status :: 3 - Alpha
|
|
8
|
+
Classifier: Environment :: Console
|
|
9
|
+
Classifier: Intended Audience :: Science/Research
|
|
10
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
11
|
+
Classifier: Programming Language :: Python :: 3
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
15
|
+
Classifier: Topic :: Scientific/Engineering
|
|
16
|
+
Requires-Python: >=3.10
|
|
17
|
+
Requires-Dist: httpx>=0.25.0
|
|
18
|
+
Requires-Dist: rich>=13.0.0
|
|
19
|
+
Requires-Dist: typer>=0.9.0
|
|
20
|
+
Description-Content-Type: text/markdown
|
|
21
|
+
|
|
22
|
+
# scholarinboxcli
|
|
23
|
+
|
|
24
|
+
CLI for Scholar Inbox, for humans and agents alike.
|
|
25
|
+
|
|
26
|
+
## Installation
|
|
27
|
+
|
|
28
|
+
```bash
|
|
29
|
+
pip install scholarinboxcli
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
Or with uv:
|
|
33
|
+
|
|
34
|
+
```bash
|
|
35
|
+
uv pip install scholarinboxcli
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
Or run directly with uvx (no install):
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
uvx scholarinboxcli auth login --url "<magic-link-url>"
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
## Auth
|
|
45
|
+
|
|
46
|
+
```bash
|
|
47
|
+
# Log in with the magic-link URL from the web app
|
|
48
|
+
scholarinboxcli auth login --url "https://www.scholar-inbox.com/login?sha_key=...&date=MM-DD-YYYY"
|
|
49
|
+
|
|
50
|
+
# Check current session and user info
|
|
51
|
+
scholarinboxcli auth status
|
|
52
|
+
|
|
53
|
+
# Clear local session config
|
|
54
|
+
scholarinboxcli auth logout
|
|
55
|
+
```
|
|
56
|
+
|
|
57
|
+
Note: `auth login` extracts `sha_key` from the URL and authenticates via the API.
|
|
58
|
+
|
|
59
|
+
Config is stored at `~/.config/scholarinboxcli/config.json`. You can override the API base with `SCHOLAR_INBOX_API_BASE`.
|
|
60
|
+
|
|
61
|
+
## Command reference
|
|
62
|
+
|
|
63
|
+
Top-level commands:
|
|
64
|
+
|
|
65
|
+
- `auth` (login/status/logout)
|
|
66
|
+
- `digest`
|
|
67
|
+
- `trending`
|
|
68
|
+
- `search`
|
|
69
|
+
- `semantic`
|
|
70
|
+
- `interactions`
|
|
71
|
+
- `bookmark` (list/add/remove)
|
|
72
|
+
- `collection` (list/create/rename/delete/add/remove/papers/similar)
|
|
73
|
+
- `conference` (list/explore)
|
|
74
|
+
|
|
75
|
+
Run `scholarinboxcli --help` or `scholarinboxcli <command> --help` for full options.
|
|
76
|
+
|
|
77
|
+
## Quickstart
|
|
78
|
+
|
|
79
|
+
```bash
|
|
80
|
+
# Fetch a daily digest by date (MM-DD-YYYY)
|
|
81
|
+
scholarinboxcli digest --date 01-30-2026 --json
|
|
82
|
+
|
|
83
|
+
# Trending papers (last 7 days)
|
|
84
|
+
scholarinboxcli trending --category ALL --days 7 --json
|
|
85
|
+
|
|
86
|
+
# Keyword search
|
|
87
|
+
scholarinboxcli search "transformers" --limit 5 --json
|
|
88
|
+
|
|
89
|
+
# Semantic search
|
|
90
|
+
scholarinboxcli semantic "graph neural networks" --limit 5 --json
|
|
91
|
+
|
|
92
|
+
# List your bookmarks
|
|
93
|
+
scholarinboxcli bookmark list --json
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
## Collections
|
|
97
|
+
|
|
98
|
+
```bash
|
|
99
|
+
# List collections
|
|
100
|
+
scholarinboxcli collection list
|
|
101
|
+
|
|
102
|
+
# Expanded collection names (marks which collections are expanded server-side)
|
|
103
|
+
scholarinboxcli collection list --expanded
|
|
104
|
+
|
|
105
|
+
# Create, rename, delete
|
|
106
|
+
scholarinboxcli collection create "My Collection"
|
|
107
|
+
|
|
108
|
+
# Rename by ID (or name)
|
|
109
|
+
scholarinboxcli collection rename 10759 "New Name"
|
|
110
|
+
|
|
111
|
+
# Delete by ID (or name)
|
|
112
|
+
scholarinboxcli collection delete 10759
|
|
113
|
+
|
|
114
|
+
# Add/remove papers
|
|
115
|
+
scholarinboxcli collection add 10759 4559909
|
|
116
|
+
scholarinboxcli collection remove 10759 4559909
|
|
117
|
+
|
|
118
|
+
# Show papers in a collection
|
|
119
|
+
scholarinboxcli collection papers 10759
|
|
120
|
+
|
|
121
|
+
# Similar papers for one or more collections
|
|
122
|
+
scholarinboxcli collection similar 10759 12345
|
|
123
|
+
|
|
124
|
+
# You can also use collection names (case-insensitive). The CLI will
|
|
125
|
+
# automatically fetch collection ID mappings from the API when needed.
|
|
126
|
+
scholarinboxcli collection papers "AIAgents"
|
|
127
|
+
scholarinboxcli collection similar "AIAgents" "Benchmark"
|
|
128
|
+
```
|
|
129
|
+
|
|
130
|
+
Collection name matching is exact → prefix → contains. If multiple matches exist, the CLI reports ambiguity and shows candidate IDs.
|
|
131
|
+
|
|
132
|
+
## Search
|
|
133
|
+
|
|
134
|
+
```bash
|
|
135
|
+
# Full-text keyword search
|
|
136
|
+
scholarinboxcli search "transformers" --limit 5
|
|
137
|
+
```
|
|
138
|
+
|
|
139
|
+
## Semantic Search
|
|
140
|
+
|
|
141
|
+
```bash
|
|
142
|
+
# Semantic similarity search
|
|
143
|
+
scholarinboxcli semantic "graph neural networks" --limit 5
|
|
144
|
+
```
|
|
145
|
+
|
|
146
|
+
## Other commands
|
|
147
|
+
|
|
148
|
+
```bash
|
|
149
|
+
# Daily digest view (MM-DD-YYYY)
|
|
150
|
+
scholarinboxcli digest --date 01-30-2026
|
|
151
|
+
|
|
152
|
+
# Trending papers by category
|
|
153
|
+
scholarinboxcli trending --category ALL --days 7
|
|
154
|
+
|
|
155
|
+
# Read/like/dislike interactions feed
|
|
156
|
+
scholarinboxcli interactions --type all
|
|
157
|
+
|
|
158
|
+
# List bookmarks
|
|
159
|
+
scholarinboxcli bookmark list
|
|
160
|
+
|
|
161
|
+
# List known conferences
|
|
162
|
+
scholarinboxcli conference list
|
|
163
|
+
|
|
164
|
+
# Explore conference indices
|
|
165
|
+
scholarinboxcli conference explore
|
|
166
|
+
```
|
|
167
|
+
|
|
168
|
+
## Output modes
|
|
169
|
+
|
|
170
|
+
- TTY: Rich tables
|
|
171
|
+
- `--json`: pretty JSON
|
|
172
|
+
- Piped: pretty JSON (auto)
|
|
173
|
+
|
|
174
|
+
Examples for agents/scripting:
|
|
175
|
+
|
|
176
|
+
```bash
|
|
177
|
+
# Auto-JSON when piped
|
|
178
|
+
scholarinboxcli collection list | jq '.'
|
|
179
|
+
|
|
180
|
+
# Explicit JSON output
|
|
181
|
+
scholarinboxcli collection papers "AIAgents" --json
|
|
182
|
+
|
|
183
|
+
# JSON for automation (stable keys)
|
|
184
|
+
scholarinboxcli search "diffusion" --json
|
|
185
|
+
```
|
|
186
|
+
|
|
187
|
+
## Tested (2026-02-01)
|
|
188
|
+
|
|
189
|
+
The following commands were exercised against the live API (with a valid magic-link login) to confirm behavior:
|
|
190
|
+
|
|
191
|
+
```bash
|
|
192
|
+
scholarinboxcli --help
|
|
193
|
+
scholarinboxcli auth status --json
|
|
194
|
+
scholarinboxcli digest --date 01-30-2026 --json
|
|
195
|
+
scholarinboxcli trending --category ALL --days 7 --json
|
|
196
|
+
scholarinboxcli search "transformers" --limit 5 --json
|
|
197
|
+
scholarinboxcli semantic "graph neural networks" --limit 5 --json
|
|
198
|
+
scholarinboxcli interactions --type all --json
|
|
199
|
+
scholarinboxcli bookmark list --json
|
|
200
|
+
scholarinboxcli bookmark add 3302478 --json
|
|
201
|
+
scholarinboxcli bookmark remove 3302478 --json
|
|
202
|
+
scholarinboxcli collection list --json
|
|
203
|
+
scholarinboxcli collection list --expanded --json
|
|
204
|
+
scholarinboxcli collection papers "AIAgents" --json
|
|
205
|
+
scholarinboxcli collection similar "AIAgents" --json
|
|
206
|
+
scholarinboxcli conference list --json
|
|
207
|
+
scholarinboxcli conference explore --json
|
|
208
|
+
```
|
|
209
|
+
|
|
210
|
+
## Notes
|
|
211
|
+
|
|
212
|
+
- Some collection mutations (create/rename/delete/add/remove) rely on best-effort endpoints that may change on the service side. If a mutation fails, try again or use the web UI to validate the current behavior.
|
|
213
|
+
- Similar papers for collections uses the server endpoint used by the web UI. Results typically appear under `digest_df` in JSON responses.
|
|
214
|
+
|
|
215
|
+
## Publish to PyPI
|
|
216
|
+
|
|
217
|
+
```bash
|
|
218
|
+
# 1) Build sdist + wheel
|
|
219
|
+
uv run --with build python -m build
|
|
220
|
+
|
|
221
|
+
# 2) Validate metadata/rendering
|
|
222
|
+
uvx twine check dist/*
|
|
223
|
+
|
|
224
|
+
# 3) (Optional) test publish first
|
|
225
|
+
uvx twine upload --repository testpypi dist/*
|
|
226
|
+
|
|
227
|
+
# 4) Publish to PyPI
|
|
228
|
+
uvx twine upload dist/*
|
|
229
|
+
```
|
|
230
|
+
|
|
231
|
+
If using an API token:
|
|
232
|
+
|
|
233
|
+
```bash
|
|
234
|
+
export TWINE_USERNAME=__token__
|
|
235
|
+
export TWINE_PASSWORD=<your-pypi-token>
|
|
236
|
+
```
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
scholarinboxcli/__init__.py,sha256=kUR5RAFc7HCeiqdlX36dZOHkUI5wI6V_43RpEcD8b-0,22
|
|
2
|
+
scholarinboxcli/cli.py,sha256=bugBfT66k5EuVPashcG6Pz0IRM0KtRz_c6lNOalm4pk,19242
|
|
3
|
+
scholarinboxcli/config.py,sha256=cxp1RzNwzT6Iu225EPvs8NhH2YTTMg9fQBjaYIRVDoc,1545
|
|
4
|
+
scholarinboxcli/api/client.py,sha256=TE8pIRXh7pHhQGto9CvRpPPT61SJp9SzNhRnQ-2U4M4,13723
|
|
5
|
+
scholarinboxcli/formatters/json_fmt.py,sha256=Ntcp4EqHugCXg79RIF62c7QHa-lexptLDDTT3IEP65U,197
|
|
6
|
+
scholarinboxcli/formatters/table.py,sha256=GnzpmSJ7M_yq-R-c8no8SE9vXbycvWWPUu6hV4tcJAA,2133
|
|
7
|
+
scholarinboxcli-0.1.0.dist-info/METADATA,sha256=JZISdlP49Ezyh-UoLv2FlJIrliHUvZrnj0IvFNWKCMw,6062
|
|
8
|
+
scholarinboxcli-0.1.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
9
|
+
scholarinboxcli-0.1.0.dist-info/entry_points.txt,sha256=iescoEMF_CPwSNSmvlzNDl5pT2VpBL9_1bIq_FFIAKc,60
|
|
10
|
+
scholarinboxcli-0.1.0.dist-info/RECORD,,
|