academia-mcp 1.8.1__py3-none-any.whl → 1.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
academia_mcp/files.py CHANGED
@@ -1,7 +1,8 @@
- import os
  from typing import Optional
  from pathlib import Path

+ from academia_mcp.settings import settings
+
  DIR_PATH = Path(__file__).parent
  ROOT_PATH = DIR_PATH.parent
  DEFAULT_WORKSPACE_DIR_PATH: Path = DIR_PATH / "workdir"
@@ -14,7 +15,7 @@ class WorkspaceDirectory:
      @classmethod
      def get_dir(cls) -> Path:
          if cls.workspace_dir is None:
-             return Path(os.getenv("WORKSPACE_DIR", DEFAULT_WORKSPACE_DIR_PATH))
+             return Path(settings.WORKSPACE_DIR)
          return cls.workspace_dir

      @classmethod
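A minimal sketch of how the workspace path now flows from the environment into WorkspaceDirectory (the directory value and the import ordering are illustrative assumptions; settings is built at import time, so the variable has to be set before any academia_mcp module is imported):

```python
import os

# Must be set before the first academia_mcp import, because
# academia_mcp.settings instantiates Settings() at import time.
os.environ["WORKSPACE_DIR"] = "/tmp/academia_workdir"

from academia_mcp.files import WorkspaceDirectory

print(WorkspaceDirectory.get_dir())  # -> /tmp/academia_workdir
```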
academia_mcp/llm.py CHANGED
@@ -1,10 +1,11 @@
- import os
  from typing import List, Dict, Any

  from pydantic import BaseModel
  from openai import AsyncOpenAI
  from openai.types.chat.chat_completion_message import ChatCompletionMessage

+ from academia_mcp.settings import settings
+

  class ChatMessage(BaseModel): # type: ignore
      role: str
@@ -15,9 +16,9 @@ ChatMessages = List[ChatMessage]


  async def llm_acall(model_name: str, messages: ChatMessages, **kwargs: Any) -> str:
-     key = os.getenv("OPENROUTER_API_KEY", "")
+     key = settings.OPENROUTER_API_KEY
      assert key, "Please set OPENROUTER_API_KEY in the environment variables"
-     base_url = os.getenv("BASE_URL", "https://openrouter.ai/api/v1")
+     base_url = settings.BASE_URL

      client = AsyncOpenAI(base_url=base_url, api_key=key)
      response: ChatCompletionMessage = (
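A minimal usage sketch of the settings-backed helper (the model name and prompt are placeholders; OPENROUTER_API_KEY must be visible to settings for the assertion to pass):

```python
import asyncio

from academia_mcp.llm import llm_acall, ChatMessage


async def main() -> None:
    # llm_acall routes the request through the OpenRouter-compatible
    # endpoint configured in settings.BASE_URL.
    answer = await llm_acall(
        model_name="deepseek/deepseek-chat-v3-0324",
        messages=[ChatMessage(role="user", content="Reply with a single word.")],
    )
    print(answer)


asyncio.run(main())
```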
academia_mcp/server.py CHANGED
@@ -1,34 +1,45 @@
- import os
  import socket
+ import logging
+ from logging.config import dictConfig
  from typing import Optional, Literal

  import fire # type: ignore
  from mcp.server.fastmcp import FastMCP
- from dotenv import load_dotenv
+ from uvicorn.config import LOGGING_CONFIG as UVICORN_LOGGING_CONFIG

- from .tools.arxiv_search import arxiv_search
- from .tools.arxiv_download import arxiv_download
- from .tools.s2_citations import s2_get_citations, s2_get_references
- from .tools.hf_datasets_search import hf_datasets_search
- from .tools.anthology_search import anthology_search
- from .tools.document_qa import document_qa
- from .tools.latex import (
+ from academia_mcp.settings import settings
+ from academia_mcp.tools.arxiv_search import arxiv_search
+ from academia_mcp.tools.arxiv_download import arxiv_download
+ from academia_mcp.tools.s2_citations import s2_get_citations, s2_get_references
+ from academia_mcp.tools.hf_datasets_search import hf_datasets_search
+ from academia_mcp.tools.anthology_search import anthology_search
+ from academia_mcp.tools.document_qa import document_qa
+ from academia_mcp.tools.latex import (
      compile_latex,
      get_latex_template,
      get_latex_templates_list,
      read_pdf,
  )
- from .tools.web_search import web_search, tavily_web_search, exa_web_search, brave_web_search
- from .tools.visit_webpage import visit_webpage
- from .tools.bitflip import (
+ from academia_mcp.tools.web_search import (
+     web_search,
+     tavily_web_search,
+     exa_web_search,
+     brave_web_search,
+ )
+ from academia_mcp.tools.visit_webpage import visit_webpage
+ from academia_mcp.tools.bitflip import (
      extract_bitflip_info,
      generate_research_proposals,
      score_research_proposals,
  )
- from .tools.review import review_pdf_paper, download_pdf_paper
+ from academia_mcp.tools.review import review_pdf_paper, download_pdf_paper


- load_dotenv()
+ def configure_uvicorn_style_logging(level: int = logging.INFO) -> None:
+     config = {**UVICORN_LOGGING_CONFIG}
+     config["disable_existing_loggers"] = False
+     config["root"] = {"handlers": ["default"], "level": logging.getLevelName(level)}
+     dictConfig(config)


  def find_free_port() -> int:
@@ -51,12 +62,14 @@ def run(
      disable_web_search_tools: bool = False,
      disable_llm_tools: bool = False,
  ) -> None:
+     configure_uvicorn_style_logging()
      server = FastMCP(
          "Academia MCP",
          stateless_http=True,
          streamable_http_path=streamable_http_path,
          mount_path=mount_path,
      )
+     logger = logging.getLogger(__name__)

      server.add_tool(arxiv_search)
      server.add_tool(arxiv_download)
@@ -64,33 +77,45 @@ def run(
      server.add_tool(s2_get_references)
      server.add_tool(hf_datasets_search)
      server.add_tool(anthology_search)
-     server.add_tool(compile_latex)
      server.add_tool(get_latex_template)
      server.add_tool(get_latex_templates_list)
      server.add_tool(visit_webpage)
-     server.add_tool(download_pdf_paper)
-     server.add_tool(read_pdf)
+
+     if settings.WORKSPACE_DIR:
+         server.add_tool(compile_latex)
+         server.add_tool(download_pdf_paper)
+         server.add_tool(read_pdf)
+     else:
+         logger.warning(
+             "WORKSPACE_DIR is not set, compile_latex/download_pdf_paper/read_pdf will not be available!"
+         )

      if not disable_web_search_tools:
-         if os.getenv("TAVILY_API_KEY"):
+         if settings.TAVILY_API_KEY:
              server.add_tool(tavily_web_search)
-         if os.getenv("EXA_API_KEY"):
+         if settings.EXA_API_KEY:
              server.add_tool(exa_web_search)
-         if os.getenv("BRAVE_API_KEY"):
+         if settings.BRAVE_API_KEY:
              server.add_tool(brave_web_search)
-         if os.getenv("EXA_API_KEY") or os.getenv("BRAVE_API_KEY") or os.getenv("TAVILY_API_KEY"):
+         if settings.EXA_API_KEY or settings.BRAVE_API_KEY or settings.TAVILY_API_KEY:
              server.add_tool(web_search)
+         else:
+             logger.warning("No web search tools keys are set, web_search will not be available!")

-     if not disable_llm_tools and os.getenv("OPENROUTER_API_KEY"):
+     if not disable_llm_tools and settings.OPENROUTER_API_KEY:
          server.add_tool(extract_bitflip_info)
          server.add_tool(generate_research_proposals)
          server.add_tool(score_research_proposals)
          server.add_tool(document_qa)
-         server.add_tool(review_pdf_paper)
+         if settings.WORKSPACE_DIR:
+             server.add_tool(review_pdf_paper)
+     else:
+         logger.warning("No OpenRouter API key is set, LLM-related tools will not be available!")

      if port is None:
-         port = int(os.environ.get("PORT", -1))
-         if port == -1:
+         if settings.PORT is not None:
+             port = int(settings.PORT)
+         else:
              port = find_free_port()
      server.settings.port = port
      server.settings.host = host
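A sketch of launching the reworked server programmatically (the environment values are illustrative, and it is an assumption that the remaining run parameters keep their defaults; the variables are set before the import because settings is created at import time):

```python
import os

# Illustrative values; with WORKSPACE_DIR unset, the LaTeX/PDF tools are
# skipped and a warning is logged instead.
os.environ["WORKSPACE_DIR"] = "/tmp/academia_workdir"
os.environ["OPENROUTER_API_KEY"] = "sk-or-..."  # placeholder

from academia_mcp.server import run

# Assumes host, transport paths, and the tool toggles keep their defaults;
# if port is omitted, it falls back to settings.PORT or a free port.
run(port=5056)
```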
academia_mcp/settings.py ADDED
@@ -0,0 +1,33 @@
+ from pathlib import Path
+ from typing import Optional
+
+ from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+ class Settings(BaseSettings):
+     BASE_URL: str = "https://openrouter.ai/api/v1"
+
+     OPENROUTER_API_KEY: str = ""
+     TAVILY_API_KEY: Optional[str] = None
+     EXA_API_KEY: Optional[str] = None
+     BRAVE_API_KEY: Optional[str] = None
+
+     REVIEW_MODEL_NAME: str = "gpt-5"
+     BITFLIP_MODEL_NAME: str = "deepseek/deepseek-chat-v3-0324"
+     BITFLIP_MAX_COMPLETION_TOKENS: int = 16384
+     DOCUMENT_QA_MODEL_NAME: str = "deepseek/deepseek-chat-v3-0324"
+     DOCUMENT_QA_QUESTION_MAX_LENGTH: int = 10000
+     DOCUMENT_QA_DOCUMENT_MAX_LENGTH: int = 200000
+
+     PORT: int = 5056
+     WORKSPACE_DIR: Optional[Path] = None
+
+     model_config = SettingsConfigDict(
+         env_file=".env",
+         env_file_encoding="utf-8",
+         env_prefix="",
+         extra="ignore",
+     )
+
+
+ settings = Settings()
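The new Settings class centralizes what used to be scattered os.getenv defaults; pydantic-settings fills the fields from the process environment or a local .env file. A small sketch (the override values are illustrative only):

```python
import os

os.environ["OPENROUTER_API_KEY"] = "sk-or-..."               # placeholder key
os.environ["DOCUMENT_QA_MODEL_NAME"] = "openai/gpt-4o-mini"  # illustrative override

from academia_mcp.settings import Settings

local_settings = Settings()  # re-read the environment for this sketch
print(local_settings.BASE_URL)                # default: https://openrouter.ai/api/v1
print(local_settings.DOCUMENT_QA_MODEL_NAME)  # picked up from the environment
```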
academia_mcp/tools/__init__.py CHANGED
@@ -13,7 +13,7 @@ from .latex import (
  from .web_search import web_search, tavily_web_search, exa_web_search, brave_web_search
  from .visit_webpage import visit_webpage
  from .bitflip import extract_bitflip_info, generate_research_proposals, score_research_proposals
- from .review import review_pdf_paper, download_pdf_paper
+ from .review import review_pdf_paper, download_pdf_paper, review_pdf_paper_by_url

  __all__ = [
      "arxiv_search",
@@ -35,6 +35,7 @@ __all__ = [
      "generate_research_proposals",
      "score_research_proposals",
      "review_pdf_paper",
+     "review_pdf_paper_by_url",
      "download_pdf_paper",
      "read_pdf",
  ]
academia_mcp/tools/bitflip.py CHANGED
@@ -2,7 +2,6 @@
  # https://web.stanford.edu/class/cs197c/slides/02-literature-search.pdf

  import json
- import os
  import random
  from typing import List, Optional, Any, Dict

@@ -12,6 +11,7 @@ from datasets import load_dataset # type: ignore
  from academia_mcp.tools.arxiv_download import arxiv_download
  from academia_mcp.utils import extract_json, encode_prompt
  from academia_mcp.llm import llm_acall, ChatMessage
+ from academia_mcp.settings import settings


  class ProposalDataset:
@@ -201,7 +201,7 @@ async def extract_bitflip_info(arxiv_id: str) -> str:
      Args:
          arxiv_id: The arXiv ID of the paper to extract the Bit-Flip information from.
      """
-     model_name = os.getenv("BITFLIP_MODEL_NAME", "deepseek/deepseek-chat-v3-0324")
+     model_name = settings.BITFLIP_MODEL_NAME
      paper = arxiv_download(arxiv_id)
      abstract = json.loads(paper)["abstract"]
      prompt = encode_prompt(EXTRACT_PROMPT, abstract=abstract)
@@ -240,8 +240,8 @@ async def generate_research_proposals(
      ]
      Use `json.loads` to deserialize the result if you want to get specific items.
      """
-     model_name = os.getenv("BITFLIP_MODEL_NAME", "deepseek/deepseek-chat-v3-0324")
-     max_completion_tokens = int(os.getenv("BITFLIP_MAX_COMPLETION_TOKENS", 16384))
+     model_name = settings.BITFLIP_MODEL_NAME
+     max_completion_tokens = int(settings.BITFLIP_MAX_COMPLETION_TOKENS)
      examples = ProposalDataset.get_dataset()[:]
      examples = random.choices(examples, k=2)

@@ -293,7 +293,7 @@ async def score_research_proposals(proposals: str | List[str | Dict[str, Any] |
      Args:
          proposals: A list of JSON strings with research proposals.
      """
-     model_name = os.getenv("BITFLIP_MODEL_NAME", "deepseek/deepseek-chat-v3-0324")
+     model_name = settings.BITFLIP_MODEL_NAME
      if isinstance(proposals, str):
          proposals = json.loads(proposals)
      assert isinstance(proposals, list), "Proposals should be a list of JSON strings"
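Usage sketch for the settings-driven Bit-Flip extraction (the arXiv ID is an arbitrary example; the call needs OPENROUTER_API_KEY so that settings.BITFLIP_MODEL_NAME can actually be queried):

```python
import asyncio

from academia_mcp.tools.bitflip import extract_bitflip_info

# Downloads the paper's abstract via arxiv_download and asks the configured
# model to extract the Bit-Flip structure as JSON.
bitflip_json = asyncio.run(extract_bitflip_info("1706.03762"))
print(bitflip_json)
```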
academia_mcp/tools/document_qa.py CHANGED
@@ -1,14 +1,12 @@
- import os
  import json
  from typing import List, Any, Dict
- from dotenv import load_dotenv

  from pydantic import BaseModel

  from academia_mcp.llm import llm_acall
  from academia_mcp.utils import truncate_content
+ from academia_mcp.settings import settings

- load_dotenv()

  PROMPT = """You are a helpful assistant that answers questions about documents accurately and concisely.
  Please answer the following questions based solely on the provided document.
@@ -65,10 +63,10 @@ async def document_qa(
          document = json.dumps(document)
      assert document and document.strip(), "Please provide non-empty 'document'"

-     question = truncate_content(question, 10000)
-     document = truncate_content(document, 200000)
+     question = truncate_content(question, settings.DOCUMENT_QA_QUESTION_MAX_LENGTH)
+     document = truncate_content(document, settings.DOCUMENT_QA_DOCUMENT_MAX_LENGTH)

-     model_name = os.getenv("DOCUMENT_QA_MODEL_NAME", "deepseek/deepseek-chat-v3-0324")
+     model_name = settings.DOCUMENT_QA_MODEL_NAME
      prompt = PROMPT.format(question=question, document=document)
      content = await llm_acall(
          model_name=model_name, messages=[ChatMessage(role="user", content=prompt)]
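Usage sketch for document_qa under the new length limits (the keyword names follow the visible parameter usage and the document text is a placeholder):

```python
import asyncio

from academia_mcp.tools.document_qa import document_qa

# Question and document are truncated to DOCUMENT_QA_QUESTION_MAX_LENGTH and
# DOCUMENT_QA_DOCUMENT_MAX_LENGTH from settings before the LLM call.
answer = asyncio.run(
    document_qa(
        question="What is the main claim of this document?",
        document="Placeholder document text ...",
    )
)
print(answer)
```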
academia_mcp/tools/review.py CHANGED
@@ -1,6 +1,6 @@
  import base64
- import os
  import uuid
+ import tempfile
  from io import BytesIO
  from pathlib import Path
  from typing import List, Dict, Any
@@ -8,6 +8,7 @@ from typing import List, Dict, Any
  from academia_mcp.pdf import parse_pdf_file_to_images, parse_pdf_file, download_pdf
  from academia_mcp.llm import llm_acall, ChatMessage
  from academia_mcp.files import get_workspace_dir
+ from academia_mcp.settings import settings


  PROMPT = """
@@ -138,6 +139,16 @@ Always produce a correct JSON object.
  """


+ def _create_pdf_filename(pdf_url: str) -> str:
+     if "arxiv.org/pdf" in pdf_url:
+         pdf_filename = pdf_url.split("/")[-1]
+     else:
+         pdf_filename = str(uuid.uuid4())
+     if not pdf_filename.endswith(".pdf"):
+         pdf_filename += ".pdf"
+     return pdf_filename
+
+
  def download_pdf_paper(pdf_url: str) -> str:
      """
      Download a pdf file from a url to the workspace directory.
@@ -147,13 +158,7 @@ def download_pdf_paper(pdf_url: str) -> str:
      Args:
          pdf_url: The url of the pdf file.
      """
-     if "arxiv.org/pdf" in pdf_url:
-         pdf_filename = pdf_url.split("/")[-1]
-     else:
-         pdf_filename = str(uuid.uuid4())
-     if not pdf_filename.endswith(".pdf"):
-         pdf_filename += ".pdf"
-
+     pdf_filename = _create_pdf_filename(pdf_url)
      pdf_path = Path(get_workspace_dir()) / pdf_filename
      download_pdf(pdf_url, pdf_path)
      return pdf_filename
@@ -198,7 +203,7 @@ async def review_pdf_paper(pdf_filename: str) -> str:
              "text": "####\n\nInstructions:\n\n" + PROMPT,
          }
      )
-     model_name = os.getenv("REVIEW_MODEL_NAME", "gpt-5")
+     model_name = settings.REVIEW_MODEL_NAME
      llm_response = await llm_acall(
          model_name=model_name,
          messages=[
@@ -206,3 +211,21 @@ async def review_pdf_paper(pdf_filename: str) -> str:
          ],
      )
      return llm_response.strip()
+
+
+ async def review_pdf_paper_by_url(pdf_url: str) -> str:
+     """
+     Review a pdf file with a paper by url.
+     It downloads the pdf file and then reviews it.
+     It parses the pdf file into images and then sends the images to the LLM for review.
+     It can detect different issues with the paper formatting.
+     Returns a proper NeurIPS-style review.
+
+     Args:
+         pdf_url: The url of the pdf file.
+     """
+     pdf_filename = _create_pdf_filename(pdf_url)
+     with tempfile.TemporaryDirectory(prefix="temp_pdf_") as temp_dir:
+         pdf_path = Path(temp_dir) / pdf_filename
+         download_pdf(pdf_url, pdf_path)
+         return await review_pdf_paper(str(pdf_path))
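Usage sketch for the new URL-based review entry point (the arXiv URL is an arbitrary example; the review still goes through settings.REVIEW_MODEL_NAME and therefore needs an OpenRouter key):

```python
import asyncio

from academia_mcp.tools.review import review_pdf_paper_by_url

# The PDF is downloaded into a temporary directory, converted to images,
# and reviewed; nothing is left behind in the workspace directory.
review = asyncio.run(review_pdf_paper_by_url("https://arxiv.org/pdf/1706.03762"))
print(review)
```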
academia_mcp/tools/visit_webpage.py CHANGED
@@ -1,18 +1,18 @@
  import re
- import os
  import json
  from typing import Optional

  from markdownify import markdownify # type: ignore

  from academia_mcp.utils import get_with_retries, post_with_retries
+ from academia_mcp.settings import settings

  EXA_CONTENTS_URL = "https://api.exa.ai/contents"
  AVAILABLE_PROVIDERS = ("basic", "exa")


  def _exa_visit_webpage(url: str) -> str:
-     key = os.getenv("EXA_API_KEY", "")
+     key = settings.EXA_API_KEY or ""
      assert key, "Error: EXA_API_KEY is not set and no api_key was provided"
      payload = {
          "urls": [url],
@@ -38,14 +38,14 @@ def visit_webpage(url: str, provider: Optional[str] = "basic") -> str:
          provider in AVAILABLE_PROVIDERS
      ), f"Invalid provider: {provider}. Available providers: {AVAILABLE_PROVIDERS}"

-     if provider == "exa":
+     if provider == "exa" and settings.EXA_API_KEY:
          return _exa_visit_webpage(url)

      assert provider == "basic"
      response = get_with_retries(url)
      content_type = response.headers.get("content-type", "").lower()
      if not content_type or (not content_type.startswith("text/") and "html" not in content_type):
-         if os.getenv("EXA_API_KEY"):
+         if settings.EXA_API_KEY:
              return _exa_visit_webpage(url)
          return json.dumps(
              {"id": url, "error": f"Unsupported content-type: {content_type or 'unknown'}"}
academia_mcp/tools/web_search.py CHANGED
@@ -1,8 +1,8 @@
- import os
  import json
  from typing import Optional

  from academia_mcp.utils import post_with_retries, get_with_retries
+ from academia_mcp.settings import settings


  EXA_SEARCH_URL = "https://api.exa.ai/search"
@@ -32,9 +32,9 @@ def web_search(
      providers = ("tavily", "brave", "exa")
      assert provider in providers, "Error: provider must be either 'exa', 'tavily' or 'brave'"

-     is_tavily_available = os.getenv("TAVILY_API_KEY") is not None
-     is_exa_available = os.getenv("EXA_API_KEY") is not None
-     is_brave_available = os.getenv("BRAVE_API_KEY") is not None
+     is_tavily_available = bool(settings.TAVILY_API_KEY)
+     is_exa_available = bool(settings.EXA_API_KEY)
+     is_brave_available = bool(settings.BRAVE_API_KEY)
      assert is_tavily_available or is_exa_available or is_brave_available
      availability = {
          "tavily": is_tavily_available,
@@ -76,7 +76,7 @@ def tavily_web_search(query: str, limit: Optional[int] = 20) -> str:
      assert isinstance(limit, int), "Error: limit should be an integer"
      assert 0 < limit <= 25, "Error: limit should be between 1 and 25"

-     key = os.getenv("TAVILY_API_KEY", "")
+     key = settings.TAVILY_API_KEY or ""
      assert key, "Error: TAVILY_API_KEY is not set and no api_key was provided"
      payload = {
          "query": query,
@@ -112,7 +112,7 @@ def exa_web_search(query: str, limit: Optional[int] = 20) -> str:
      assert isinstance(limit, int), "Error: limit should be an integer"
      assert 0 < limit <= 25, "Error: limit should be between 1 and 25"

-     key = os.getenv("EXA_API_KEY", "")
+     key = settings.EXA_API_KEY or ""
      assert key, "Error: EXA_API_KEY is not set and no api_key was provided"
      payload = {
          "query": query,
@@ -151,7 +151,7 @@ def brave_web_search(query: str, limit: Optional[int] = 20) -> str:
      assert isinstance(limit, int), "Error: limit should be an integer"
      assert 0 < limit <= 20, "Error: limit should be between 1 and 20"

-     key = os.getenv("BRAVE_API_KEY", "")
+     key = settings.BRAVE_API_KEY or ""
      assert key, "Error: BRAVE_API_KEY is not set and no api_key was provided"
      payload = {
          "q": query,
academia_mcp-1.8.1.dist-info/METADATA → academia_mcp-1.9.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: academia-mcp
- Version: 1.8.1
+ Version: 1.9.0
  Summary: MCP server that provides different tools to search for scientific publications
  Author-email: Ilya Gusev <phoenixilya@gmail.com>
  Project-URL: Homepage, https://github.com/IlyaGusev/academia_mcp
@@ -29,6 +29,7 @@ Requires-Dist: jinja2>=3.1.6
  Requires-Dist: datasets>=4.0.0
  Requires-Dist: pymupdf>=1.26.4
  Requires-Dist: pillow>=11.3.0
+ Requires-Dist: pydantic-settings>=2.6.0
  Dynamic: license-file

  # Academia MCP
academia_mcp-1.8.1.dist-info/RECORD → academia_mcp-1.9.0.dist-info/RECORD RENAMED
@@ -1,29 +1,30 @@
  academia_mcp/__init__.py,sha256=2Ru2I5u4cE7DrkkAsibDUEF1K6sYtqppb9VyFrRoQKI,94
  academia_mcp/__main__.py,sha256=rcmsOtJd3SA82exjrcGBuxuptcoxF8AXI7jNjiVq2BY,59
- academia_mcp/files.py,sha256=tvt3OPr5q6pAPCZ0XvRHHL9ZWuTXINRZvqjeRFmx5YE,815
- academia_mcp/llm.py,sha256=E0TjWUCjo2q3lONyWMxdppX72m6BdCjsZk-vFLRvGyo,1003
+ academia_mcp/files.py,sha256=ynIt0XbU1Z7EPWkv_hVX0pGKsLlmjYv-MVJLOfi6yzs,817
+ academia_mcp/llm.py,sha256=zpGkuJFf58Ofgys_fi28-47_wJ1a7sIs_yZvI1Si6z0,993
  academia_mcp/pdf.py,sha256=9PlXzHGhb6ay3ldbTdxCcTWvH4TkET3bnb64mgoh9i0,1273
  academia_mcp/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- academia_mcp/server.py,sha256=3qeJaO2udhlFu7AOy7_g3P2DrY-ygYxYsMl-FtlfQFc,3147
+ academia_mcp/server.py,sha256=ufNqg_C75ziQFXSZY7rYc41XA6LUBEAmCrX0RyMopjk,4198
+ academia_mcp/settings.py,sha256=vkhCrglL_OI1W0Me_1vS0WoQwDhpzj_XTfcuOylweYA,907
  academia_mcp/utils.py,sha256=P9U3RjYzcztE0KxXvJSy5wSBaUg2CM9tpByljYrsrl4,4607
  academia_mcp/latex_templates/agents4science_2025/agents4science_2025.sty,sha256=hGcEPCYBJS4vdhWvN_yEaJC4GvT_yDroI94CfY2Oguk,12268
  academia_mcp/latex_templates/agents4science_2025/agents4science_2025.tex,sha256=Tl1QkHXHRopw9VEfWrD3Layr5JP_0gIzVQjL4KXIWqc,15814
- academia_mcp/tools/__init__.py,sha256=V2A8sG3c2OTf4VOKnntdKDq8Z5EiUrjgBJLLffILrR4,1197
+ academia_mcp/tools/__init__.py,sha256=Bf76VHYQtRKXsHukdwmxhDVcaVdtlsnMlHe4nxbcUMI,1253
  academia_mcp/tools/anthology_search.py,sha256=rhFpJZqGLABgr0raDuH0CARBiAJNJtEI4dlMrKNHfDQ,7669
  academia_mcp/tools/arxiv_download.py,sha256=gBY0_Kz0yGtVkLMwn6GrAyfBjovZVgcSMuyy67p65Cw,10474
  academia_mcp/tools/arxiv_search.py,sha256=pzM18qrF3QL03A53w003kE7hQi3s3QKtjgw0m7K88UY,8355
- academia_mcp/tools/bitflip.py,sha256=eihmNk_C_8ZkBcjtJYH6MvZ0rItgIlvHHA0eGLxsvRs,12276
- academia_mcp/tools/document_qa.py,sha256=t9mygYQ7AFIAPiha1nZ-y043luQlkTCBdWb_SDnzEsE,2444
+ academia_mcp/tools/bitflip.py,sha256=1B-EEcDnJjB9YmvVWsGv_Un19Bkeud9SZDw2TpGTCSg,12184
+ academia_mcp/tools/document_qa.py,sha256=Wb2nEEVu9UyPp8ktHWeT9wS2JBle8fb9zRjTNVIDdBE,2463
  academia_mcp/tools/hf_datasets_search.py,sha256=KiBkqT4rXjEN4oc1AWZOPnqN_Go90TQogY5-DUm3LQo,2854
  academia_mcp/tools/latex.py,sha256=B1Leqt1FHY6H3DlUgeYse4LMFpf4-K1FQViXl5MKk8A,6144
  academia_mcp/tools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- academia_mcp/tools/review.py,sha256=9lMMELiRNVpiETqtFEHk551LaFuf1LYx0gXD5Sn7Ve0,10285
+ academia_mcp/tools/review.py,sha256=Va0lFJJKuk-NvWhKS3UZ-Dnuk7CyuDQ4S1nd70D-ffE,11117
  academia_mcp/tools/s2_citations.py,sha256=XZ3a4rsovAiI_D_kIy0GddRHSjpC5Fa_CS8dmB9Qftg,4902
- academia_mcp/tools/visit_webpage.py,sha256=OZdqDkVPIbANyFw5o5jIjU5Rr_dolxrGDs63Ud-GmRM,1966
- academia_mcp/tools/web_search.py,sha256=mobKm4iqKppn8pduZYMzWRo1MQBjkAqmMtrFLI5XY2Y,6296
- academia_mcp-1.8.1.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- academia_mcp-1.8.1.dist-info/METADATA,sha256=PNRwlxz6oRQmZDKqJiMzfsXZQLb1lI4Y8n8s2jVvFCI,3714
- academia_mcp-1.8.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- academia_mcp-1.8.1.dist-info/entry_points.txt,sha256=gxkiKJ74w2FwJpSECpjA3XtCfI5ZfrM6N8cqnwsq4yY,51
- academia_mcp-1.8.1.dist-info/top_level.txt,sha256=CzGpRFsRRJRqWEb1e3SUlcfGqRzOxevZGaJWrtGF8W0,13
- academia_mcp-1.8.1.dist-info/RECORD,,
+ academia_mcp/tools/visit_webpage.py,sha256=uEqZIkMqscZG58Rx2wd6L_OQkGxo5SrzCkoUB55HhC0,2018
+ academia_mcp/tools/web_search.py,sha256=kj3BrPdTVfyTjZ_9Jl2n3YUGzcRZk8diQs6cVSVmPrQ,6293
+ academia_mcp-1.9.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ academia_mcp-1.9.0.dist-info/METADATA,sha256=GQCmidDARqWtdZuOFww8Sdg5CRCGGOIooeHlS6TIlJ8,3754
+ academia_mcp-1.9.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ academia_mcp-1.9.0.dist-info/entry_points.txt,sha256=gxkiKJ74w2FwJpSECpjA3XtCfI5ZfrM6N8cqnwsq4yY,51
+ academia_mcp-1.9.0.dist-info/top_level.txt,sha256=CzGpRFsRRJRqWEb1e3SUlcfGqRzOxevZGaJWrtGF8W0,13
+ academia_mcp-1.9.0.dist-info/RECORD,,