google-news-trends-mcp 0.1.8__py3-none-any.whl → 0.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- google_news_trends_mcp/__init__.py +1 -1
- google_news_trends_mcp/news.py +30 -29
- google_news_trends_mcp/server.py +3 -6
- {google_news_trends_mcp-0.1.8.dist-info → google_news_trends_mcp-0.1.9.dist-info}/METADATA +3 -3
- google_news_trends_mcp-0.1.9.dist-info/RECORD +11 -0
- google_news_trends_mcp-0.1.8.dist-info/RECORD +0 -11
- {google_news_trends_mcp-0.1.8.dist-info → google_news_trends_mcp-0.1.9.dist-info}/WHEEL +0 -0
- {google_news_trends_mcp-0.1.8.dist-info → google_news_trends_mcp-0.1.9.dist-info}/entry_points.txt +0 -0
- {google_news_trends_mcp-0.1.8.dist-info → google_news_trends_mcp-0.1.9.dist-info}/licenses/LICENSE +0 -0
- {google_news_trends_mcp-0.1.8.dist-info → google_news_trends_mcp-0.1.9.dist-info}/top_level.txt +0 -0
google_news_trends_mcp/__init__.py
CHANGED
@@ -1,2 +1,2 @@
 import logging
-logging.getLogger(__name__).addHandler(logging.NullHandler())
+logging.getLogger(__name__).addHandler(logging.NullHandler())
google_news_trends_mcp/news.py
CHANGED
@@ -8,7 +8,6 @@ It will fallback to using Playwright for websites that are difficult to scrape w
 
 import re
 import json
-import time
 import asyncio
 from gnews import GNews
 import newspaper  # newspaper4k
@@ -16,7 +15,6 @@ from googlenewsdecoder import gnewsdecoder
 import cloudscraper
 from playwright.async_api import async_playwright, Browser, Playwright
 from trendspy import Trends, TrendKeyword
-import click
 from typing import Optional, cast, overload, Literal, Awaitable
 import atexit
 from contextlib import asynccontextmanager
@@ -97,28 +95,15 @@ async def download_article_with_playwright(url) -> newspaper.Article | None:
         await page.goto(url, wait_until="domcontentloaded")
         await asyncio.sleep(2)  # Wait for the page to load completely
         content = await page.content()
-        article = newspaper.article(url, input_html=content
+        article = newspaper.article(url, input_html=content)
         return article
     except Exception as e:
         logging.warning(f"Error downloading article with Playwright from {url}\n {e.args}")
         return None
 
 
-
-"""
-Download an article from a given URL using newspaper4k and cloudscraper (async).
-"""
+def download_article_with_scraper(url) -> newspaper.Article | None:
     article = None
-    if url.startswith("https://news.google.com/rss/"):
-        try:
-            decoded_url = gnewsdecoder(url)
-            if decoded_url.get("status"):
-                url = decoded_url["decoded_url"]
-            else:
-                logging.debug("Failed to decode Google News RSS link:")
-                return None
-        except Exception as err:
-            logging.warning(f"Error while decoding url {url}\n {err.args}")
     try:
         article = newspaper.article(url)
     except Exception as e:
@@ -134,19 +119,32 @@ async def download_article(url: str, nlp: bool = True) -> newspaper.Article | None:
         )
     except Exception as e:
         logging.debug(f"Error downloading article with cloudscraper from {url}\n {e.args}")
+    return article
 
-
-
-
-
-
-
-
-
-
-
-
+
+def decode_url(url: str) -> str:
+    if url.startswith("https://news.google.com/rss/"):
+        try:
+            decoded_url = gnewsdecoder(url)
+            if decoded_url.get("status"):
+                url = decoded_url["decoded_url"]
+            else:
+                logging.debug("Failed to decode Google News RSS link:")
+                return ""
+        except Exception as err:
+            logging.warning(f"Error while decoding url {url}\n {err.args}")
+    return url
+
+
+async def download_article(url: str) -> newspaper.Article | None:
+    """
+    Download an article from a given URL using newspaper4k and cloudscraper (async).
+    """
+    if not (url := decode_url(url)):
         return None
+    article = download_article_with_scraper(url)
+    if article is None or not article.text:
+        article = await download_article_with_playwright(url)
     return article
 
 
@@ -162,10 +160,13 @@ async def process_gnews_articles(
     articles = []
     total = len(gnews_articles)
     for idx, gnews_article in enumerate(gnews_articles):
-        article = await download_article(gnews_article["url"]
+        article = await download_article(gnews_article["url"])
         if article is None or not article.text:
             logging.debug(f"Failed to download article from {gnews_article['url']}:\n{article}")
             continue
+        article.parse()
+        if nlp:
+            article.nlp()
         articles.append(article)
         if report_progress:
             await report_progress(idx, total)
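Taken together, the news.py changes pull Google News RSS decoding out of download_article_with_scraper into a standalone decode_url helper, drop the nlp flag from download_article, and leave parsing to the caller. A condensed sketch of the new flow, assembled from the added lines above with the try/except error handling trimmed (download_article_with_scraper and download_article_with_playwright are the helpers defined earlier in news.py):

def decode_url(url: str) -> str:
    # Google News RSS links are redirects; resolve them to the real article URL.
    if url.startswith("https://news.google.com/rss/"):
        decoded_url = gnewsdecoder(url)
        if not decoded_url.get("status"):
            return ""  # empty string tells the caller the decode failed
        url = decoded_url["decoded_url"]
    return url

async def download_article(url: str) -> newspaper.Article | None:
    # Bail out early when the RSS link cannot be decoded.
    if not (url := decode_url(url)):
        return None
    # Try the newspaper4k/cloudscraper path first, then fall back to Playwright rendering.
    article = download_article_with_scraper(url)
    if article is None or not article.text:
        article = await download_article_with_playwright(url)
    return article

Callers now handle parsing themselves: process_gnews_articles awaits download_article, then calls article.parse() and, only when nlp is requested, article.nlp(), as shown in the last hunk above.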
google_news_trends_mcp/server.py
CHANGED
@@ -1,7 +1,5 @@
-from typing import Annotated,
+from typing import Annotated, Optional, Any, TYPE_CHECKING
 from fastmcp import FastMCP, Context
-from fastmcp.exceptions import ToolError
-from fastmcp.server.dependencies import get_context
 from fastmcp.server.middleware.timing import TimingMiddleware
 from fastmcp.server.middleware.logging import LoggingMiddleware
 from fastmcp.server.middleware.rate_limiting import RateLimitingMiddleware
@@ -134,15 +132,14 @@ async def summarize_article(article: Article, ctx: Context) -> None:
     if article.text:
         prompt = f"Please provide a concise summary of the following news article:\n\n{article.text}"
         response = await ctx.sample(prompt)
-        # response = cast(TextContent, response)
         if isinstance(response, TextContent):
             if not response.text:
-                await ctx.warning("
+                await ctx.warning("LLM Sampling response is empty. Unable to summarize article.")
                 article.summary = "No summary available."
             else:
                 article.summary = response.text
         else:
-            await ctx.warning("
+            await ctx.warning("LLM Sampling response is not a TextContent object. Unable to summarize article.")
             article.summary = "No summary available."
     else:
         article.summary = "No summary available."
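Reassembled from the context and added lines, the summarize_article helper in server.py now reads roughly as follows (a sketch that assumes the Article, Context, and TextContent names already used elsewhere in the file):

async def summarize_article(article: Article, ctx: Context) -> None:
    if article.text:
        prompt = f"Please provide a concise summary of the following news article:\n\n{article.text}"
        # Ask the connected MCP client to run LLM sampling over the prompt.
        response = await ctx.sample(prompt)
        if isinstance(response, TextContent):
            if not response.text:
                await ctx.warning("LLM Sampling response is empty. Unable to summarize article.")
                article.summary = "No summary available."
            else:
                article.summary = response.text
        else:
            await ctx.warning("LLM Sampling response is not a TextContent object. Unable to summarize article.")
            article.summary = "No summary available."
    else:
        article.summary = "No summary available."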
{google_news_trends_mcp-0.1.8.dist-info → google_news_trends_mcp-0.1.9.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: google-news-trends-mcp
-Version: 0.1.8
+Version: 0.1.9
 Summary: An MCP server to access Google News and Google Trends.
 Author-email: Jesse Manek <jesse.manek@gmail.com>
 License-Expression: MIT
@@ -35,7 +35,7 @@ An MCP server to access Google News and Google Trends. Does not rely on any pai
 
 - Search Google News articles based on keyword, location, topic
 - Get top news stories from Google News
-- Google Trends keywords
+- Google Trends keywords based on location
 - Optional LLM Sampling and NLP to summarize articles and extract keywords
 
 ## Installation
@@ -141,7 +141,7 @@ The following MCP tools are available:
 | **get_top_news** | Fetch the top news stories from Google News. |
 | **get_trending_keywords**| Return trending keywords from Google Trends for a specified location.|
 
-All of the news related tools have an option to summarize the text of the article using
+All of the news related tools have an option to summarize the text of the article using LLM Sampling (if supported) or NLP
 
 
 ## CLI
google_news_trends_mcp-0.1.9.dist-info/RECORD
ADDED
@@ -0,0 +1,11 @@
+google_news_trends_mcp/__init__.py,sha256=NkmudPEEuKk8Geah4EtzeEHQ-ChqR66lZEO5VrMwXNo,77
+google_news_trends_mcp/__main__.py,sha256=ysiAk_xpnnW3lrLlzdIQQa71tuGBRT8WocbecBsY2Fs,87
+google_news_trends_mcp/cli.py,sha256=-Cith02x6-9o91rXpgMM0lrhArPDMB9d3h8AAE1rimw,3959
+google_news_trends_mcp/news.py,sha256=CpNIOJ4NA-BFmiE0d4Jadn20apMTf8vNDMsqZjFVl6A,12707
+google_news_trends_mcp/server.py,sha256=h8GP_XUPqiPw4vFu1jy9MFv0i384rBARePvm15YOZJo,14807
+google_news_trends_mcp-0.1.9.dist-info/licenses/LICENSE,sha256=5dsv2ZI5EZIer0a9MktVmILVrlp5vqH_0tPIe3bRLgE,1067
+google_news_trends_mcp-0.1.9.dist-info/METADATA,sha256=t76FntOxc0t_CFvzcaWB0lVdXmcv5J9SnLCcIYMwcfY,4520
+google_news_trends_mcp-0.1.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+google_news_trends_mcp-0.1.9.dist-info/entry_points.txt,sha256=eVT3xd6YJQgsWAUBwhnffuwhXNF7yyt_uco6fjBy-1o,130
+google_news_trends_mcp-0.1.9.dist-info/top_level.txt,sha256=RFheDbzhNnEV_Y3iFNm7jhRhY1P1wQgfiYqVpXCTD_U,23
+google_news_trends_mcp-0.1.9.dist-info/RECORD,,

google_news_trends_mcp-0.1.8.dist-info/RECORD
DELETED
@@ -1,11 +0,0 @@
-google_news_trends_mcp/__init__.py,sha256=J9O5WNvC9cNDaxecveSUvzLGOXOYO-pCHbiGopfYoIc,76
-google_news_trends_mcp/__main__.py,sha256=ysiAk_xpnnW3lrLlzdIQQa71tuGBRT8WocbecBsY2Fs,87
-google_news_trends_mcp/cli.py,sha256=-Cith02x6-9o91rXpgMM0lrhArPDMB9d3h8AAE1rimw,3959
-google_news_trends_mcp/news.py,sha256=Anxs65Fxq1Qz_tkmVyTDY3Fn-I0dv0xR3ipDrLBc6gw,12851
-google_news_trends_mcp/server.py,sha256=promIVXRcd1ZUSgFClZ73l2scXlsS-joRHv1AZs73SE,14946
-google_news_trends_mcp-0.1.8.dist-info/licenses/LICENSE,sha256=5dsv2ZI5EZIer0a9MktVmILVrlp5vqH_0tPIe3bRLgE,1067
-google_news_trends_mcp-0.1.8.dist-info/METADATA,sha256=j0yuLLp3OaCYS07o-Q5vNVs3xCk5HHAMcGVUn7kT2TI,4495
-google_news_trends_mcp-0.1.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-google_news_trends_mcp-0.1.8.dist-info/entry_points.txt,sha256=eVT3xd6YJQgsWAUBwhnffuwhXNF7yyt_uco6fjBy-1o,130
-google_news_trends_mcp-0.1.8.dist-info/top_level.txt,sha256=RFheDbzhNnEV_Y3iFNm7jhRhY1P1wQgfiYqVpXCTD_U,23
-google_news_trends_mcp-0.1.8.dist-info/RECORD,,
{google_news_trends_mcp-0.1.8.dist-info → google_news_trends_mcp-0.1.9.dist-info}/WHEEL
RENAMED
File without changes
{google_news_trends_mcp-0.1.8.dist-info → google_news_trends_mcp-0.1.9.dist-info}/entry_points.txt
RENAMED
File without changes
{google_news_trends_mcp-0.1.8.dist-info → google_news_trends_mcp-0.1.9.dist-info}/licenses/LICENSE
RENAMED
File without changes
{google_news_trends_mcp-0.1.8.dist-info → google_news_trends_mcp-0.1.9.dist-info}/top_level.txt
RENAMED
File without changes