google-news-trends-mcp 0.2.3__py3-none-any.whl → 0.2.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -9,6 +9,9 @@ from google_news_trends_mcp.news import (
  save_article_to_json,
  BrowserManager,
  )
+ import logging
+
+ logger = logging.getLogger(__name__)
 
 
  @click.group()
@@ -32,7 +35,7 @@ def keyword(keyword, period, max_results, no_nlp):
  async def _keyword():
  articles = await get_news_by_keyword(keyword, period=period, max_results=max_results, nlp=not no_nlp)
  print_articles(articles)
- print(f"Found {len(articles)} articles for keyword '{keyword}'.")
+ logger.info(f"Found {len(articles)} articles for keyword '{keyword}'.")
 
  asyncio.run(_keyword())
 
@@ -53,7 +56,7 @@ def location(location, period, max_results, no_nlp):
  async def _location():
  articles = await get_news_by_location(location, period=period, max_results=max_results, nlp=not no_nlp)
  print_articles(articles)
- print(f"Found {len(articles)} articles for location '{location}'.")
+ logger.info(f"Found {len(articles)} articles for location '{location}'.")
 
  asyncio.run(_location())
 
@@ -74,7 +77,7 @@ def topic(topic, period, max_results, no_nlp):
  async def _topic():
  articles = await get_news_by_topic(topic, period=period, max_results=max_results, nlp=not no_nlp)
  print_articles(articles)
- print(f"Found {len(articles)} articles for topic '{topic}'.")
+ logger.info(f"Found {len(articles)} articles for topic '{topic}'.")
 
  asyncio.run(_topic())
 
@@ -87,14 +90,14 @@ def trending(geo, full_data):
  async def _trending():
  trending_terms = await get_trending_terms(geo=geo, full_data=full_data)
  if trending_terms:
- print("Trending terms:")
+ logger.info("Trending terms:")
  for term in trending_terms:
  if isinstance(term, dict):
- print(f"{term['keyword']:<40} - {term['volume']}")
+ logger.info(f"{term['keyword']:<40} - {term['volume']}")
  else:
- print(term)
+ logger.info(term)
  else:
- print("No trending terms found.")
+ logger.info("No trending terms found.")
 
  asyncio.run(_trending())
 
@@ -114,19 +117,19 @@ def top(period, max_results, no_nlp):
  async def _top():
  articles = await get_top_news(max_results=max_results, period=period, nlp=not no_nlp)
  print_articles(articles)
- print(f"Found {len(articles)} top articles.")
+ logger.info(f"Found {len(articles)} top articles.")
 
  asyncio.run(_top())
 
 
  def print_articles(articles):
  for article in articles:
- print(f"Title: {article.title}")
- print(f"URL: {article.original_url}")
- print(f"Authors: {article.authors}")
- print(f"Publish Date: {article.publish_date}")
- print(f"Top Image: {article.top_image}")
- print(f"Summary: {article.summary}\n")
+ logger.info(f"Title: {article.title}")
+ logger.info(f"URL: {article.original_url}")
+ logger.info(f"Authors: {article.authors}")
+ logger.info(f"Publish Date: {article.publish_date}")
+ logger.info(f"Top Image: {article.top_image}")
+ logger.info(f"Summary: {article.summary}\n")
  save_article_to_json(article)
 
 
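The cli.py hunks above swap bare print() calls for the stdlib logging module via a module-level logger. A minimal sketch of that pattern, assuming a handler is configured somewhere in the entry point (the diff does not show where handlers are set up):

```python
import logging

# Module-level logger, matching the pattern added in cli.py.
logger = logging.getLogger(__name__)


def report(count: int, keyword: str) -> None:
    # Messages that previously went to stdout via print() now go through the logger.
    logger.info(f"Found {count} articles for keyword '{keyword}'.")


if __name__ == "__main__":
    # Without a configured handler, INFO records are not emitted; basicConfig is
    # one common choice here (an assumption, not shown in this diff).
    logging.basicConfig(level=logging.INFO, format="%(levelname)s %(name)s: %(message)s")
    report(3, "python")
```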
@@ -70,7 +70,6 @@ class BrowserManager(AsyncContextDecorator):
  raise SystemExit(1)
  return cast(Browser, cls.browser)
 
-
  @classmethod
  async def _shutdown(cls):
  logger.info("Shutting down browser...")
@@ -275,22 +274,14 @@ async def get_news_by_topic(
 
 
  @overload
- async def get_trending_terms(
- geo: str = "US", full_data: Literal[False] = False
- ) -> list[dict[str, int]]:
- pass
+ async def get_trending_terms(geo: str = "US", full_data: Literal[False] = False) -> list[dict[str, str]]: ...
 
 
  @overload
- async def get_trending_terms(
- geo: str = "US", full_data: Literal[True] = True
- ) -> list[TrendKeywordLite]:
- pass
+ async def get_trending_terms(geo: str = "US", full_data: Literal[True] = True) -> list[TrendKeywordLite]: ...
 
 
- async def get_trending_terms(
- geo: str = "US", full_data: bool = False
- ) -> list[dict[str, int]] | list[TrendKeywordLite]:
+ async def get_trending_terms(geo: str = "US", full_data: bool = False) -> list[dict[str, str]] | list[TrendKeywordLite]:
  """
  Returns google trends for a specific geo location.
  """
@@ -128,7 +128,7 @@ def set_newspaper_article_fields(full_data: bool = False):
  ]
 
 
- async def summarize_article(article: Article, ctx: Context) -> None:
+ async def llm_summarize_article(article: Article, ctx: Context) -> None:
  if article.text:
  prompt = f"Please provide a concise summary of the following news article:\n\n{article.text}"
  response = await ctx.sample(prompt)
@@ -149,7 +149,7 @@ async def summarize_articles(articles: list[Article], ctx: Context) -> None:
  total_articles = len(articles)
  try:
  for idx, article in enumerate(articles):
- await summarize_article(article, ctx)
+ await llm_summarize_article(article, ctx)
  await ctx.report_progress(idx, total_articles)
  except Exception as err:
  await ctx.debug(f"Failed to use LLM sampling for article summary:\n{err.args}")
@@ -309,7 +309,7 @@ async def get_trending_terms(
  trends_out = []
  for trend in trends:
  trend = trend.__dict__
- if 'news' in trend:
+ if "news" in trend:
  trend["news"] = [TrendingTermArticleOut(**article.__dict__) for article in trend["news"]]
  trends_out.append(TrendingTermOut(**trend))
  return trends_out
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: google-news-trends-mcp
- Version: 0.2.3
+ Version: 0.2.5
  Summary: An MCP server to access RSS feeds of Google News and Google Trends
  Author-email: Jesse Manek <jesse.manek@gmail.com>
  License-Expression: MIT
@@ -27,14 +27,14 @@ Dynamic: license-file
 
  # Google News Trends MCP
 
- An MCP server to access RSS feeds of Google News and Google Trends.
+ An MCP server that retrieves data from Google News and Google Trends RSS feeds, optionally summarizes it with LLM/NLP, and returns structured results.
 
  ## Features
 
- - Search Google News RSS feeds for articles based on keyword, location, topic
- - Get top news stories from Google News
- - Google Trends keywords based on location
- - Optional LLM Sampling and NLP to summarize articles and extract keywords
+ - Search Google News RSS feeds for articles by keyword, location, or topic
+ - Retrieve top news stories from Google News
+ - Fetch trending search terms from Google Trends for a given geographic location
+ - Optional LLM sampling and NLP to summarize articles and extract keywords
 
  ## Installation
 
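The feature list above corresponds to the subcommands defined under @click.group() in the cli.py hunks earlier (keyword, location, topic, trending, top). A toy sketch of that command structure; the option names below are illustrative and are not confirmed by this diff:

```python
import click


@click.group()
def cli() -> None:
    """Stand-in for the CLI group in google_news_trends_mcp/cli.py."""


@cli.command()
@click.argument("keyword")
@click.option("--max-results", default=10, show_default=True, help="Illustrative option name.")
def keyword(keyword: str, max_results: int) -> None:
    # The real command awaits get_news_by_keyword(); this stub only echoes its inputs.
    click.echo(f"Would fetch up to {max_results} articles for '{keyword}'.")


if __name__ == "__main__":
    cli()
```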
@@ -0,0 +1,11 @@
+ google_news_trends_mcp/__init__.py,sha256=nDWNd6_TSf4vDQuHVBoAf4QfZCB3ZUFQ0M7XvifNJ-g,78
+ google_news_trends_mcp/__main__.py,sha256=ysiAk_xpnnW3lrLlzdIQQa71tuGBRT8WocbecBsY2Fs,87
+ google_news_trends_mcp/cli.py,sha256=K6zakxdFWAORXTN28Ewi9Gcxzm4suU8YAdiPqh-fcaI,4520
+ google_news_trends_mcp/news.py,sha256=4LplhgELLP9_R36KzFe1rXm370W9aGVSBQHIt8EHvzs,12460
+ google_news_trends_mcp/server.py,sha256=lCBCm8EvchTv-w7zojFFo-eIj9-WL4x80qzylsYYnEg,13402
+ google_news_trends_mcp-0.2.5.dist-info/licenses/LICENSE,sha256=5dsv2ZI5EZIer0a9MktVmILVrlp5vqH_0tPIe3bRLgE,1067
+ google_news_trends_mcp-0.2.5.dist-info/METADATA,sha256=GRdK8iVw3GV7OKITlzmbwDB1UzPAVskj1Pqpw-hgoJ4,4570
+ google_news_trends_mcp-0.2.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ google_news_trends_mcp-0.2.5.dist-info/entry_points.txt,sha256=eVT3xd6YJQgsWAUBwhnffuwhXNF7yyt_uco6fjBy-1o,130
+ google_news_trends_mcp-0.2.5.dist-info/top_level.txt,sha256=RFheDbzhNnEV_Y3iFNm7jhRhY1P1wQgfiYqVpXCTD_U,23
+ google_news_trends_mcp-0.2.5.dist-info/RECORD,,
@@ -1,11 +0,0 @@
- google_news_trends_mcp/__init__.py,sha256=nDWNd6_TSf4vDQuHVBoAf4QfZCB3ZUFQ0M7XvifNJ-g,78
- google_news_trends_mcp/__main__.py,sha256=ysiAk_xpnnW3lrLlzdIQQa71tuGBRT8WocbecBsY2Fs,87
- google_news_trends_mcp/cli.py,sha256=IZ4UdAQ-tBnfutLcd3lGwsukpkYbdaJyuXZf7vddfLs,4383
- google_news_trends_mcp/news.py,sha256=pTAUTrM8Rkp8GaTPOLOo7BkFx9mePdQeCON7C6Q32aA,12489
- google_news_trends_mcp/server.py,sha256=TYVOnUVFQk2RQTGRVyHqoOMrADlHvFmfkN-0TmsuEO8,13394
- google_news_trends_mcp-0.2.3.dist-info/licenses/LICENSE,sha256=5dsv2ZI5EZIer0a9MktVmILVrlp5vqH_0tPIe3bRLgE,1067
- google_news_trends_mcp-0.2.3.dist-info/METADATA,sha256=KJcma10FlOdMJ4_Xm2EKB4WKfACOaMM-ICrtYF5ctk8,4446
- google_news_trends_mcp-0.2.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- google_news_trends_mcp-0.2.3.dist-info/entry_points.txt,sha256=eVT3xd6YJQgsWAUBwhnffuwhXNF7yyt_uco6fjBy-1o,130
- google_news_trends_mcp-0.2.3.dist-info/top_level.txt,sha256=RFheDbzhNnEV_Y3iFNm7jhRhY1P1wQgfiYqVpXCTD_U,23
- google_news_trends_mcp-0.2.3.dist-info/RECORD,,