5mghost_rover-0.0.1.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- 5mghost_rover-0.0.1/.gitignore +24 -0
- 5mghost_rover-0.0.1/PKG-INFO +124 -0
- 5mghost_rover-0.0.1/README.md +101 -0
- 5mghost_rover-0.0.1/docs/API_FIELDS.md +409 -0
- 5mghost_rover-0.0.1/pyproject.toml +38 -0
- 5mghost_rover-0.0.1/src/reddit_mcp/__init__.py +3 -0
- 5mghost_rover-0.0.1/src/reddit_mcp/cli.py +239 -0
- 5mghost_rover-0.0.1/src/reddit_mcp/config.py +58 -0
- 5mghost_rover-0.0.1/src/reddit_mcp/cookie_manager.py +244 -0
- 5mghost_rover-0.0.1/src/reddit_mcp/oauth_flow.py +202 -0
- 5mghost_rover-0.0.1/src/reddit_mcp/reddit_client.py +314 -0
- 5mghost_rover-0.0.1/src/reddit_mcp/server.py +252 -0
- 5mghost_rover-0.0.1/src/reddit_mcp/token_manager.py +205 -0
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
# 依赖和编译产物
|
|
2
|
+
**/node_modules/
|
|
3
|
+
**/dist/
|
|
4
|
+
|
|
5
|
+
# 环境变量(含密钥)
|
|
6
|
+
**/.env
|
|
7
|
+
**/.env.*
|
|
8
|
+
|
|
9
|
+
# 运行时数据
|
|
10
|
+
**/data/
|
|
11
|
+
**/log/
|
|
12
|
+
|
|
13
|
+
# 本地工具状态 / 缓存 / 打包产物
|
|
14
|
+
/.claude/
|
|
15
|
+
/.context/
|
|
16
|
+
/.gstack/
|
|
17
|
+
**/.gemini/
|
|
18
|
+
**/__pycache__/
|
|
19
|
+
**/*.tgz
|
|
20
|
+
**/docs/superpowers/plans/
|
|
21
|
+
|
|
22
|
+
# 系统文件
|
|
23
|
+
.DS_Store
|
|
24
|
+
*.log
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: 5mghost-rover
|
|
3
|
+
Version: 0.0.1
|
|
4
|
+
Summary: Reddit MCP server — access Reddit data through MCP protocol
|
|
5
|
+
Project-URL: Homepage, https://github.com/mkterswingman/mcp_projects
|
|
6
|
+
Author: mkterswingman
|
|
7
|
+
License-Expression: MIT
|
|
8
|
+
Keywords: ai,llm,mcp,reddit
|
|
9
|
+
Classifier: Development Status :: 4 - Beta
|
|
10
|
+
Classifier: Intended Audience :: Developers
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Programming Language :: Python :: 3
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
17
|
+
Requires-Python: >=3.10
|
|
18
|
+
Requires-Dist: browser-use>=0.12
|
|
19
|
+
Requires-Dist: curl-cffi>=0.7
|
|
20
|
+
Requires-Dist: fastmcp>=2.0
|
|
21
|
+
Requires-Dist: httpx>=0.25
|
|
22
|
+
Description-Content-Type: text/markdown
|
|
23
|
+
|
|
24
|
+
# reddit-mcp
|
|
25
|
+
|
|
26
|
+
Reddit MCP server — access Reddit data (posts, comments, search) through the MCP protocol.
|
|
27
|
+
|
|
28
|
+
## Quick Start
|
|
29
|
+
|
|
30
|
+
```bash
|
|
31
|
+
# 1. Full setup (OAuth login + Reddit cookies)
|
|
32
|
+
uvx reddit-mcp setup
|
|
33
|
+
|
|
34
|
+
# 2. Add to your AI client config (Claude Desktop, Cursor, etc.)
|
|
35
|
+
```
|
|
36
|
+
|
|
37
|
+
```json
|
|
38
|
+
{
|
|
39
|
+
"mcpServers": {
|
|
40
|
+
"reddit": {
|
|
41
|
+
"command": "uvx",
|
|
42
|
+
"args": ["reddit-mcp", "serve"],
|
|
43
|
+
"env": {
|
|
44
|
+
"REDDIT_MCP_TOKEN": "pat_xxx"
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
## Authentication
|
|
52
|
+
|
|
53
|
+
reddit-mcp uses two layers of authentication:
|
|
54
|
+
|
|
55
|
+
1. **PAT / OAuth token** — Controls access to the MCP service (shared account system with yt-mcp / x-mcp)
|
|
56
|
+
2. **Reddit cookies** — Used to fetch data from Reddit's JSON API
|
|
57
|
+
|
|
58
|
+
### PAT Setup (Simplest)
|
|
59
|
+
|
|
60
|
+
1. Visit https://mkterswingman.com/pat/login
|
|
61
|
+
2. Log in and generate a PAT
|
|
62
|
+
3. Set `REDDIT_MCP_TOKEN=pat_xxx` in your MCP config's `env`
|
|
63
|
+
4. Run `uvx reddit-mcp setup-cookies` to set up Reddit cookies
|
|
64
|
+
|
|
65
|
+
### OAuth Setup (Full)
|
|
66
|
+
|
|
67
|
+
```bash
|
|
68
|
+
uvx reddit-mcp setup
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
This runs the OAuth flow and cookie setup in one step.
|
|
72
|
+
|
|
73
|
+
## CLI Commands
|
|
74
|
+
|
|
75
|
+
| Command | Description |
|
|
76
|
+
|---------|-------------|
|
|
77
|
+
| `reddit-mcp serve` | Start MCP server (stdio transport) |
|
|
78
|
+
| `reddit-mcp setup` | Full setup: OAuth login + cookie setup |
|
|
79
|
+
| `reddit-mcp setup-cookies` | Cookie-only setup (already have a PAT) |
|
|
80
|
+
| `reddit-mcp update` | Check for updates and install latest version |
|
|
81
|
+
| `reddit-mcp check` | Check token & cookie status |
|
|
82
|
+
| `reddit-mcp version` | Show version |
|
|
83
|
+
|
|
84
|
+
## MCP Tools
|
|
85
|
+
|
|
86
|
+
### Data Tools
|
|
87
|
+
|
|
88
|
+
- **`get_subreddit_posts`** — Get posts from a subreddit (hot/new/top/rising, up to 100 per request)
|
|
89
|
+
- **`get_post_content`** — Get a post's content and comment tree
|
|
90
|
+
- **`search_reddit`** — Search Reddit posts (global or within a subreddit)
|
|
91
|
+
- **`get_subreddit_info`** — Get subreddit metadata (subscribers, description, etc.)
|
|
92
|
+
|
|
93
|
+
### Management Tools
|
|
94
|
+
|
|
95
|
+
- **`check_cookies`** — Check cookie validity, age, and username
|
|
96
|
+
- **`get_rate_limit_status`** — Check remaining API quota
|
|
97
|
+
|
|
98
|
+
## Rate Limits
|
|
99
|
+
|
|
100
|
+
Reddit allows 100 requests per 10-minute window. The client automatically:
|
|
101
|
+
- Reads `x-ratelimit-*` headers from responses
|
|
102
|
+
- Throttles when remaining quota < 20
|
|
103
|
+
- Retries on 429 (Too Many Requests)
|
|
104
|
+
|
|
105
|
+
For typical usage (1-10 requests per conversation), you'll never hit the limit.
|
|
106
|
+
|
|
107
|
+
## Cookie Management
|
|
108
|
+
|
|
109
|
+
Reddit cookies are stored at `~/.reddit-mcp/cookies.json` (permissions: 600).
|
|
110
|
+
|
|
111
|
+
- Cookies are obtained by launching Chrome and having you log in to Reddit
|
|
112
|
+
- They're stored locally and **never uploaded to any server**
|
|
113
|
+
- If cookies expire (403 errors), run `reddit-mcp setup-cookies` to refresh
|
|
114
|
+
- A warning is shown when cookies are > 7 days old
|
|
115
|
+
|
|
116
|
+
## Requirements
|
|
117
|
+
|
|
118
|
+
- Python 3.10+
|
|
119
|
+
- Chrome browser (for `setup` / `setup-cookies` commands)
|
|
120
|
+
- A valid PAT or OAuth token
|
|
121
|
+
|
|
122
|
+
## License
|
|
123
|
+
|
|
124
|
+
MIT
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
# reddit-mcp
|
|
2
|
+
|
|
3
|
+
Reddit MCP server — access Reddit data (posts, comments, search) through the MCP protocol.
|
|
4
|
+
|
|
5
|
+
## Quick Start
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
# 1. Full setup (OAuth login + Reddit cookies)
|
|
9
|
+
uvx reddit-mcp setup
|
|
10
|
+
|
|
11
|
+
# 2. Add to your AI client config (Claude Desktop, Cursor, etc.)
|
|
12
|
+
```
|
|
13
|
+
|
|
14
|
+
```json
|
|
15
|
+
{
|
|
16
|
+
"mcpServers": {
|
|
17
|
+
"reddit": {
|
|
18
|
+
"command": "uvx",
|
|
19
|
+
"args": ["reddit-mcp", "serve"],
|
|
20
|
+
"env": {
|
|
21
|
+
"REDDIT_MCP_TOKEN": "pat_xxx"
|
|
22
|
+
}
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
## Authentication
|
|
29
|
+
|
|
30
|
+
reddit-mcp uses two layers of authentication:
|
|
31
|
+
|
|
32
|
+
1. **PAT / OAuth token** — Controls access to the MCP service (shared account system with yt-mcp / x-mcp)
|
|
33
|
+
2. **Reddit cookies** — Used to fetch data from Reddit's JSON API
|
|
34
|
+
|
|
35
|
+
### PAT Setup (Simplest)
|
|
36
|
+
|
|
37
|
+
1. Visit https://mkterswingman.com/pat/login
|
|
38
|
+
2. Log in and generate a PAT
|
|
39
|
+
3. Set `REDDIT_MCP_TOKEN=pat_xxx` in your MCP config's `env`
|
|
40
|
+
4. Run `uvx reddit-mcp setup-cookies` to set up Reddit cookies
|
|
41
|
+
|
|
42
|
+
### OAuth Setup (Full)
|
|
43
|
+
|
|
44
|
+
```bash
|
|
45
|
+
uvx reddit-mcp setup
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
This runs the OAuth flow and cookie setup in one step.
|
|
49
|
+
|
|
50
|
+
## CLI Commands
|
|
51
|
+
|
|
52
|
+
| Command | Description |
|
|
53
|
+
|---------|-------------|
|
|
54
|
+
| `reddit-mcp serve` | Start MCP server (stdio transport) |
|
|
55
|
+
| `reddit-mcp setup` | Full setup: OAuth login + cookie setup |
|
|
56
|
+
| `reddit-mcp setup-cookies` | Cookie-only setup (already have a PAT) |
|
|
57
|
+
| `reddit-mcp update` | Check for updates and install latest version |
|
|
58
|
+
| `reddit-mcp check` | Check token & cookie status |
|
|
59
|
+
| `reddit-mcp version` | Show version |
|
|
60
|
+
|
|
61
|
+
## MCP Tools
|
|
62
|
+
|
|
63
|
+
### Data Tools
|
|
64
|
+
|
|
65
|
+
- **`get_subreddit_posts`** — Get posts from a subreddit (hot/new/top/rising, up to 100 per request)
|
|
66
|
+
- **`get_post_content`** — Get a post's content and comment tree
|
|
67
|
+
- **`search_reddit`** — Search Reddit posts (global or within a subreddit)
|
|
68
|
+
- **`get_subreddit_info`** — Get subreddit metadata (subscribers, description, etc.)
|
|
69
|
+
|
|
70
|
+
### Management Tools
|
|
71
|
+
|
|
72
|
+
- **`check_cookies`** — Check cookie validity, age, and username
|
|
73
|
+
- **`get_rate_limit_status`** — Check remaining API quota
|
|
74
|
+
|
|
75
|
+
## Rate Limits
|
|
76
|
+
|
|
77
|
+
Reddit allows 100 requests per 10-minute window. The client automatically:
|
|
78
|
+
- Reads `x-ratelimit-*` headers from responses
|
|
79
|
+
- Throttles when remaining quota < 20
|
|
80
|
+
- Retries on 429 (Too Many Requests)
|
|
81
|
+
|
|
82
|
+
For typical usage (1-10 requests per conversation), you'll never hit the limit.
|
|
83
|
+
|
|
84
|
+
## Cookie Management
|
|
85
|
+
|
|
86
|
+
Reddit cookies are stored at `~/.reddit-mcp/cookies.json` (permissions: 600).
|
|
87
|
+
|
|
88
|
+
- Cookies are obtained by launching Chrome and having you log in to Reddit
|
|
89
|
+
- They're stored locally and **never uploaded to any server**
|
|
90
|
+
- If cookies expire (403 errors), run `reddit-mcp setup-cookies` to refresh
|
|
91
|
+
- A warning is shown when cookies are > 7 days old
|
|
92
|
+
|
|
93
|
+
## Requirements
|
|
94
|
+
|
|
95
|
+
- Python 3.10+
|
|
96
|
+
- Chrome browser (for `setup` / `setup-cookies` commands)
|
|
97
|
+
- A valid PAT or OAuth token
|
|
98
|
+
|
|
99
|
+
## License
|
|
100
|
+
|
|
101
|
+
MIT
|
|
@@ -0,0 +1,409 @@
|
|
|
1
|
+
# Reddit MCP — API 字段参考
|
|
2
|
+
|
|
3
|
+
> 每个 MCP tool 返回的完整字段说明。**所有返回示例都是真实数据**(2026-03-28 从 Reddit 实际请求获取)。
|
|
4
|
+
|
|
5
|
+
## Tools 总览
|
|
6
|
+
|
|
7
|
+
| Tool | 用途 | 返回结构 |
|
|
8
|
+
|------|------|---------|
|
|
9
|
+
| `get_subreddit_posts` | 获取 subreddit 帖子列表 | `{ posts, after }` |
|
|
10
|
+
| `get_post_content` | 获取帖子内容 + 评论树 | `{ post, comments, has_more }` |
|
|
11
|
+
| `search_reddit` | 搜索帖子 | `{ results, after }` |
|
|
12
|
+
| `get_subreddit_info` | 获取 subreddit 信息 | `{ name, title, ... }` |
|
|
13
|
+
| `check_cookies` | 检查 cookie 状态 | `{ valid, age_days, ... }` |
|
|
14
|
+
| `get_rate_limit_status` | API 配额状态 | `{ remaining, reset_seconds, used }` |
|
|
15
|
+
|
|
16
|
+
---
|
|
17
|
+
|
|
18
|
+
## 1. `get_subreddit_posts`
|
|
19
|
+
|
|
20
|
+
获取某个 subreddit 的帖子列表。
|
|
21
|
+
|
|
22
|
+
**参数:**
|
|
23
|
+
|
|
24
|
+
| 参数 | 类型 | 默认值 | 说明 |
|
|
25
|
+
|------|------|--------|------|
|
|
26
|
+
| `subreddit` | string | (必填) | subreddit 名称,不带 `r/`。如 `"technology"` |
|
|
27
|
+
| `sort` | string | `"hot"` | 排序方式:`hot` / `new` / `top` / `rising` |
|
|
28
|
+
| `limit` | int | 25 | 返回数量,1-100 |
|
|
29
|
+
| `time_filter` | string | `"day"` | 仅 `sort=top` 时生效:`hour` / `day` / `week` / `month` / `year` / `all` |
|
|
30
|
+
| `after` | string | null | 分页游标,从上一次返回的 `after` 字段获取 |
|
|
31
|
+
|
|
32
|
+
**真实返回示例** — `get_subreddit_posts(subreddit="technology", sort="hot", limit=2)`:
|
|
33
|
+
|
|
34
|
+
```json
|
|
35
|
+
{
|
|
36
|
+
"posts": [
|
|
37
|
+
{
|
|
38
|
+
"id": "1s5yfpx",
|
|
39
|
+
"title": "L.A. Dodgers Tell 82-Year-Old, 50-Year Season Ticket Holder: 'Go Digital'—Or Don't Go At All",
|
|
40
|
+
"author": "-d1sc0nn3ct-",
|
|
41
|
+
"subreddit": "technology",
|
|
42
|
+
"score": 2912,
|
|
43
|
+
"upvote_ratio": 0.95,
|
|
44
|
+
"num_comments": 358,
|
|
45
|
+
"created_utc": 1774699207.0,
|
|
46
|
+
"created_time": "2026-03-28 20:00:07",
|
|
47
|
+
"link_url": "https://www.loscerritosnews.net/2026/03/25/l-a-dodgers-tell-82-year-old-50-year-season-ticket-holder-go-digital-or-dont-go-at-all/",
|
|
48
|
+
"reddit_url": "https://www.reddit.com/r/technology/comments/1s5yfpx/la_dodgers_tell_82yearold_50year_season_ticket/",
|
|
49
|
+
"body": "",
|
|
50
|
+
"is_text_post": false,
|
|
51
|
+
"tag": "Society",
|
|
52
|
+
"over_18": false
|
|
53
|
+
},
|
|
54
|
+
{
|
|
55
|
+
"id": "1s5tgiu",
|
|
56
|
+
"title": "The Shocking Speed of China's Scientific Rise",
|
|
57
|
+
"author": "straightdge",
|
|
58
|
+
"subreddit": "technology",
|
|
59
|
+
"score": 2813,
|
|
60
|
+
"upvote_ratio": 0.92,
|
|
61
|
+
"num_comments": 576,
|
|
62
|
+
"created_utc": 1774681330.0,
|
|
63
|
+
"created_time": "2026-03-28 15:02:10",
|
|
64
|
+
"link_url": "https://www.theatlantic.com/science/2026/03/china-science-superpower/686564/",
|
|
65
|
+
"reddit_url": "https://www.reddit.com/r/technology/comments/1s5tgiu/the_shocking_speed_of_chinas_scientific_rise/",
|
|
66
|
+
"body": "",
|
|
67
|
+
"is_text_post": false,
|
|
68
|
+
"tag": "Business",
|
|
69
|
+
"over_18": false
|
|
70
|
+
}
|
|
71
|
+
],
|
|
72
|
+
"after": "t3_1s5tgiu"
|
|
73
|
+
}
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
**字段说明:**
|
|
77
|
+
|
|
78
|
+
| 字段 | 说明 |
|
|
79
|
+
|------|------|
|
|
80
|
+
| `posts` | 帖子数组,结构见下方 [Post 对象](#post-对象) |
|
|
81
|
+
| `after` | 分页游标。传给下一次请求的 `after` 参数可翻下一页。`null` = 没有更多了 |
|
|
82
|
+
|
|
83
|
+
---
|
|
84
|
+
|
|
85
|
+
## 2. `get_post_content`
|
|
86
|
+
|
|
87
|
+
获取单个帖子的完整内容和评论树。
|
|
88
|
+
|
|
89
|
+
**参数:**
|
|
90
|
+
|
|
91
|
+
| 参数 | 类型 | 默认值 | 说明 |
|
|
92
|
+
|------|------|--------|------|
|
|
93
|
+
| `post_id` | string | (必填) | 帖子 ID(如 `"1s5yfpx"`)或完整 Reddit URL |
|
|
94
|
+
| `comment_limit` | int | 50 | 顶级评论数量,1-200 |
|
|
95
|
+
| `comment_depth` | int | 3 | 评论树最大深度,1-10 |
|
|
96
|
+
| `comment_sort` | string | `"best"` | 评论排序:`best` / `top` / `new` / `controversial` / `old` |
|
|
97
|
+
|
|
98
|
+
**真实返回示例** — `get_post_content(post_id="1s5yfpx", comment_limit=3, comment_sort="top")`:
|
|
99
|
+
|
|
100
|
+
```json
|
|
101
|
+
{
|
|
102
|
+
"post": {
|
|
103
|
+
"id": "1s5yfpx",
|
|
104
|
+
"title": "L.A. Dodgers Tell 82-Year-Old, 50-Year Season Ticket Holder: 'Go Digital'—Or Don't Go At All",
|
|
105
|
+
"author": "-d1sc0nn3ct-",
|
|
106
|
+
"subreddit": "technology",
|
|
107
|
+
"score": 2912,
|
|
108
|
+
"upvote_ratio": 0.95,
|
|
109
|
+
"num_comments": 358,
|
|
110
|
+
"created_utc": 1774699207.0,
|
|
111
|
+
"created_time": "2026-03-28 20:00:07",
|
|
112
|
+
"link_url": "https://www.loscerritosnews.net/2026/03/25/...",
|
|
113
|
+
"reddit_url": "https://www.reddit.com/r/technology/comments/1s5yfpx/...",
|
|
114
|
+
"body": "",
|
|
115
|
+
"is_text_post": false,
|
|
116
|
+
"tag": "Society",
|
|
117
|
+
"over_18": false
|
|
118
|
+
},
|
|
119
|
+
"comments": [
|
|
120
|
+
{
|
|
121
|
+
"id": "ocxzy0j",
|
|
122
|
+
"author": "Strange-Effort1305",
|
|
123
|
+
"body": "At least they are ruining baseball for their fans too",
|
|
124
|
+
"score": 2910,
|
|
125
|
+
"created_utc": 1774700602.0,
|
|
126
|
+
"created_time": "2026-03-28 20:23:22",
|
|
127
|
+
"depth": 0,
|
|
128
|
+
"replies": [
|
|
129
|
+
{ "kind": "more", "count": 16 }
|
|
130
|
+
]
|
|
131
|
+
},
|
|
132
|
+
{
|
|
133
|
+
"id": "ocy0raz",
|
|
134
|
+
"author": "Pr0ducer",
|
|
135
|
+
"body": "The term for what to do is called Grandfathering. Just let the old guy be the exception. WTF? Yo, Magic Johnson, you're a co-owner, do something about that.",
|
|
136
|
+
"score": 1191,
|
|
137
|
+
"created_utc": 1774700924.0,
|
|
138
|
+
"created_time": "2026-03-28 20:28:44",
|
|
139
|
+
"depth": 0,
|
|
140
|
+
"replies": [
|
|
141
|
+
{ "kind": "more", "count": 93 }
|
|
142
|
+
]
|
|
143
|
+
},
|
|
144
|
+
{ "kind": "more", "count": 168 }
|
|
145
|
+
],
|
|
146
|
+
"has_more": true
|
|
147
|
+
}
|
|
148
|
+
```
|
|
149
|
+
|
|
150
|
+
**字段说明:**
|
|
151
|
+
|
|
152
|
+
| 字段 | 说明 |
|
|
153
|
+
|------|------|
|
|
154
|
+
| `post` | 帖子内容,结构见 [Post 对象](#post-对象)。帖子被删除时为 `null` |
|
|
155
|
+
| `comments` | 评论数组,结构见 [Comment 对象](#comment-对象)。子评论嵌套在 `replies` 字段中 |
|
|
156
|
+
| `has_more` | `true` = 还有更多评论被截断了(看到数组末尾的 `"kind": "more"` 标记) |
|
|
157
|
+
|
|
158
|
+
---
|
|
159
|
+
|
|
160
|
+
## 3. `search_reddit`
|
|
161
|
+
|
|
162
|
+
全站或限定 subreddit 搜索帖子。
|
|
163
|
+
|
|
164
|
+
**参数:**
|
|
165
|
+
|
|
166
|
+
| 参数 | 类型 | 默认值 | 说明 |
|
|
167
|
+
|------|------|--------|------|
|
|
168
|
+
| `query` | string | (必填) | 搜索关键词 |
|
|
169
|
+
| `sort` | string | `"relevance"` | 排序:`relevance` / `new` / `hot` / `top` / `comments` |
|
|
170
|
+
| `time_filter` | string | `"all"` | 时间范围:`hour` / `day` / `week` / `month` / `year` / `all` |
|
|
171
|
+
| `limit` | int | 25 | 返回数量,1-100 |
|
|
172
|
+
| `subreddit` | string | null | 限定在某个 subreddit 内搜索。不传则全站搜索 |
|
|
173
|
+
| `after` | string | null | 分页游标 |
|
|
174
|
+
|
|
175
|
+
**真实返回示例** — `search_reddit(query="artificial intelligence", sort="top", time_filter="week", limit=2)`:
|
|
176
|
+
|
|
177
|
+
```json
|
|
178
|
+
{
|
|
179
|
+
"results": [
|
|
180
|
+
{
|
|
181
|
+
"id": "1s2p3sx",
|
|
182
|
+
"title": "Sora is dead. We're going to win",
|
|
183
|
+
"author": "2RINITY",
|
|
184
|
+
"subreddit": "antiai",
|
|
185
|
+
"score": 13341,
|
|
186
|
+
"upvote_ratio": 0.95,
|
|
187
|
+
"num_comments": 655,
|
|
188
|
+
"created_utc": 1774383400.0,
|
|
189
|
+
"created_time": "2026-03-26 08:16:40",
|
|
190
|
+
"link_url": "https://i.redd.it/ivnzmi9uw1rg1.jpeg",
|
|
191
|
+
"reddit_url": "https://www.reddit.com/r/antiai/comments/1s2p3sx/sora_is_dead_were_going_to_win/",
|
|
192
|
+
"body": "",
|
|
193
|
+
"is_text_post": false,
|
|
194
|
+
"tag": "AI News 🗞️ ",
|
|
195
|
+
"over_18": false
|
|
196
|
+
},
|
|
197
|
+
{
|
|
198
|
+
"id": "1s38ef8",
|
|
199
|
+
"title": "With context, this is the funniest image of the whole show.",
|
|
200
|
+
"author": "_theKataclysm_",
|
|
201
|
+
"subreddit": "TheDigitalCircus",
|
|
202
|
+
"score": 9048,
|
|
203
|
+
"upvote_ratio": 0.99,
|
|
204
|
+
"num_comments": 241,
|
|
205
|
+
"created_utc": 1774439150.0,
|
|
206
|
+
"created_time": "2026-03-26 23:45:50",
|
|
207
|
+
"link_url": "https://i.redd.it/ju5j89ami6rg1.jpeg",
|
|
208
|
+
"reddit_url": "https://www.reddit.com/r/TheDigitalCircus/comments/1s38ef8/...",
|
|
209
|
+
"body": "I've yet to see anyone else point out just how funny this whole bit is...",
|
|
210
|
+
"is_text_post": false,
|
|
211
|
+
"tag": "s:Censor1::Censor2:posts!",
|
|
212
|
+
"over_18": false
|
|
213
|
+
}
|
|
214
|
+
],
|
|
215
|
+
"after": "t3_1s38ef8"
|
|
216
|
+
}
|
|
217
|
+
```
|
|
218
|
+
|
|
219
|
+
**字段说明:**
|
|
220
|
+
|
|
221
|
+
| 字段 | 说明 |
|
|
222
|
+
|------|------|
|
|
223
|
+
| `results` | 搜索结果数组,结构同 [Post 对象](#post-对象)。注意搜索结果来自多个 subreddit |
|
|
224
|
+
| `after` | 分页游标 |
|
|
225
|
+
|
|
226
|
+
---
|
|
227
|
+
|
|
228
|
+
## 4. `get_subreddit_info`
|
|
229
|
+
|
|
230
|
+
获取 subreddit 的基本信息。
|
|
231
|
+
|
|
232
|
+
**参数:**
|
|
233
|
+
|
|
234
|
+
| 参数 | 类型 | 说明 |
|
|
235
|
+
|------|------|------|
|
|
236
|
+
| `subreddit` | string | subreddit 名称,如 `"technology"` |
|
|
237
|
+
|
|
238
|
+
**真实返回示例** — `get_subreddit_info(subreddit="technology")`:
|
|
239
|
+
|
|
240
|
+
```json
|
|
241
|
+
{
|
|
242
|
+
"name": "technology",
|
|
243
|
+
"title": "/r/Technology ",
|
|
244
|
+
"description": "Subreddit dedicated to the news and discussions about the creation and use of technology and its surrounding issues.",
|
|
245
|
+
"subscribers": 20204859,
|
|
246
|
+
"active_users": null,
|
|
247
|
+
"created_utc": 1201231675.0,
|
|
248
|
+
"created_time": "2008-01-25 11:27:55",
|
|
249
|
+
"over18": false,
|
|
250
|
+
"subreddit_type": "public"
|
|
251
|
+
}
|
|
252
|
+
```
|
|
253
|
+
|
|
254
|
+
**字段说明:**
|
|
255
|
+
|
|
256
|
+
| 字段 | 类型 | 说明 |
|
|
257
|
+
|------|------|------|
|
|
258
|
+
| `name` | string | subreddit 短名称 |
|
|
259
|
+
| `title` | string | subreddit 显示标题(版主自定义的) |
|
|
260
|
+
| `description` | string | 公开描述文字 |
|
|
261
|
+
| `subscribers` | int | 订阅人数。上面的例子:r/technology 有 2020 万订阅者 |
|
|
262
|
+
| `active_users` | int / null | 当前在线活跃用户。Reddit 有时不返回这个数据,为 `null` |
|
|
263
|
+
| `created_utc` | float | 社区创建时间(Unix 时间戳,秒) |
|
|
264
|
+
| `created_time` | string | 社区创建时间(本地时间,24 小时制)。`"2008-01-25 11:27:55"` = 2008 年 1 月 25 日上午 11 点 |
|
|
265
|
+
| `over18` | bool | 是否为 NSFW(成人内容)社区 |
|
|
266
|
+
| `subreddit_type` | string | 社区类型:`"public"`(公开)/ `"private"`(私密)/ `"restricted"`(受限) |
|
|
267
|
+
|
|
268
|
+
---
|
|
269
|
+
|
|
270
|
+
## 5. `check_cookies`
|
|
271
|
+
|
|
272
|
+
检查当前 Reddit cookie 的状态。无参数。
|
|
273
|
+
|
|
274
|
+
**真实返回示例:**
|
|
275
|
+
|
|
276
|
+
```json
|
|
277
|
+
{
|
|
278
|
+
"valid": true,
|
|
279
|
+
"age_days": 0.3,
|
|
280
|
+
"username": "Budget-Anxiety4023",
|
|
281
|
+
"warning": null
|
|
282
|
+
}
|
|
283
|
+
```
|
|
284
|
+
|
|
285
|
+
**字段说明:**
|
|
286
|
+
|
|
287
|
+
| 字段 | 类型 | 说明 |
|
|
288
|
+
|------|------|------|
|
|
289
|
+
| `valid` | bool | cookie 是否有效(能不能成功调 Reddit API) |
|
|
290
|
+
| `age_days` | float / null | cookie 创建了多少天。上面的 `0.3` 表示创建了约 7 小时 |
|
|
291
|
+
| `username` | string | 当前登录的 Reddit 用户名(仅 `valid=true` 时返回) |
|
|
292
|
+
| `warning` | string / null | 过期预警。cookie 快到期时会返回类似 `"⚠️ Reddit session cookie expires in 29 days..."` 的文案。正常时为 `null` |
|
|
293
|
+
|
|
294
|
+
---
|
|
295
|
+
|
|
296
|
+
## 6. `get_rate_limit_status`
|
|
297
|
+
|
|
298
|
+
获取 Reddit API 当前的请求配额状态。无参数。
|
|
299
|
+
|
|
300
|
+
**真实返回示例:**
|
|
301
|
+
|
|
302
|
+
```json
|
|
303
|
+
{
|
|
304
|
+
"remaining": 94.0,
|
|
305
|
+
"reset_seconds": 480,
|
|
306
|
+
"used": 6.0
|
|
307
|
+
}
|
|
308
|
+
```
|
|
309
|
+
|
|
310
|
+
**字段说明:**
|
|
311
|
+
|
|
312
|
+
| 字段 | 类型 | 说明 |
|
|
313
|
+
|------|------|------|
|
|
314
|
+
| `remaining` | float | 当前窗口内还能发多少次请求。Reddit 配额是 **每 10 分钟 100 次**。上面的 `94.0` 表示还剩 94 次 |
|
|
315
|
+
| `reset_seconds` | int | 多少秒后配额重置回 100。上面的 `480` 表示 8 分钟后重置 |
|
|
316
|
+
| `used` | float | 本窗口已用了多少次请求 |
|
|
317
|
+
|
|
318
|
+
---
|
|
319
|
+
|
|
320
|
+
## 公共数据结构
|
|
321
|
+
|
|
322
|
+
### Post 对象
|
|
323
|
+
|
|
324
|
+
每个帖子包含 15 个字段。以真实帖子为例:
|
|
325
|
+
|
|
326
|
+
```json
|
|
327
|
+
{
|
|
328
|
+
"id": "1s5yfpx",
|
|
329
|
+
"title": "L.A. Dodgers Tell 82-Year-Old, 50-Year Season Ticket Holder: 'Go Digital'—Or Don't Go At All",
|
|
330
|
+
"author": "-d1sc0nn3ct-",
|
|
331
|
+
"subreddit": "technology",
|
|
332
|
+
"score": 2912,
|
|
333
|
+
"upvote_ratio": 0.95,
|
|
334
|
+
"num_comments": 358,
|
|
335
|
+
"created_utc": 1774699207.0,
|
|
336
|
+
"created_time": "2026-03-28 20:00:07",
|
|
337
|
+
"link_url": "https://www.loscerritosnews.net/2026/03/25/...",
|
|
338
|
+
"reddit_url": "https://www.reddit.com/r/technology/comments/1s5yfpx/...",
|
|
339
|
+
"body": "",
|
|
340
|
+
"is_text_post": false,
|
|
341
|
+
"tag": "Society",
|
|
342
|
+
"over_18": false
|
|
343
|
+
}
|
|
344
|
+
```
|
|
345
|
+
|
|
346
|
+
| 字段 | 类型 | 说明 |
|
|
347
|
+
|------|------|------|
|
|
348
|
+
| `id` | string | 帖子唯一 ID。传给 `get_post_content` 可拿到完整内容和评论 |
|
|
349
|
+
| `title` | string | 帖子标题 |
|
|
350
|
+
| `author` | string | 发帖人的用户名 |
|
|
351
|
+
| `subreddit` | string | 所属社区名称 |
|
|
352
|
+
| `score` | int | **净投票分 = 点赞数 - 点踩数**。`2912` 表示这个帖子获得了 2912 净赞。Reddit 用这个排序帖子 |
|
|
353
|
+
| `upvote_ratio` | float | 点赞占总投票的比例。`0.95` = 95% 的人点了赞,5% 点了踩 |
|
|
354
|
+
| `num_comments` | int | 评论总数(包括所有层级的回复)。`358` = 这个帖子下有 358 条评论 |
|
|
355
|
+
| `created_utc` | float | 发帖时间,Unix 时间戳(秒)。保留此字段方便程序处理 |
|
|
356
|
+
| `created_time` | string | 发帖时间,客户端本地时间,24 小时制。`"2026-03-28 20:00:07"` = 今天晚上 8 点 |
|
|
357
|
+
| `link_url` | string | 帖子链接到的**目标地址**。链接帖指向外部文章(上面是一篇新闻),文字帖指向帖子自身 |
|
|
358
|
+
| `reddit_url` | string | 这个帖子**在 Reddit 上的地址**,点开是评论页 |
|
|
359
|
+
| `body` | string | 帖子正文(Markdown 格式)。链接帖为空字符串,文字帖有内容 |
|
|
360
|
+
| `is_text_post` | bool | `true` = 文字帖(用户自己写的内容),`false` = 链接帖(分享外部链接) |
|
|
361
|
+
| `tag` | string / null | 版主设置的分类标签。如 `"Society"`, `"Energy"`。没有标签时为 `null` |
|
|
362
|
+
| `over_18` | bool | 是否为 NSFW(成人内容) |
|
|
363
|
+
|
|
364
|
+
### Comment 对象
|
|
365
|
+
|
|
366
|
+
每条评论包含 8 个字段。以真实评论为例:
|
|
367
|
+
|
|
368
|
+
```json
|
|
369
|
+
{
|
|
370
|
+
"id": "ocxzy0j",
|
|
371
|
+
"author": "Strange-Effort1305",
|
|
372
|
+
"body": "At least they are ruining baseball for their fans too",
|
|
373
|
+
"score": 2910,
|
|
374
|
+
"created_utc": 1774700602.0,
|
|
375
|
+
"created_time": "2026-03-28 20:23:22",
|
|
376
|
+
"depth": 0,
|
|
377
|
+
"replies": [
|
|
378
|
+
{ "kind": "more", "count": 16 }
|
|
379
|
+
]
|
|
380
|
+
}
|
|
381
|
+
```
|
|
382
|
+
|
|
383
|
+
| 字段 | 类型 | 说明 |
|
|
384
|
+
|------|------|------|
|
|
385
|
+
| `id` | string | 评论唯一 ID |
|
|
386
|
+
| `author` | string | 评论者用户名 |
|
|
387
|
+
| `body` | string | 评论正文(Markdown 格式) |
|
|
388
|
+
| `score` | int | 净投票分,含义同 Post 的 score。这条评论获得了 2910 净赞 |
|
|
389
|
+
| `created_utc` | float | 评论时间,Unix 时间戳 |
|
|
390
|
+
| `created_time` | string | 评论时间,客户端本地时间,24 小时制 |
|
|
391
|
+
| `depth` | int | 嵌套层级。`0` = 顶级评论(直接回复帖子),`1` = 回复顶级评论,`2` = 回复的回复... |
|
|
392
|
+
| `replies` | Comment[] | 子评论数组,递归嵌套同样的结构。受 `comment_depth` 参数控制最大深度 |
|
|
393
|
+
|
|
394
|
+
### 截断标记(More 对象)
|
|
395
|
+
|
|
396
|
+
当评论太多被截断时,数组中会出现这种特殊对象:
|
|
397
|
+
|
|
398
|
+
```json
|
|
399
|
+
{ "kind": "more", "count": 168 }
|
|
400
|
+
```
|
|
401
|
+
|
|
402
|
+
| 字段 | 类型 | 说明 |
|
|
403
|
+
|------|------|------|
|
|
404
|
+
| `kind` | string | 固定为 `"more"`,表示后面还有评论没返回 |
|
|
405
|
+
| `count` | int | 被截断了多少条评论。`168` = 还有 168 条评论没展示 |
|
|
406
|
+
|
|
407
|
+
这个对象可以出现在两个位置:
|
|
408
|
+
- `comments` 数组末尾 — 还有更多顶级评论没返回
|
|
409
|
+
- 某条评论的 `replies` 数组中 — 这条评论下还有更多子回复没展开
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "5mghost-rover"
|
|
3
|
+
version = "0.0.1"
|
|
4
|
+
description = "Reddit MCP server — access Reddit data through MCP protocol"
|
|
5
|
+
requires-python = ">=3.10"
|
|
6
|
+
license = "MIT"
|
|
7
|
+
authors = [{ name = "mkterswingman" }]
|
|
8
|
+
readme = "README.md"
|
|
9
|
+
keywords = ["mcp", "reddit", "ai", "llm"]
|
|
10
|
+
classifiers = [
|
|
11
|
+
"Development Status :: 4 - Beta",
|
|
12
|
+
"Intended Audience :: Developers",
|
|
13
|
+
"License :: OSI Approved :: MIT License",
|
|
14
|
+
"Programming Language :: Python :: 3",
|
|
15
|
+
"Programming Language :: Python :: 3.10",
|
|
16
|
+
"Programming Language :: Python :: 3.11",
|
|
17
|
+
"Programming Language :: Python :: 3.12",
|
|
18
|
+
"Programming Language :: Python :: 3.13",
|
|
19
|
+
]
|
|
20
|
+
dependencies = [
|
|
21
|
+
"fastmcp>=2.0",
|
|
22
|
+
"httpx>=0.25",
|
|
23
|
+
"curl_cffi>=0.7",
|
|
24
|
+
"browser-use>=0.12",
|
|
25
|
+
]
|
|
26
|
+
|
|
27
|
+
[project.scripts]
|
|
28
|
+
reddit-mcp = "reddit_mcp.cli:main"
|
|
29
|
+
|
|
30
|
+
[project.urls]
|
|
31
|
+
Homepage = "https://github.com/mkterswingman/mcp_projects"
|
|
32
|
+
|
|
33
|
+
[build-system]
|
|
34
|
+
requires = ["hatchling"]
|
|
35
|
+
build-backend = "hatchling.build"
|
|
36
|
+
|
|
37
|
+
[tool.hatch.build.targets.wheel]
|
|
38
|
+
packages = ["src/reddit_mcp"]
|