arionxiv 1.0.32__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arionxiv/__init__.py +40 -0
- arionxiv/__main__.py +10 -0
- arionxiv/arxiv_operations/__init__.py +0 -0
- arionxiv/arxiv_operations/client.py +225 -0
- arionxiv/arxiv_operations/fetcher.py +173 -0
- arionxiv/arxiv_operations/searcher.py +122 -0
- arionxiv/arxiv_operations/utils.py +293 -0
- arionxiv/cli/__init__.py +4 -0
- arionxiv/cli/commands/__init__.py +1 -0
- arionxiv/cli/commands/analyze.py +587 -0
- arionxiv/cli/commands/auth.py +365 -0
- arionxiv/cli/commands/chat.py +714 -0
- arionxiv/cli/commands/daily.py +482 -0
- arionxiv/cli/commands/fetch.py +217 -0
- arionxiv/cli/commands/library.py +295 -0
- arionxiv/cli/commands/preferences.py +426 -0
- arionxiv/cli/commands/search.py +254 -0
- arionxiv/cli/commands/settings_unified.py +1407 -0
- arionxiv/cli/commands/trending.py +41 -0
- arionxiv/cli/commands/welcome.py +168 -0
- arionxiv/cli/main.py +407 -0
- arionxiv/cli/ui/__init__.py +1 -0
- arionxiv/cli/ui/global_theme_manager.py +173 -0
- arionxiv/cli/ui/logo.py +127 -0
- arionxiv/cli/ui/splash.py +89 -0
- arionxiv/cli/ui/theme.py +32 -0
- arionxiv/cli/ui/theme_system.py +391 -0
- arionxiv/cli/utils/__init__.py +54 -0
- arionxiv/cli/utils/animations.py +522 -0
- arionxiv/cli/utils/api_client.py +583 -0
- arionxiv/cli/utils/api_config.py +505 -0
- arionxiv/cli/utils/command_suggestions.py +147 -0
- arionxiv/cli/utils/db_config_manager.py +254 -0
- arionxiv/github_actions_runner.py +206 -0
- arionxiv/main.py +23 -0
- arionxiv/prompts/__init__.py +9 -0
- arionxiv/prompts/prompts.py +247 -0
- arionxiv/rag_techniques/__init__.py +8 -0
- arionxiv/rag_techniques/basic_rag.py +1531 -0
- arionxiv/scheduler_daemon.py +139 -0
- arionxiv/server.py +1000 -0
- arionxiv/server_main.py +24 -0
- arionxiv/services/__init__.py +73 -0
- arionxiv/services/llm_client.py +30 -0
- arionxiv/services/llm_inference/__init__.py +58 -0
- arionxiv/services/llm_inference/groq_client.py +469 -0
- arionxiv/services/llm_inference/llm_utils.py +250 -0
- arionxiv/services/llm_inference/openrouter_client.py +564 -0
- arionxiv/services/unified_analysis_service.py +872 -0
- arionxiv/services/unified_auth_service.py +457 -0
- arionxiv/services/unified_config_service.py +456 -0
- arionxiv/services/unified_daily_dose_service.py +823 -0
- arionxiv/services/unified_database_service.py +1633 -0
- arionxiv/services/unified_llm_service.py +366 -0
- arionxiv/services/unified_paper_service.py +604 -0
- arionxiv/services/unified_pdf_service.py +522 -0
- arionxiv/services/unified_prompt_service.py +344 -0
- arionxiv/services/unified_scheduler_service.py +589 -0
- arionxiv/services/unified_user_service.py +954 -0
- arionxiv/utils/__init__.py +51 -0
- arionxiv/utils/api_helpers.py +200 -0
- arionxiv/utils/file_cleanup.py +150 -0
- arionxiv/utils/ip_helper.py +96 -0
- arionxiv-1.0.32.dist-info/METADATA +336 -0
- arionxiv-1.0.32.dist-info/RECORD +69 -0
- arionxiv-1.0.32.dist-info/WHEEL +5 -0
- arionxiv-1.0.32.dist-info/entry_points.txt +4 -0
- arionxiv-1.0.32.dist-info/licenses/LICENSE +21 -0
- arionxiv-1.0.32.dist-info/top_level.txt +1 -0
arionxiv/utils/__init__.py
@@ -0,0 +1,51 @@
+"""
+Utility functions for ArionXiv
+"""
+
+from .ip_helper import get_public_ip, display_ip_whitelist_help, check_mongodb_connection_error
+from .file_cleanup import file_cleanup_manager, FileCleanupManager
+from .api_helpers import (
+    RegisterRequest,
+    LoginRequest,
+    RefreshTokenRequest,
+    ChatMessageRequest,
+    ChatSessionRequest,
+    LibraryAddRequest,
+    LibraryUpdateRequest,
+    PaperSearchRequest,
+    APIResponse,
+    AuthResponse,
+    PaperListResponse,
+    ChatResponse,
+    create_error_response,
+    handle_service_error,
+    sanitize_arxiv_id,
+    format_user_response,
+    paginate_results
+)
+
+__all__ = [
+    'get_public_ip',
+    'display_ip_whitelist_help',
+    'check_mongodb_connection_error',
+    'file_cleanup_manager',
+    'FileCleanupManager',
+    # API helpers
+    'RegisterRequest',
+    'LoginRequest',
+    'RefreshTokenRequest',
+    'ChatMessageRequest',
+    'ChatSessionRequest',
+    'LibraryAddRequest',
+    'LibraryUpdateRequest',
+    'PaperSearchRequest',
+    'APIResponse',
+    'AuthResponse',
+    'PaperListResponse',
+    'ChatResponse',
+    'create_error_response',
+    'handle_service_error',
+    'sanitize_arxiv_id',
+    'format_user_response',
+    'paginate_results'
+]
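
Note (not part of the diff): because utils/__init__.py re-exports the helpers above, downstream code can import them from one place. A minimal, hypothetical usage sketch, assuming the arionxiv wheel and its dependencies (pydantic, fastapi) are installed:

    # Hypothetical usage sketch; not part of the package itself.
    from arionxiv.utils import PaperSearchRequest, paginate_results

    req = PaperSearchRequest(query="attention is all you need", max_results=5)
    page = paginate_results(items=list(range(12)), skip=0, limit=req.max_results)
    print(page["count"], page["has_more"])  # 5 True
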
arionxiv/utils/api_helpers.py
@@ -0,0 +1,200 @@
+"""
+API Helper utilities for ArionXiv server
+Shared response models, error handlers, and common API utilities
+"""
+
+from typing import Dict, Any, Optional, List
+from datetime import datetime
+from pydantic import BaseModel, Field, field_validator
+from fastapi import HTTPException
+import logging
+import re
+
+logger = logging.getLogger(__name__)
+
+
+# =============================================================================
+# REQUEST MODELS
+# =============================================================================
+
+class RegisterRequest(BaseModel):
+    """User registration request model"""
+    email: str = Field(..., description="User email address")
+    user_name: str = Field(..., min_length=3, max_length=32, description="Username")
+    password: str = Field(..., min_length=8, description="Password")
+    full_name: Optional[str] = Field(default="", description="Full name")
+
+    @field_validator('email')
+    @classmethod
+    def validate_email(cls, v: str) -> str:
+        pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
+        if not re.match(pattern, v):
+            raise ValueError('Invalid email format')
+        return v.lower().strip()
+
+    @field_validator('user_name')
+    @classmethod
+    def validate_username(cls, v: str) -> str:
+        v = v.strip().lower()
+        if not re.match(r'^[a-z0-9._-]+$', v):
+            raise ValueError('Username can only contain lowercase letters, numbers, dot, underscore, or hyphen')
+        return v
+
+
+class LoginRequest(BaseModel):
+    """User login request model"""
+    identifier: str = Field(..., description="Email or username")
+    password: str = Field(..., description="Password")
+
+
+class RefreshTokenRequest(BaseModel):
+    """Token refresh request model"""
+    token: str = Field(..., description="Current JWT token to refresh")
+
+
+class ChatMessageRequest(BaseModel):
+    """Chat message request model"""
+    message: str = Field(..., min_length=1, description="User message")
+    paper_id: str = Field(..., description="ArXiv paper ID for context")
+    session_id: Optional[str] = Field(default=None, description="Existing chat session ID")
+
+
+class ChatSessionRequest(BaseModel):
+    """Create chat session request model"""
+    paper_id: str = Field(..., description="ArXiv paper ID")
+    title: Optional[str] = Field(default=None, description="Session title")
+
+
+class LibraryAddRequest(BaseModel):
+    """Add paper to library request model"""
+    arxiv_id: str = Field(..., description="ArXiv paper ID")
+    tags: Optional[List[str]] = Field(default=None, description="Tags for the paper")
+    notes: Optional[str] = Field(default=None, description="Personal notes")
+
+
+class LibraryUpdateRequest(BaseModel):
+    """Update library paper request model"""
+    tags: Optional[List[str]] = Field(default=None, description="Updated tags")
+    notes: Optional[str] = Field(default=None, description="Updated notes")
+
+
+class PaperSearchRequest(BaseModel):
+    """Paper search request model"""
+    query: str = Field(..., min_length=1, description="Search query")
+    max_results: int = Field(default=10, ge=1, le=100, description="Max results")
+    category: Optional[str] = Field(default=None, description="ArXiv category filter")
+
+
+# =============================================================================
+# RESPONSE MODELS
+# =============================================================================
+
+class APIResponse(BaseModel):
+    """Standard API response wrapper"""
+    success: bool
+    message: Optional[str] = None
+    data: Optional[Dict[str, Any]] = None
+    error: Optional[str] = None
+    timestamp: str = Field(default_factory=lambda: datetime.utcnow().isoformat())
+
+
+class AuthResponse(BaseModel):
+    """Authentication response model"""
+    success: bool
+    message: Optional[str] = None
+    user: Optional[Dict[str, Any]] = None
+    token: Optional[str] = None
+    error: Optional[str] = None
+
+
+class PaperListResponse(BaseModel):
+    """Paper list response model"""
+    papers: List[Dict[str, Any]]
+    count: int
+    total: Optional[int] = None
+
+
+class ChatResponse(BaseModel):
+    """Chat response model"""
+    response: str
+    session_id: str
+    sources: Optional[List[Dict[str, Any]]] = None
+
+
+# =============================================================================
+# ERROR HANDLING
+# =============================================================================
+
+def create_error_response(
+    status_code: int,
+    detail: str,
+    error_type: str = "APIError"
+) -> HTTPException:
+    """Create standardized HTTP exception"""
+    logger.error(f"{error_type}: {detail}")
+    return HTTPException(
+        status_code=status_code,
+        detail={
+            "error": error_type,
+            "message": detail,
+            "timestamp": datetime.utcnow().isoformat()
+        }
+    )
+
+
+def handle_service_error(result: Dict[str, Any], operation: str) -> None:
+    """Handle service layer errors and raise appropriate HTTP exceptions"""
+    if not result.get("success", False):
+        error_msg = result.get("error") or result.get("message") or f"{operation} failed"
+        logger.error(f"{operation} failed: {error_msg}")
+
+        # Map common errors to status codes
+        error_lower = error_msg.lower()
+        if "not found" in error_lower:
+            raise create_error_response(404, error_msg, "NotFoundError")
+        elif "already exists" in error_lower or "already taken" in error_lower:
+            raise create_error_response(409, error_msg, "ConflictError")
+        elif "invalid" in error_lower or "required" in error_lower:
+            raise create_error_response(400, error_msg, "ValidationError")
+        elif "unauthorized" in error_lower or "authentication" in error_lower:
+            raise create_error_response(401, error_msg, "AuthenticationError")
+        else:
+            raise create_error_response(500, error_msg, "InternalError")
+
+
+# =============================================================================
+# UTILITY FUNCTIONS
+# =============================================================================
+
+# Import sanitize_arxiv_id from the consolidated llm_utils module
+from ..services.llm_inference.llm_utils import sanitize_arxiv_id
+
+
+def format_user_response(user: Dict[str, Any]) -> Dict[str, Any]:
+    """Format user data for API response (remove sensitive fields)"""
+    return {
+        "id": str(user.get("_id", user.get("id", ""))),
+        "email": user.get("email", ""),
+        "user_name": user.get("user_name") or user.get("username", ""),
+        "full_name": user.get("full_name", ""),
+        "created_at": user.get("created_at", ""),
+        "last_login": user.get("last_login", "")
+    }
+
+
+def paginate_results(
+    items: List[Any],
+    skip: int = 0,
+    limit: int = 20
+) -> Dict[str, Any]:
+    """Paginate a list of items"""
+    total = len(items)
+    paginated = items[skip:skip + limit]
+    return {
+        "items": paginated,
+        "count": len(paginated),
+        "total": total,
+        "skip": skip,
+        "limit": limit,
+        "has_more": skip + limit < total
+    }
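
Note (not part of the diff): a minimal sketch of how these helpers might be wired into a FastAPI route. The route path and the stubbed fake_get_user call are illustrative assumptions; the real service layer lives under arionxiv/services:

    # Hypothetical route; only APIResponse, handle_service_error and
    # format_user_response come from the module shown above.
    from fastapi import FastAPI
    from arionxiv.utils.api_helpers import APIResponse, handle_service_error, format_user_response

    app = FastAPI()

    async def fake_get_user(user_id: str) -> dict:
        # Stand-in for a service-layer call that returns {"success": ..., ...}
        if user_id != "42":
            return {"success": False, "error": "User not found"}
        return {"success": True, "user": {"_id": user_id, "email": "a@example.com", "user_name": "alice"}}

    @app.get("/users/{user_id}", response_model=APIResponse)
    async def get_user(user_id: str):
        result = await fake_get_user(user_id)
        handle_service_error(result, "Get user")  # maps "not found" -> HTTP 404, etc.
        return APIResponse(success=True, data={"user": format_user_response(result["user"])})
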
arionxiv/utils/file_cleanup.py
@@ -0,0 +1,150 @@
+"""
+File Cleanup Utility for ArionXiv
+Manages temporary paper downloads and cleanup after usage
+"""
+
+import os
+from pathlib import Path
+from typing import List, Optional
+import logging
+
+from ..arxiv_operations.utils import ArxivUtils
+
+logger = logging.getLogger(__name__)
+
+class FileCleanupManager:
+    """Manages cleanup of downloaded paper files"""
+
+    def __init__(self):
+        # Get the downloads directory
+        self.downloads_dir = self._get_downloads_dir()
+
+    def _get_downloads_dir(self) -> Path:
+        """Get the downloads directory path"""
+        # Default to project root downloads directory
+        project_root = Path(__file__).parent.parent.parent
+        downloads_dir = project_root / "downloads"
+        downloads_dir.mkdir(exist_ok=True)
+        return downloads_dir
+
+    def cleanup_paper_files(self, paper_id: str) -> bool:
+        """
+        Delete all files associated with a paper ID
+
+        Args:
+            paper_id: ArXiv paper ID (e.g., "1706.03762")
+
+        Returns:
+            bool: True if cleanup was successful
+        """
+        try:
+            # Clean version IDs from paper_id using normalized function
+            clean_id = ArxivUtils.normalize_arxiv_id(paper_id)
+
+            deleted_count = 0
+
+            # Find all files matching the paper ID pattern
+            patterns = [
+                f"{clean_id}*.pdf",
+                f"{clean_id}*.txt",
+                f"*{clean_id}*.pdf",
+                f"*{clean_id}*.txt"
+            ]
+
+            for pattern in patterns:
+                matching_files = list(self.downloads_dir.glob(pattern))
+                for file_path in matching_files:
+                    try:
+                        file_path.unlink()
+                        deleted_count += 1
+                        logger.info(f"Deleted file: {file_path.name}")
+                    except Exception as e:
+                        logger.warning(f"Failed to delete {file_path.name}: {e}")
+
+            if deleted_count > 0:
+                logger.info(f"Cleaned up {deleted_count} files for paper {paper_id}")
+
+            return True
+
+        except Exception as e:
+            logger.error(f"Failed to cleanup files for paper {paper_id}: {e}")
+            return False
+
+    def cleanup_multiple_papers(self, paper_ids: List[str]) -> int:
+        """
+        Cleanup files for multiple papers
+
+        Args:
+            paper_ids: List of paper IDs to cleanup
+
+        Returns:
+            int: Number of papers successfully cleaned up
+        """
+        success_count = 0
+        for paper_id in paper_ids:
+            if self.cleanup_paper_files(paper_id):
+                success_count += 1
+
+        return success_count
+
+    def get_paper_files(self, paper_id: str) -> List[Path]:
+        """
+        Get all files associated with a paper ID
+
+        Args:
+            paper_id: ArXiv paper ID
+
+        Returns:
+            List[Path]: List of file paths for the paper
+        """
+        try:
+            # Clean version IDs from paper_id using normalized function
+            clean_id = ArxivUtils.normalize_arxiv_id(paper_id)
+
+            files = []
+
+            # Find all files matching the paper ID pattern
+            patterns = [
+                f"{clean_id}*.pdf",
+                f"{clean_id}*.txt",
+                f"*{clean_id}*.pdf",
+                f"*{clean_id}*.txt"
+            ]
+
+            for pattern in patterns:
+                matching_files = list(self.downloads_dir.glob(pattern))
+                files.extend(matching_files)
+
+            return files
+
+        except Exception as e:
+            logger.error(f"Failed to get files for paper {paper_id}: {e}")
+            return []
+
+    def cleanup_all_downloads(self) -> int:
+        """
+        Clean up all downloaded files
+
+        Returns:
+            int: Number of files deleted
+        """
+        try:
+            deleted_count = 0
+
+            for file_path in self.downloads_dir.glob("*"):
+                if file_path.is_file() and file_path.suffix in ['.pdf', '.txt']:
+                    try:
+                        file_path.unlink()
+                        deleted_count += 1
+                    except Exception as e:
+                        logger.warning(f"Failed to delete {file_path.name}: {e}")
+
+            logger.info(f"Cleaned up {deleted_count} total files")
+            return deleted_count
+
+        except Exception as e:
+            logger.error(f"Failed to cleanup all downloads: {e}")
+            return 0
+
+# Global instance
+file_cleanup_manager = FileCleanupManager()
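
Note (not part of the diff): a short sketch of the cleanup flow using the module-level file_cleanup_manager instance defined above; the paper ID is simply the example from the docstring:

    # Hypothetical usage; assumes the arionxiv wheel is installed.
    from arionxiv.utils.file_cleanup import file_cleanup_manager

    paper_id = "1706.03762"
    files = file_cleanup_manager.get_paper_files(paper_id)   # cached PDFs/TXTs, if any
    print(f"{len(files)} cached file(s) for {paper_id}")
    if files:
        file_cleanup_manager.cleanup_paper_files(paper_id)   # delete them and log the count
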
arionxiv/utils/ip_helper.py
@@ -0,0 +1,96 @@
+"""
+IP Detection Helper for MongoDB Atlas Connection Issues
+"""
+
+import requests
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+def get_public_ip():
+    """Get current public IP address"""
+    try:
+        response = requests.get('https://api.ipify.org?format=json', timeout=5)
+        ip = response.json().get('ip')
+        return ip
+    except Exception as e:
+        logger.warning(f"Failed to get public IP via ipify: {e}")
+
+    # Fallback methods
+    fallback_services = [
+        'https://icanhazip.com',
+        'https://ident.me',
+        'https://ipecho.net/plain'
+    ]
+
+    for service in fallback_services:
+        try:
+            response = requests.get(service, timeout=5)
+            ip = response.text.strip()
+            if ip:
+                return ip
+        except Exception:
+            continue
+
+    return None
+
+
+def display_ip_whitelist_help(current_ip=None):
+    """Display helpful message about IP whitelisting"""
+    if current_ip is None:
+        current_ip = get_public_ip()
+
+    message = "\n" + "="*70 + "\n"
+    message += "MongoDB Atlas IP Whitelisting Issue Detected\n"
+    message += "="*70 + "\n"
+
+    if current_ip:
+        message += f"\nYour current public IP: {current_ip}\n"
+    else:
+        message += "\nCould not detect your current IP automatically.\n"
+
+    message += "\nSolutions:\n"
+    message += "\n1. FOR DEVELOPMENT (Recommended):"
+    message += "\n - Go to MongoDB Atlas Dashboard"
+    message += "\n - Network Access > IP Access List"
+    message += "\n - Click 'Add IP Address'"
+    message += "\n - Select 'Allow Access from Anywhere' (0.0.0.0/0)"
+    message += "\n - This allows all IPs and solves your issue permanently for dev"
+
+    message += "\n\n2. FOR PRODUCTION (More Secure):"
+    if current_ip:
+        message += f"\n - Add your current IP: {current_ip}"
+    message += "\n - Use a static IP or VPN"
+    message += "\n - Configure IP ranges for your infrastructure"
+
+    message += "\n\n3. ALTERNATIVE: Use a direct mongodb:// connection string instead of mongodb+srv://"
+    message += "\n - Some ISPs work better with direct connections"
+
+    message += "\n\nMongoDB Atlas Dashboard:"
+    message += "\n https://cloud.mongodb.com/"
+
+    message += "\n\n" + "="*70 + "\n"
+
+    logger.warning(message)
+
+
+def check_mongodb_connection_error(error_message):
+    """Check if error is related to IP whitelisting and provide help"""
+    ip_whitelist_indicators = [
+        "not authorized",
+        "ip address is not whitelisted",
+        "connection refused",
+        "timed out",
+        "network access",
+        "authentication failed"
+    ]
+
+    error_lower = str(error_message).lower()
+
+    for indicator in ip_whitelist_indicators:
+        if indicator in error_lower:
+            display_ip_whitelist_help()
+            return True
+
+    return False
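
Note (not part of the diff): a minimal sketch of how this helper might wrap a MongoDB connection attempt; the pymongo calls and the MONGODB_URI environment variable are assumptions for illustration, not part of the package:

    # Hypothetical wrapper; assumes pymongo is installed and MONGODB_URI is set.
    import os
    from pymongo import MongoClient
    from pymongo.errors import PyMongoError
    from arionxiv.utils.ip_helper import check_mongodb_connection_error, get_public_ip

    try:
        client = MongoClient(os.environ["MONGODB_URI"], serverSelectionTimeoutMS=5000)
        client.admin.command("ping")
    except PyMongoError as exc:
        # Print the Atlas whitelist guidance only when the error looks IP-related.
        if not check_mongodb_connection_error(exc):
            raise
    else:
        print(f"Connected; current public IP: {get_public_ip()}")
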