pomera-ai-commander 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +680 -0
- package/bin/pomera-ai-commander.js +62 -0
- package/core/__init__.py +66 -0
- package/core/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/__pycache__/app_context.cpython-313.pyc +0 -0
- package/core/__pycache__/async_text_processor.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/backup_recovery_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/content_hash_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/context_menu.cpython-313.pyc +0 -0
- package/core/__pycache__/data_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/database_connection_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_curl_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_promera_ai_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema.cpython-313.pyc +0 -0
- package/core/__pycache__/database_schema_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/database_settings_manager_interface.cpython-313.pyc +0 -0
- package/core/__pycache__/dialog_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/efficient_line_numbers.cpython-313.pyc +0 -0
- package/core/__pycache__/error_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/error_service.cpython-313.pyc +0 -0
- package/core/__pycache__/event_consolidator.cpython-313.pyc +0 -0
- package/core/__pycache__/memory_efficient_text_widget.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_test_suite.cpython-313.pyc +0 -0
- package/core/__pycache__/migration_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_find_replace.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_pattern_engine.cpython-313.pyc +0 -0
- package/core/__pycache__/optimized_search_highlighter.cpython-313.pyc +0 -0
- package/core/__pycache__/performance_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/persistence_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/progressive_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_cache.cpython-313.pyc +0 -0
- package/core/__pycache__/regex_pattern_library.cpython-313.pyc +0 -0
- package/core/__pycache__/search_operation_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_defaults_registry.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_integrity_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_serializer.cpython-313.pyc +0 -0
- package/core/__pycache__/settings_validator.cpython-313.pyc +0 -0
- package/core/__pycache__/smart_stats_calculator.cpython-313.pyc +0 -0
- package/core/__pycache__/statistics_update_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/stats_config_manager.cpython-313.pyc +0 -0
- package/core/__pycache__/streaming_text_handler.cpython-313.pyc +0 -0
- package/core/__pycache__/task_scheduler.cpython-313.pyc +0 -0
- package/core/__pycache__/visibility_monitor.cpython-313.pyc +0 -0
- package/core/__pycache__/widget_cache.cpython-313.pyc +0 -0
- package/core/app_context.py +482 -0
- package/core/async_text_processor.py +422 -0
- package/core/backup_manager.py +656 -0
- package/core/backup_recovery_manager.py +1034 -0
- package/core/content_hash_cache.py +509 -0
- package/core/context_menu.py +313 -0
- package/core/data_validator.py +1067 -0
- package/core/database_connection_manager.py +745 -0
- package/core/database_curl_settings_manager.py +609 -0
- package/core/database_promera_ai_settings_manager.py +447 -0
- package/core/database_schema.py +412 -0
- package/core/database_schema_manager.py +396 -0
- package/core/database_settings_manager.py +1508 -0
- package/core/database_settings_manager_interface.py +457 -0
- package/core/dialog_manager.py +735 -0
- package/core/efficient_line_numbers.py +511 -0
- package/core/error_handler.py +747 -0
- package/core/error_service.py +431 -0
- package/core/event_consolidator.py +512 -0
- package/core/mcp/__init__.py +43 -0
- package/core/mcp/__pycache__/__init__.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/protocol.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/schema.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/server_stdio.cpython-313.pyc +0 -0
- package/core/mcp/__pycache__/tool_registry.cpython-313.pyc +0 -0
- package/core/mcp/protocol.py +288 -0
- package/core/mcp/schema.py +251 -0
- package/core/mcp/server_stdio.py +299 -0
- package/core/mcp/tool_registry.py +2345 -0
- package/core/memory_efficient_text_widget.py +712 -0
- package/core/migration_manager.py +915 -0
- package/core/migration_test_suite.py +1086 -0
- package/core/migration_validator.py +1144 -0
- package/core/optimized_find_replace.py +715 -0
- package/core/optimized_pattern_engine.py +424 -0
- package/core/optimized_search_highlighter.py +553 -0
- package/core/performance_monitor.py +675 -0
- package/core/persistence_manager.py +713 -0
- package/core/progressive_stats_calculator.py +632 -0
- package/core/regex_pattern_cache.py +530 -0
- package/core/regex_pattern_library.py +351 -0
- package/core/search_operation_manager.py +435 -0
- package/core/settings_defaults_registry.py +1087 -0
- package/core/settings_integrity_validator.py +1112 -0
- package/core/settings_serializer.py +558 -0
- package/core/settings_validator.py +1824 -0
- package/core/smart_stats_calculator.py +710 -0
- package/core/statistics_update_manager.py +619 -0
- package/core/stats_config_manager.py +858 -0
- package/core/streaming_text_handler.py +723 -0
- package/core/task_scheduler.py +596 -0
- package/core/update_pattern_library.py +169 -0
- package/core/visibility_monitor.py +596 -0
- package/core/widget_cache.py +498 -0
- package/mcp.json +61 -0
- package/package.json +57 -0
- package/pomera.py +7483 -0
- package/pomera_mcp_server.py +144 -0
- package/tools/__init__.py +5 -0
- package/tools/__pycache__/__init__.cpython-313.pyc +0 -0
- package/tools/__pycache__/ai_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/ascii_art_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/base64_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/base_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/case_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/column_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/cron_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_history.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_processor.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_settings.cpython-313.pyc +0 -0
- package/tools/__pycache__/curl_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/diff_viewer.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_extraction_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/email_header_analyzer.cpython-313.pyc +0 -0
- package/tools/__pycache__/extraction_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/find_replace.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter.cpython-313.pyc +0 -0
- package/tools/__pycache__/folder_file_reporter_adapter.cpython-313.pyc +0 -0
- package/tools/__pycache__/generator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/hash_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/html_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/huggingface_helper.cpython-313.pyc +0 -0
- package/tools/__pycache__/jsonxml_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/line_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/list_comparator.cpython-313.pyc +0 -0
- package/tools/__pycache__/markdown_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/mcp_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/notes_widget.cpython-313.pyc +0 -0
- package/tools/__pycache__/number_base_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/regex_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/slug_generator.cpython-313.pyc +0 -0
- package/tools/__pycache__/sorter_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/string_escape_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_statistics_tool.cpython-313.pyc +0 -0
- package/tools/__pycache__/text_wrapper.cpython-313.pyc +0 -0
- package/tools/__pycache__/timestamp_converter.cpython-313.pyc +0 -0
- package/tools/__pycache__/tool_loader.cpython-313.pyc +0 -0
- package/tools/__pycache__/translator_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_link_extractor.cpython-313.pyc +0 -0
- package/tools/__pycache__/url_parser.cpython-313.pyc +0 -0
- package/tools/__pycache__/whitespace_tools.cpython-313.pyc +0 -0
- package/tools/__pycache__/word_frequency_counter.cpython-313.pyc +0 -0
- package/tools/ai_tools.py +2892 -0
- package/tools/ascii_art_generator.py +353 -0
- package/tools/base64_tools.py +184 -0
- package/tools/base_tool.py +511 -0
- package/tools/case_tool.py +309 -0
- package/tools/column_tools.py +396 -0
- package/tools/cron_tool.py +885 -0
- package/tools/curl_history.py +601 -0
- package/tools/curl_processor.py +1208 -0
- package/tools/curl_settings.py +503 -0
- package/tools/curl_tool.py +5467 -0
- package/tools/diff_viewer.py +1072 -0
- package/tools/email_extraction_tool.py +249 -0
- package/tools/email_header_analyzer.py +426 -0
- package/tools/extraction_tools.py +250 -0
- package/tools/find_replace.py +1751 -0
- package/tools/folder_file_reporter.py +1463 -0
- package/tools/folder_file_reporter_adapter.py +480 -0
- package/tools/generator_tools.py +1217 -0
- package/tools/hash_generator.py +256 -0
- package/tools/html_tool.py +657 -0
- package/tools/huggingface_helper.py +449 -0
- package/tools/jsonxml_tool.py +730 -0
- package/tools/line_tools.py +419 -0
- package/tools/list_comparator.py +720 -0
- package/tools/markdown_tools.py +562 -0
- package/tools/mcp_widget.py +1417 -0
- package/tools/notes_widget.py +973 -0
- package/tools/number_base_converter.py +373 -0
- package/tools/regex_extractor.py +572 -0
- package/tools/slug_generator.py +311 -0
- package/tools/sorter_tools.py +459 -0
- package/tools/string_escape_tool.py +393 -0
- package/tools/text_statistics_tool.py +366 -0
- package/tools/text_wrapper.py +431 -0
- package/tools/timestamp_converter.py +422 -0
- package/tools/tool_loader.py +710 -0
- package/tools/translator_tools.py +523 -0
- package/tools/url_link_extractor.py +262 -0
- package/tools/url_parser.py +205 -0
- package/tools/whitespace_tools.py +356 -0
- package/tools/word_frequency_counter.py +147 -0
package/tools/curl_processor.py
@@ -0,0 +1,1208 @@
"""
Core HTTP processing module for the cURL GUI Tool.

This module provides the CurlProcessor class that handles HTTP requests,
response processing, and basic error handling for the cURL GUI Tool.
"""

import requests
import time
import re
import shlex
import json
from dataclasses import dataclass, field
from typing import Dict, Optional, Any, Union, List, Tuple
from datetime import datetime


@dataclass
class ResponseData:
    """HTTP response data structure."""
    status_code: int
    headers: Dict[str, str]
    body: str
    timing: Dict[str, float]
    size: int
    encoding: str
    content_type: str
    url: str

    def is_json(self) -> bool:
        """Check if response is JSON."""
        return 'application/json' in self.content_type.lower()

    def format_body(self, format_type: str = 'auto') -> str:
        """Format response body for display."""
        if format_type == 'auto':
            if self.is_json():
                try:
                    import json
                    parsed = json.loads(self.body)
                    return json.dumps(parsed, indent=2)
                except (json.JSONDecodeError, ValueError):
                    return self.body
        return self.body


class CurlToolError(Exception):
    """Base exception for cURL tool errors."""
    pass


class RequestError(CurlToolError):
    """HTTP request execution errors."""
    def __init__(self, message: str, suggestion: Optional[str] = None, error_code: Optional[str] = None):
        self.message = message
        self.suggestion = suggestion
        self.error_code = error_code
        super().__init__(message)


class ParseError(CurlToolError):
    """cURL command parsing errors with helpful suggestions."""

    def __init__(self, message: str, suggestion: Optional[str] = None, position: Optional[int] = None):
        self.message = message
        self.suggestion = suggestion
        self.position = position

        # Auto-generate suggestions for common errors
        if not suggestion:
            self.suggestion = self._generate_suggestion(message)

        super().__init__(message)

    def _generate_suggestion(self, message: str) -> str:
        """Generate helpful suggestions based on error message."""
        message_lower = message.lower()

        if "empty" in message_lower or "missing" in message_lower:
            return "Make sure your cURL command includes a URL. Example: curl https://api.example.com"
        elif "quote" in message_lower or "unterminated" in message_lower:
            return "Check that all quotes are properly closed. Use matching single or double quotes."
        elif "method" in message_lower:
            return "Valid HTTP methods: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS"
        elif "header" in message_lower:
            return "Headers should be in format: -H \"Key: Value\""
        elif "start with" in message_lower:
            return "Command must begin with 'curl'. Example: curl -X GET https://api.example.com"
        else:
            return "Check the cURL command syntax. Use the Help button for examples."

    def __str__(self):
        """Format error message with suggestion."""
        msg = self.message
        if self.position is not None:
            msg += f" (at position {self.position})"
        return msg


@dataclass
class RequestConfig:
    """Configuration for HTTP requests."""
    method: str = "GET"
    url: str = ""
    headers: Dict[str, str] = field(default_factory=dict)
    body: Optional[str] = None
    body_type: str = "none"  # none, json, form, multipart, raw
    auth_type: str = "none"  # none, bearer, basic, apikey
    auth_data: Dict[str, str] = field(default_factory=dict)
    follow_redirects: bool = True
    verify_ssl: bool = True
    timeout: int = 30
    verbose: bool = False  # -v flag
    junk_session_cookies: bool = False  # -j flag
    save_to_file: bool = False  # -O flag
    use_remote_name: bool = False  # -O flag (same as save_to_file for our purposes)
    download_path: str = ""  # Directory for downloads
    complex_options: str = ""  # Additional cURL options not handled by UI

    def to_curl_command(self, processor=None) -> str:
        """Generate cURL command from configuration."""
        if processor is None:
            # Create a temporary processor instance
            from tools.curl_processor import CurlProcessor
            processor = CurlProcessor()
        return processor.generate_curl_command(self)

    @classmethod
    def from_curl_command(cls, curl_command: str, processor=None) -> 'RequestConfig':
        """Parse cURL command into configuration."""
        if processor is None:
            # Create a temporary processor instance
            from tools.curl_processor import CurlProcessor
            processor = CurlProcessor()
        return processor.parse_curl_command(curl_command)


class AuthenticationManager:
    """Handles authentication for HTTP requests."""

    @staticmethod
    def apply_auth(auth_type: str, auth_data: Dict[str, str], request_params: Dict[str, Any]) -> Dict[str, Any]:
        """
        Apply authentication to request parameters.

        Args:
            auth_type: Type of authentication (bearer, basic, apikey, none)
            auth_data: Authentication data dictionary
            request_params: Request parameters dictionary to modify

        Returns:
            Modified request parameters with authentication applied

        Raises:
            RequestError: If authentication configuration is invalid
        """
        if auth_type == "none" or not auth_data:
            return request_params

        if auth_type == "bearer":
            token = auth_data.get('token', '').strip()
            if not token:
                raise RequestError("Bearer token is required", "Please enter a valid bearer token")

            # Add Authorization header
            if 'headers' not in request_params:
                request_params['headers'] = {}
            request_params['headers']['Authorization'] = f"Bearer {token}"

        elif auth_type == "basic":
            username = auth_data.get('username', '').strip()
            password = auth_data.get('password', '')

            if not username:
                raise RequestError("Username is required for Basic Auth", "Please enter a username")

            # Use requests built-in basic auth
            from requests.auth import HTTPBasicAuth
            request_params['auth'] = HTTPBasicAuth(username, password)

        elif auth_type == "apikey":
            key_name = auth_data.get('key_name', '').strip()
            key_value = auth_data.get('key_value', '').strip()
            location = auth_data.get('location', 'header')

            if not key_name or not key_value:
                raise RequestError("API key name and value are required", "Please enter both key name and value")

            if location == 'header':
                # Add as header
                if 'headers' not in request_params:
                    request_params['headers'] = {}
                request_params['headers'][key_name] = key_value
            elif location == 'query_parameter':
                # Add as query parameter
                if 'params' not in request_params:
                    request_params['params'] = {}
                request_params['params'][key_name] = key_value

        return request_params

    @staticmethod
    def get_auth_error_suggestion(auth_type: str, error: Exception) -> str:
        """
        Get authentication-specific error suggestions.

        Args:
            auth_type: Type of authentication that failed
            error: The exception that occurred

        Returns:
            Helpful suggestion for fixing the authentication error
        """
        error_str = str(error).lower()

        if auth_type == "bearer":
            if "401" in error_str or "unauthorized" in error_str:
                return "Bearer token may be invalid or expired. Please check your token and try again."
            elif "403" in error_str or "forbidden" in error_str:
                return "Bearer token is valid but doesn't have permission for this resource."

        elif auth_type == "basic":
            if "401" in error_str or "unauthorized" in error_str:
                return "Username or password may be incorrect. Please verify your credentials."
            elif "403" in error_str or "forbidden" in error_str:
                return "Credentials are valid but don't have permission for this resource."

        elif auth_type == "apikey":
            if "401" in error_str or "unauthorized" in error_str:
                return "API key may be invalid or missing. Please check your key and location settings."
            elif "403" in error_str or "forbidden" in error_str:
                return "API key is valid but doesn't have permission for this resource."

        return "Authentication failed. Please check your credentials and try again."


class CurlProcessor:
    """Core HTTP request processing and cURL command handling."""

    def __init__(self):
        """Initialize the CurlProcessor with a requests session."""
        self.session = requests.Session()
        self.history = []
        self.current_request = None
        self.current_response = None

        # Set default headers
        self.session.headers.update({
            'User-Agent': 'Pomera-cURL-Tool/1.0'
        })

    def execute_request(self, method: str, url: str, headers: Optional[Dict[str, str]] = None,
                        body: Optional[Union[str, Dict]] = None, auth: Optional[Any] = None,
                        auth_type: str = "none", auth_data: Optional[Dict[str, str]] = None,
                        **kwargs) -> ResponseData:
        """
        Execute HTTP request with the specified parameters.

        Args:
            method: HTTP method (GET, POST, PUT, DELETE, etc.)
            url: Target URL
            headers: Optional headers dictionary
            body: Optional request body (string or dict for JSON)
            auth: Optional authentication object (legacy)
            auth_type: Authentication type (bearer, basic, apikey, none)
            auth_data: Authentication data dictionary
            **kwargs: Additional requests parameters

        Returns:
            ResponseData object containing response information

        Raises:
            RequestError: If the request fails
        """
        if not url:
            raise RequestError("URL is required", "Please enter a valid URL")

        if not url.startswith(('http://', 'https://')):
            url = 'https://' + url

        method = method.upper()
        if method not in ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'HEAD', 'OPTIONS']:
            raise RequestError(f"Unsupported HTTP method: {method}",
                               "Supported methods: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS")

        # Prepare request parameters
        request_params = {
            'method': method,
            'url': url,
            'timeout': kwargs.get('timeout', 30),
            'allow_redirects': kwargs.get('follow_redirects', True),
            'verify': kwargs.get('verify_ssl', True)
        }

        # Add headers if provided
        if headers:
            request_params['headers'] = headers.copy()

        # Apply authentication using the authentication manager
        try:
            request_params = AuthenticationManager.apply_auth(
                auth_type or "none",
                auth_data or {},
                request_params
            )
        except RequestError:
            raise  # Re-raise authentication errors

        # Add legacy authentication if provided (for backward compatibility)
        if auth and not auth_type:
            request_params['auth'] = auth

        # Handle files and data for multipart form data
        if 'files' in kwargs and kwargs['files']:
            request_params['files'] = kwargs['files']
            # Also add form data if provided
            if 'data' in kwargs and kwargs['data']:
                request_params['data'] = kwargs['data']
        # Add body for methods that support it
        elif body and method in ['POST', 'PUT', 'PATCH']:
            if isinstance(body, dict):
                request_params['json'] = body
            else:
                request_params['data'] = body

        try:
            # Record detailed timing information
            timing_info = {}
            start_time = time.time()

            # Execute the request with detailed timing
            response = self.session.request(**request_params)

            # Calculate timing
            total_time = time.time() - start_time

            # Extract detailed timing from response if available
            timing_info = self._extract_detailed_timing(response, total_time)

            # Create response data
            response_data = ResponseData(
                status_code=response.status_code,
                headers=dict(response.headers),
                body=response.text,
                timing=timing_info,
                size=len(response.content),
                encoding=response.encoding or 'utf-8',
                content_type=response.headers.get('content-type', ''),
                url=response.url
            )

            # Store current request and response
            self.current_request = request_params
            self.current_response = response_data

            # Add to history
            self._add_to_history(method, url, response_data)

            return response_data

        except requests.exceptions.ConnectionError as e:
            diagnostic_info = self._get_connection_diagnostic(url, str(e))
            raise RequestError(
                f"Connection failed: {str(e)}",
                diagnostic_info
            )
        except requests.exceptions.Timeout as e:
            diagnostic_info = self._get_timeout_diagnostic(request_params.get('timeout', 30))
            raise RequestError(
                f"Request timed out after {request_params.get('timeout', 30)}s: {str(e)}",
                diagnostic_info
            )
        except requests.exceptions.SSLError as e:
            diagnostic_info = self._get_ssl_diagnostic(url, str(e))
            raise RequestError(
                f"SSL verification failed: {str(e)}",
                diagnostic_info
            )
        except requests.exceptions.HTTPError as e:
            # Check for authentication-related HTTP errors
            if response.status_code in [401, 403]:
                auth_suggestion = AuthenticationManager.get_auth_error_suggestion(
                    auth_type or "none", e
                )
                diagnostic_info = self._get_http_diagnostic(response.status_code, response.headers)
                raise RequestError(
                    f"HTTP {response.status_code}: {str(e)}",
                    f"{auth_suggestion}\n\nDiagnostic Info:\n{diagnostic_info}"
                )
            else:
                diagnostic_info = self._get_http_diagnostic(response.status_code, response.headers)
                raise RequestError(
                    f"HTTP {response.status_code}: {str(e)}",
                    f"Check the request parameters and try again.\n\nDiagnostic Info:\n{diagnostic_info}"
                )
        except requests.exceptions.RequestException as e:
            # Check if it's an authentication error based on response
            if hasattr(e, 'response') and e.response and e.response.status_code in [401, 403]:
                auth_suggestion = AuthenticationManager.get_auth_error_suggestion(
                    auth_type or "none", e
                )
                diagnostic_info = self._get_http_diagnostic(e.response.status_code, e.response.headers)
                raise RequestError(
                    f"Authentication failed: {str(e)}",
                    f"{auth_suggestion}\n\nDiagnostic Info:\n{diagnostic_info}"
                )
            else:
                diagnostic_info = self._get_general_diagnostic(str(e))
                raise RequestError(
                    f"Request failed: {str(e)}",
                    f"Check the URL and request parameters.\n\nDiagnostic Info:\n{diagnostic_info}"
                )
        except Exception as e:
            diagnostic_info = self._get_general_diagnostic(str(e))
            raise RequestError(
                f"Unexpected error: {str(e)}",
                f"Please try again or contact support.\n\nDiagnostic Info:\n{diagnostic_info}"
            )

    def _extract_detailed_timing(self, response, total_time):
        """Extract detailed timing information from response."""
        timing = {
            'total': total_time,
            'dns': 0.0,
            'connect': 0.0,
            'tls': 0.0,
            'ttfb': total_time,
            'download': 0.0
        }

        # Try to extract timing from response object if available
        if hasattr(response, 'elapsed'):
            # requests library provides elapsed time
            timing['ttfb'] = response.elapsed.total_seconds()
            timing['download'] = max(0, total_time - timing['ttfb'])

        # Estimate timing breakdown (basic approximation)
        if total_time > 0:
            # Very rough estimates for timing breakdown
            timing['dns'] = min(0.1, total_time * 0.1)  # DNS usually quick
            timing['connect'] = min(0.2, total_time * 0.15)  # TCP connect
            timing['tls'] = min(0.3, total_time * 0.2) if response.url.startswith('https://') else 0.0
            timing['ttfb'] = max(0, total_time - timing['dns'] - timing['connect'] - timing['tls'] - timing['download'])

        return timing

    def _add_to_history(self, method: str, url: str, response_data: ResponseData):
        """Add request to history."""
        history_item = {
            'timestamp': datetime.now(),
            'method': method,
            'url': url,
            'status_code': response_data.status_code,
            'response_time': response_data.timing['total'],
            'success': 200 <= response_data.status_code < 400,
            'response_preview': response_data.body[:200] if response_data.body else ''
        }

        self.history.append(history_item)

        # Keep only last 100 requests
        if len(self.history) > 100:
            self.history.pop(0)

    def get_history(self):
        """Get request history."""
        return self.history.copy()

    def clear_history(self):
        """Clear request history."""
        self.history.clear()

    def download_file(self, url: str, filepath: str = None, use_remote_name: bool = False,
                      resume: bool = False, progress_callback=None, **kwargs) -> Dict[str, Any]:
        """
        Download a file from URL with progress indication and resume support.

        Args:
            url: URL to download from
            filepath: Local file path to save to (optional if use_remote_name=True)
            use_remote_name: Use the remote filename from URL or Content-Disposition header
            resume: Attempt to resume interrupted download
            progress_callback: Callback function for progress updates (bytes_downloaded, total_bytes, speed)
            **kwargs: Additional request parameters (headers, auth, etc.)

        Returns:
            Dictionary with download information (filepath, size, time, success)

        Raises:
            RequestError: If download fails
        """
        import os
        from urllib.parse import urlparse, unquote

        if not url:
            raise RequestError("URL is required for download", "Please enter a valid URL")

        if not url.startswith(('http://', 'https://')):
            url = 'https://' + url

        # Determine filename if use_remote_name is True
        if use_remote_name:
            parsed_url = urlparse(url)
            filename = os.path.basename(unquote(parsed_url.path))
            if not filename or '.' not in filename:
                filename = 'downloaded_file'

            if filepath:
                # If filepath is provided and it's a directory, append filename
                if os.path.isdir(filepath):
                    filepath = os.path.join(filepath, filename)
                # If filepath is provided and it's not a directory, use it as-is
            else:
                # No filepath provided, use current directory with remote filename
                filepath = filename
        elif not filepath:
            raise RequestError("Filepath is required when not using remote name",
                               "Please specify a file path or enable 'Use Remote Name'")

        # Prepare request parameters
        request_params = {
            'timeout': kwargs.get('timeout', 30),
            'allow_redirects': kwargs.get('follow_redirects', True),
            'verify': kwargs.get('verify_ssl', True),
            'stream': True  # Important for large file downloads
        }

        # Add headers if provided
        if 'headers' in kwargs:
            request_params['headers'] = kwargs['headers'].copy()
        else:
            request_params['headers'] = {}

        # Apply authentication
        auth_type = kwargs.get('auth_type', 'none')
        auth_data = kwargs.get('auth_data', {})
        try:
            request_params = AuthenticationManager.apply_auth(auth_type, auth_data, request_params)
        except RequestError:
            raise

        # Handle resume functionality
        start_byte = 0
        if resume and os.path.exists(filepath):
            start_byte = os.path.getsize(filepath)
            request_params['headers']['Range'] = f'bytes={start_byte}-'

        try:
            start_time = time.time()

            # Make initial request to get headers and file info
            response = self.session.get(url, **request_params)
            response.raise_for_status()

            # Get total file size
            total_size = None
            if 'content-length' in response.headers:
                total_size = int(response.headers['content-length'])
                if resume and start_byte > 0:
                    total_size += start_byte  # Add existing bytes to total

            # Check if server supports resume
            if resume and start_byte > 0:
                if response.status_code != 206:  # Partial Content
                    # Server doesn't support resume, start over
                    start_byte = 0
                    response.close()
                    # Remove Range header and try again
                    if 'Range' in request_params['headers']:
                        del request_params['headers']['Range']
                    response = self.session.get(url, **request_params)
                    response.raise_for_status()
                    if 'content-length' in response.headers:
                        total_size = int(response.headers['content-length'])

            # Update filename from Content-Disposition header if use_remote_name
            if use_remote_name:
                content_disposition = response.headers.get('content-disposition', '')
                if 'filename=' in content_disposition:
                    # Extract filename from Content-Disposition header
                    import re
                    filename_match = re.search(r'filename[*]?=([^;]+)', content_disposition)
                    if filename_match:
                        remote_filename = filename_match.group(1).strip('"\'')
                        if remote_filename:
                            # Update filepath with the remote filename
                            directory = os.path.dirname(filepath) if os.path.dirname(filepath) else '.'
                            filepath = os.path.join(directory, remote_filename)

            # Open file for writing (append mode if resuming)
            mode = 'ab' if (resume and start_byte > 0) else 'wb'

            with open(filepath, mode) as f:
                downloaded_bytes = start_byte
                last_progress_time = time.time()
                last_downloaded_bytes = downloaded_bytes

                # Download in chunks
                chunk_size = 8192  # 8KB chunks
                for chunk in response.iter_content(chunk_size=chunk_size):
                    if chunk:  # Filter out keep-alive chunks
                        f.write(chunk)
                        downloaded_bytes += len(chunk)

                        # Call progress callback if provided
                        if progress_callback:
                            current_time = time.time()
                            # Calculate speed (bytes per second)
                            time_diff = current_time - last_progress_time
                            if time_diff >= 0.1:  # Update every 100ms
                                bytes_diff = downloaded_bytes - last_downloaded_bytes
                                speed = bytes_diff / time_diff if time_diff > 0 else 0

                                progress_callback(downloaded_bytes, total_size, speed)

                                last_progress_time = current_time
                                last_downloaded_bytes = downloaded_bytes

            # Final progress callback
            if progress_callback:
                total_time = time.time() - start_time
                avg_speed = downloaded_bytes / total_time if total_time > 0 else 0
                progress_callback(downloaded_bytes, total_size or downloaded_bytes, avg_speed)

            download_info = {
                'filepath': os.path.abspath(filepath),
                'size': downloaded_bytes,
                'total_size': total_size,
                'time': time.time() - start_time,
                'success': True,
                'resumed': resume and start_byte > 0,
                'url': url
            }

            return download_info

        except requests.exceptions.ConnectionError as e:
            diagnostic_info = self._get_connection_diagnostic(url, str(e))
            raise RequestError(
                f"Download failed - Connection error: {str(e)}",
                diagnostic_info
            )
        except requests.exceptions.Timeout as e:
            diagnostic_info = self._get_timeout_diagnostic(request_params.get('timeout', 30))
            raise RequestError(
                f"Download failed - Request timed out after {request_params.get('timeout', 30)}s: {str(e)}",
                diagnostic_info
            )
        except requests.exceptions.HTTPError as e:
            if response.status_code == 416:  # Range Not Satisfiable
                raise RequestError(
                    "Resume failed - file may be complete or server doesn't support resume",
                    "Try downloading without resume option"
                )
            else:
                diagnostic_info = self._get_http_diagnostic(response.status_code, response.headers)
                raise RequestError(
                    f"Download failed - HTTP {response.status_code}: {str(e)}",
                    diagnostic_info
                )
        except OSError as e:
            raise RequestError(
                f"Download failed - File system error: {str(e)}",
                "Check file path permissions and available disk space"
            )
        except Exception as e:
            raise RequestError(
                f"Download failed - Unexpected error: {str(e)}",
                "Please try again or check the URL and file path"
            )

    def validate_json(self, text: str) -> tuple[bool, Optional[str]]:
        """
        Validate JSON text.

        Args:
            text: JSON text to validate

        Returns:
            Tuple of (is_valid, error_message)
        """
        try:
            json.loads(text)
            return True, None
        except json.JSONDecodeError as e:
            return False, f"JSON error at line {e.lineno}, column {e.colno}: {e.msg}"
        except Exception as e:
            return False, f"Validation error: {str(e)}"

    def parse_curl_command(self, curl_command: str) -> RequestConfig:
        """
        Parse a cURL command string into a RequestConfig object.

        Args:
            curl_command: The cURL command string to parse

        Returns:
            RequestConfig object with parsed parameters

        Raises:
            ParseError: If the cURL command cannot be parsed
        """
        if not curl_command.strip():
            raise ParseError("Empty cURL command")

        # Clean up the command - remove line breaks and extra whitespace
        curl_command = self._clean_curl_command(curl_command)

        try:
            # Split the command into tokens using shlex for proper quote handling
            tokens = shlex.split(curl_command)
        except ValueError as e:
            raise ParseError(f"Failed to parse cURL command: {str(e)}")

        if not tokens or tokens[0] != 'curl':
            raise ParseError("Command must start with 'curl'")

        config = RequestConfig()
        i = 1  # Skip 'curl'

        while i < len(tokens):
            token = tokens[i]

            if token in ['-X', '--request']:
                # HTTP method
                if i + 1 >= len(tokens):
                    raise ParseError(f"Missing value for {token}")
                config.method = tokens[i + 1].upper()
                i += 2

            elif token in ['-H', '--header']:
                # Headers
                if i + 1 >= len(tokens):
                    raise ParseError(f"Missing value for {token}")
                header_str = tokens[i + 1]
                self._parse_header(header_str, config)
                i += 2

            elif token in ['-d', '--data', '--data-raw']:
                # Request body
                if i + 1 >= len(tokens):
                    raise ParseError(f"Missing value for {token}")
                config.body = tokens[i + 1]
                config.body_type = "raw"
                # If method not explicitly set and we have data, assume POST
                if config.method == "GET":
                    config.method = "POST"
                i += 2

            elif token == '--data-urlencode':
                # URL encoded data
                if i + 1 >= len(tokens):
                    raise ParseError(f"Missing value for {token}")
                config.body = tokens[i + 1]
                config.body_type = "form"
                if config.method == "GET":
                    config.method = "POST"
                i += 2

            elif token in ['-u', '--user']:
                # Basic authentication
                if i + 1 >= len(tokens):
                    raise ParseError(f"Missing value for {token}")
                auth_str = tokens[i + 1]
                self._parse_basic_auth(auth_str, config)
                i += 2

            elif token in ['-k', '--insecure']:
                # Disable SSL verification
                config.verify_ssl = False
                i += 1

            elif token in ['-L', '--location']:
                # Follow redirects
                config.follow_redirects = True
                i += 1

            elif token in ['-v', '--verbose']:
                # Verbose mode
                config.verbose = True
                i += 1

            elif token in ['-j', '--junk-session-cookies']:
                # Junk session cookies
                config.junk_session_cookies = True
                i += 1

            elif token in ['-J', '--remote-header-name']:
                # Use remote header name for downloads - not directly supported in UI
                i += 1

            elif token in ['-O', '--remote-name']:
                # Use remote name for downloads
                config.save_to_file = True
                config.use_remote_name = True
                i += 1

            elif token in ['--max-time', '-m']:
                # Timeout
                if i + 1 >= len(tokens):
                    raise ParseError(f"Missing value for {token}")
                try:
                    config.timeout = int(tokens[i + 1])
                except ValueError:
                    raise ParseError(f"Invalid timeout value: {tokens[i + 1]}")
                i += 2

            elif token.startswith('-') and len(token) > 2 and not token.startswith('--'):
                # Handle combined short flags like -vLJO
                for flag_char in token[1:]:  # Skip the initial '-'
                    if flag_char == 'v':
                        # Verbose mode
                        config.verbose = True
                    elif flag_char == 'L':
                        # Follow redirects
                        config.follow_redirects = True
                    elif flag_char == 'j':
                        # Junk session cookies
                        config.junk_session_cookies = True
                    elif flag_char == 'J':
                        # Use remote header name - not directly supported in UI
                        pass
                    elif flag_char == 'O':
                        # Use remote name for downloads
                        config.save_to_file = True
                        config.use_remote_name = True
                    elif flag_char == 'k':
                        # Insecure SSL
                        config.verify_ssl = False
                    # Add more single-character flags as needed
                i += 1

            elif token.startswith('-'):
                # Collect unknown flags into complex_options
                complex_parts = [token]
                i += 1

                # Check if this flag has a value (next token doesn't start with -)
                if i < len(tokens) and not tokens[i].startswith('-'):
                    complex_parts.append(tokens[i])
                    i += 1

                # Add to complex_options
                if config.complex_options:
                    config.complex_options += " " + " ".join(complex_parts)
                else:
                    config.complex_options = " ".join(complex_parts)

            else:
                # Assume it's the URL if we haven't found one yet
                if not config.url:
                    config.url = token
                i += 1

        if not config.url:
            raise ParseError("No URL found in cURL command")

        # Auto-detect JSON content type
        if config.body and config.body_type == "raw":
            try:
                json.loads(config.body)
                config.body_type = "json"
                if 'content-type' not in [h.lower() for h in config.headers.keys()]:
                    config.headers['Content-Type'] = 'application/json'
            except (json.JSONDecodeError, ValueError):
                pass

        return config

    def _clean_curl_command(self, curl_command: str) -> str:
        """Clean up cURL command by removing line breaks and normalizing whitespace."""
        # Remove line continuation characters and normalize whitespace
        cleaned = re.sub(r'\\\s*\n\s*', ' ', curl_command)
        cleaned = re.sub(r'\s+', ' ', cleaned)
        return cleaned.strip()

    def _parse_header(self, header_str: str, config: RequestConfig):
        """Parse a header string and add it to the config."""
        if ':' not in header_str:
            raise ParseError(f"Invalid header format: {header_str}")

        key, value = header_str.split(':', 1)
        key = key.strip()
        value = value.strip()

        # Handle authorization headers specially
        if key.lower() == 'authorization':
            if value.lower().startswith('bearer '):
                config.auth_type = "bearer"
                config.auth_data['token'] = value[7:]  # Remove 'Bearer '
                config.auth_data['format'] = 'Bearer'  # Store original format
            elif value.lower().startswith('token '):
                # GitHub-style token authentication
                config.auth_type = "bearer"
                config.auth_data['token'] = value[6:]  # Remove 'token '
                config.auth_data['format'] = 'token'  # Store original format
            elif value.lower().startswith('basic '):
                config.auth_type = "basic"
                # Basic auth is already encoded, we'll store it as-is
                config.auth_data['encoded'] = value[6:]  # Remove 'Basic '

        config.headers[key] = value

    def _parse_basic_auth(self, auth_str: str, config: RequestConfig):
        """Parse basic authentication string."""
        if ':' in auth_str:
            username, password = auth_str.split(':', 1)
            config.auth_type = "basic"
            config.auth_data['username'] = username
            config.auth_data['password'] = password
        else:
            # Username only, password will be prompted
            config.auth_type = "basic"
            config.auth_data['username'] = auth_str
            config.auth_data['password'] = ""

    def generate_curl_command(self, config: RequestConfig) -> str:
        """
        Generate a cURL command string from a RequestConfig object.

        Args:
            config: RequestConfig object with request parameters

        Returns:
            Formatted cURL command string

        Raises:
            ValueError: If required parameters are missing
        """
        if not config.url:
            raise ValueError("URL is required to generate cURL command")

        parts = ['curl']

        # Add method if not GET
        if config.method and config.method.upper() != 'GET':
            parts.extend(['-X', config.method.upper()])

        # Add URL (always quote it to handle special characters)
        parts.append(self._quote_if_needed(config.url))

        # Add headers
        for key, value in config.headers.items():
            # Skip auto-generated headers that we'll handle separately
            if key.lower() == 'authorization' and config.auth_type != "none":
                continue
            header_str = f"{key}: {value}"
            parts.extend(['-H', self._quote_if_needed(header_str)])

        # Add authentication
        if config.auth_type == "bearer" and config.auth_data.get('token'):
            # Use the original format if stored, otherwise default to 'Bearer'
            auth_format = config.auth_data.get('format', 'Bearer')
            auth_header = f"Authorization: {auth_format} {config.auth_data['token']}"
            parts.extend(['-H', self._quote_if_needed(auth_header)])
        elif config.auth_type == "basic":
            if config.auth_data.get('username') and config.auth_data.get('password'):
                auth_str = f"{config.auth_data['username']}:{config.auth_data['password']}"
                parts.extend(['-u', self._quote_if_needed(auth_str)])
            elif config.auth_data.get('username'):
                parts.extend(['-u', self._quote_if_needed(config.auth_data['username'])])
        elif config.auth_type == "apikey":
            # API key authentication - add as header or query param
            key_name = config.auth_data.get('key_name', 'X-API-Key')
            key_value = config.auth_data.get('key_value', '')
            location = config.auth_data.get('location', 'header')

            if location == 'header':
                api_header = f"{key_name}: {key_value}"
                parts.extend(['-H', self._quote_if_needed(api_header)])
            # Query param handling would need URL modification, skip for now

        # Add request body
        if config.body:
            if config.body_type == "form":
                parts.extend(['--data-urlencode', self._quote_if_needed(config.body)])
            else:
                parts.extend(['-d', self._quote_if_needed(config.body)])

        # Add SSL verification flag
        if not config.verify_ssl:
            parts.append('-k')

        # Add redirect following
        if config.follow_redirects:
            parts.append('-L')

        # Add verbose flag
        if config.verbose:
            parts.append('-v')

        # Add download flags
        if config.save_to_file and config.use_remote_name:
            parts.append('-O')

        # Add timeout if not default
        if config.timeout != 30:
            parts.extend(['--max-time', str(config.timeout)])

        # Add junk session cookies flag
        if config.junk_session_cookies:
            parts.append('-j')

        # Add complex options (additional flags not handled by UI)
        if config.complex_options and config.complex_options.strip():
            # Split complex options by lines and filter out comments and empty lines
            complex_lines = config.complex_options.strip().split('\n')
            for line in complex_lines:
                line = line.strip()
                if line and not line.startswith('#'):
                    # Split the line into individual options and add them
                    import shlex
                    try:
                        complex_parts = shlex.split(line)
                        parts.extend(complex_parts)
                    except ValueError:
                        # If shlex fails, just add the line as-is
                        parts.append(line)

        return ' '.join(parts)

    def _quote_if_needed(self, value: str) -> str:
        """Quote a string if it contains special characters."""
        # Characters that require quoting in shell commands
        special_chars = [' ', '"', "'", '\\', '&', '|', ';', '(', ')', '<', '>',
                         '`', '$', '!', '*', '?', '[', ']', '{', '}', '~']

        if any(char in value for char in special_chars):
            # Escape existing quotes and wrap in quotes
            escaped = value.replace('\\', '\\\\').replace('"', '\\"')
            return f'"{escaped}"'

        return value

    def _get_connection_diagnostic(self, url: str, error_msg: str) -> str:
        """Get diagnostic information for connection errors."""
        diagnostics = []

        # Parse URL for diagnostics
        from urllib.parse import urlparse
        parsed = urlparse(url)

        diagnostics.append("Connection Error Diagnostics:")
        diagnostics.append(f"• Host: {parsed.hostname}")
        diagnostics.append(f"• Port: {parsed.port or (443 if parsed.scheme == 'https' else 80)}")
        diagnostics.append(f"• Protocol: {parsed.scheme}")

        # Common connection issues
        if "name or service not known" in error_msg.lower() or "nodename nor servname provided" in error_msg.lower():
            diagnostics.append("• Issue: DNS resolution failed")
            diagnostics.append("• Suggestion: Check if the hostname is correct and accessible")
        elif "connection refused" in error_msg.lower():
            diagnostics.append("• Issue: Server refused connection")
            diagnostics.append("• Suggestion: Check if the server is running and port is correct")
        elif "timeout" in error_msg.lower():
            diagnostics.append("• Issue: Connection timed out")
            diagnostics.append("• Suggestion: Server may be slow or unreachable")
        elif "network is unreachable" in error_msg.lower():
            diagnostics.append("• Issue: Network routing problem")
            diagnostics.append("• Suggestion: Check your internet connection")

        return "\n".join(diagnostics)

    def _get_timeout_diagnostic(self, timeout_value: int) -> str:
        """Get diagnostic information for timeout errors."""
        diagnostics = []

        diagnostics.append("Timeout Error Diagnostics:")
        diagnostics.append(f"• Configured timeout: {timeout_value} seconds")

        if timeout_value < 10:
            diagnostics.append("• Issue: Timeout may be too short")
            diagnostics.append("• Suggestion: Try increasing timeout to 30+ seconds")
        elif timeout_value < 30:
            diagnostics.append("• Issue: Server is responding slowly")
            diagnostics.append("• Suggestion: Try increasing timeout or check server status")
        else:
            diagnostics.append("• Issue: Server is not responding within reasonable time")
            diagnostics.append("• Suggestion: Check server status or network connectivity")

        diagnostics.append("• Troubleshooting: Try the request in a browser or with curl command line")

        return "\n".join(diagnostics)

    def _get_ssl_diagnostic(self, url: str, error_msg: str) -> str:
        """Get diagnostic information for SSL errors."""
        diagnostics = []

        diagnostics.append("SSL Error Diagnostics:")
        diagnostics.append(f"• URL: {url}")

        if "certificate verify failed" in error_msg.lower():
            diagnostics.append("• Issue: SSL certificate verification failed")
            diagnostics.append("• Suggestion: Certificate may be expired, self-signed, or invalid")
            diagnostics.append("• Workaround: Disable SSL verification (not recommended for production)")
        elif "ssl: wrong_version_number" in error_msg.lower():
            diagnostics.append("• Issue: SSL version mismatch")
            diagnostics.append("• Suggestion: Server may not support HTTPS on this port")
        elif "ssl: handshake_failure" in error_msg.lower():
            diagnostics.append("• Issue: SSL handshake failed")
            diagnostics.append("• Suggestion: Server and client SSL/TLS versions may be incompatible")

        diagnostics.append("• Troubleshooting: Check certificate validity with browser or openssl")

        return "\n".join(diagnostics)

    def _get_http_diagnostic(self, status_code: int, headers: dict) -> str:
        """Get diagnostic information for HTTP errors."""
        diagnostics = []

        diagnostics.append(f"HTTP {status_code} Error Diagnostics:")

        # Status code specific diagnostics
        if status_code == 400:
            diagnostics.append("• Issue: Bad Request - malformed request syntax")
            diagnostics.append("• Suggestion: Check request body format, headers, and parameters")
        elif status_code == 401:
            diagnostics.append("• Issue: Unauthorized - authentication required")
            diagnostics.append("• Suggestion: Check authentication credentials")
        elif status_code == 403:
            diagnostics.append("• Issue: Forbidden - insufficient permissions")
            diagnostics.append("• Suggestion: Check if your credentials have required permissions")
        elif status_code == 404:
            diagnostics.append("• Issue: Not Found - resource doesn't exist")
            diagnostics.append("• Suggestion: Verify the URL path and endpoint")
        elif status_code == 405:
            diagnostics.append("• Issue: Method Not Allowed")
            diagnostics.append("• Suggestion: Check if the HTTP method is supported by this endpoint")
        elif status_code == 429:
            diagnostics.append("• Issue: Too Many Requests - rate limit exceeded")
            diagnostics.append("• Suggestion: Wait before retrying or check rate limit headers")
        elif status_code >= 500:
            diagnostics.append("• Issue: Server Error - problem on server side")
            diagnostics.append("• Suggestion: Try again later or contact server administrator")

        # Check for helpful response headers
        if 'retry-after' in headers:
            diagnostics.append(f"• Retry After: {headers['retry-after']} seconds")

        if 'www-authenticate' in headers:
            diagnostics.append(f"• Authentication Method: {headers['www-authenticate']}")

        return "\n".join(diagnostics)

    def _get_general_diagnostic(self, error_msg: str) -> str:
        """Get general diagnostic information for other errors."""
        diagnostics = []

        diagnostics.append("General Error Diagnostics:")
        diagnostics.append(f"• Error: {error_msg}")

        # Common issues
        if "json" in error_msg.lower():
            diagnostics.append("• Issue: JSON parsing or formatting error")
            diagnostics.append("• Suggestion: Check JSON syntax in request body")
        elif "encoding" in error_msg.lower():
            diagnostics.append("• Issue: Character encoding problem")
            diagnostics.append("• Suggestion: Check response encoding or content type")
        elif "memory" in error_msg.lower():
            diagnostics.append("• Issue: Memory or resource limitation")
            diagnostics.append("• Suggestion: Response may be too large")

        diagnostics.append("• Troubleshooting: Enable verbose logging for more details")

        return "\n".join(diagnostics)

    def generate_curl_from_request_data(self, method: str, url: str,
                                        headers: Optional[Dict[str, str]] = None,
                                        body: Optional[str] = None,
                                        auth_type: str = "none",
                                        auth_data: Optional[Dict[str, str]] = None,
                                        **kwargs) -> str:
        """
        Generate cURL command from individual request parameters.

        Args:
            method: HTTP method
            url: Request URL
            headers: Optional headers dictionary
            body: Optional request body
            auth_type: Authentication type
            auth_data: Authentication data
            **kwargs: Additional options

        Returns:
            Formatted cURL command string
        """
        config = RequestConfig(
            method=method,
            url=url,
            headers=headers or {},
            body=body,
            auth_type=auth_type,
            auth_data=auth_data or {},
            follow_redirects=kwargs.get('follow_redirects', True),
            verify_ssl=kwargs.get('verify_ssl', True),
            timeout=kwargs.get('timeout', 30)
        )

        # Auto-detect body type
        if body:
            try:
                json.loads(body)
                config.body_type = "json"
            except (json.JSONDecodeError, ValueError):
                config.body_type = "raw"

        return self.generate_curl_command(config)
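
For orientation, here is a minimal sketch of how the parser and generator in this file might be exercised. It assumes the package root is on `sys.path` so that `tools.curl_processor` is importable; the class and method names (`CurlProcessor`, `parse_curl_command`, `generate_curl_command`, `RequestConfig`) come from the diff above, while the example URL and payload are purely illustrative.

```python
# Round-trip sketch: parse a cURL command, inspect the config, regenerate the command.
from tools.curl_processor import CurlProcessor

processor = CurlProcessor()

# Parse a cURL command into a RequestConfig (fields per the dataclass above).
config = processor.parse_curl_command(
    'curl -X POST https://api.example.com/items '
    '-H "Content-Type: application/json" '
    '-d \'{"name": "demo"}\''
)
print(config.method)     # POST
print(config.body_type)  # json (auto-detected from the -d payload)

# Regenerate a cURL command from the parsed configuration.
print(processor.generate_curl_command(config))

# Sending the request over the network would go through execute_request(),
# which returns a ResponseData instance; omitted here to keep the sketch offline.
```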