alita-sdk 0.3.486__py3-none-any.whl → 0.3.515__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of alita-sdk might be problematic.
- alita_sdk/cli/agent_loader.py +27 -6
- alita_sdk/cli/agents.py +10 -1
- alita_sdk/cli/inventory.py +12 -195
- alita_sdk/cli/tools/filesystem.py +95 -9
- alita_sdk/community/inventory/__init__.py +12 -0
- alita_sdk/community/inventory/toolkit.py +9 -5
- alita_sdk/community/inventory/toolkit_utils.py +176 -0
- alita_sdk/configurations/ado.py +144 -0
- alita_sdk/configurations/confluence.py +76 -42
- alita_sdk/configurations/figma.py +76 -0
- alita_sdk/configurations/gitlab.py +2 -0
- alita_sdk/configurations/qtest.py +72 -1
- alita_sdk/configurations/report_portal.py +96 -0
- alita_sdk/configurations/sharepoint.py +148 -0
- alita_sdk/configurations/testio.py +83 -0
- alita_sdk/runtime/clients/artifact.py +2 -2
- alita_sdk/runtime/clients/client.py +64 -40
- alita_sdk/runtime/clients/sandbox_client.py +14 -0
- alita_sdk/runtime/langchain/assistant.py +48 -2
- alita_sdk/runtime/langchain/constants.py +3 -1
- alita_sdk/runtime/langchain/document_loaders/AlitaExcelLoader.py +103 -60
- alita_sdk/runtime/langchain/document_loaders/AlitaJSONLinesLoader.py +77 -0
- alita_sdk/runtime/langchain/document_loaders/AlitaJSONLoader.py +2 -1
- alita_sdk/runtime/langchain/document_loaders/constants.py +12 -7
- alita_sdk/runtime/langchain/langraph_agent.py +10 -10
- alita_sdk/runtime/langchain/utils.py +6 -1
- alita_sdk/runtime/toolkits/artifact.py +14 -5
- alita_sdk/runtime/toolkits/datasource.py +13 -6
- alita_sdk/runtime/toolkits/mcp.py +94 -219
- alita_sdk/runtime/toolkits/planning.py +13 -6
- alita_sdk/runtime/toolkits/tools.py +60 -25
- alita_sdk/runtime/toolkits/vectorstore.py +11 -5
- alita_sdk/runtime/tools/artifact.py +185 -23
- alita_sdk/runtime/tools/function.py +2 -1
- alita_sdk/runtime/tools/llm.py +155 -34
- alita_sdk/runtime/tools/mcp_remote_tool.py +25 -10
- alita_sdk/runtime/tools/mcp_server_tool.py +2 -4
- alita_sdk/runtime/tools/vectorstore_base.py +3 -3
- alita_sdk/runtime/utils/AlitaCallback.py +136 -21
- alita_sdk/runtime/utils/mcp_client.py +492 -0
- alita_sdk/runtime/utils/mcp_oauth.py +125 -8
- alita_sdk/runtime/utils/mcp_sse_client.py +35 -6
- alita_sdk/runtime/utils/mcp_tools_discovery.py +124 -0
- alita_sdk/runtime/utils/toolkit_utils.py +7 -13
- alita_sdk/runtime/utils/utils.py +2 -0
- alita_sdk/tools/__init__.py +15 -0
- alita_sdk/tools/ado/repos/__init__.py +10 -12
- alita_sdk/tools/ado/test_plan/__init__.py +23 -8
- alita_sdk/tools/ado/wiki/__init__.py +24 -8
- alita_sdk/tools/ado/wiki/ado_wrapper.py +21 -7
- alita_sdk/tools/ado/work_item/__init__.py +24 -8
- alita_sdk/tools/advanced_jira_mining/__init__.py +10 -8
- alita_sdk/tools/aws/delta_lake/__init__.py +12 -9
- alita_sdk/tools/aws/delta_lake/tool.py +5 -1
- alita_sdk/tools/azure_ai/search/__init__.py +9 -7
- alita_sdk/tools/base/tool.py +5 -1
- alita_sdk/tools/base_indexer_toolkit.py +26 -1
- alita_sdk/tools/bitbucket/__init__.py +14 -10
- alita_sdk/tools/bitbucket/api_wrapper.py +50 -2
- alita_sdk/tools/browser/__init__.py +5 -4
- alita_sdk/tools/carrier/__init__.py +5 -6
- alita_sdk/tools/chunkers/sematic/json_chunker.py +1 -0
- alita_sdk/tools/chunkers/sematic/markdown_chunker.py +2 -0
- alita_sdk/tools/chunkers/universal_chunker.py +1 -0
- alita_sdk/tools/cloud/aws/__init__.py +9 -7
- alita_sdk/tools/cloud/azure/__init__.py +9 -7
- alita_sdk/tools/cloud/gcp/__init__.py +9 -7
- alita_sdk/tools/cloud/k8s/__init__.py +9 -7
- alita_sdk/tools/code/linter/__init__.py +9 -8
- alita_sdk/tools/code/loaders/codesearcher.py +3 -2
- alita_sdk/tools/code/sonar/__init__.py +9 -7
- alita_sdk/tools/confluence/__init__.py +15 -10
- alita_sdk/tools/confluence/api_wrapper.py +63 -14
- alita_sdk/tools/custom_open_api/__init__.py +11 -5
- alita_sdk/tools/elastic/__init__.py +10 -8
- alita_sdk/tools/elitea_base.py +387 -9
- alita_sdk/tools/figma/__init__.py +8 -7
- alita_sdk/tools/github/__init__.py +12 -14
- alita_sdk/tools/github/github_client.py +68 -2
- alita_sdk/tools/github/tool.py +5 -1
- alita_sdk/tools/gitlab/__init__.py +14 -11
- alita_sdk/tools/gitlab/api_wrapper.py +81 -1
- alita_sdk/tools/gitlab_org/__init__.py +9 -8
- alita_sdk/tools/google/bigquery/__init__.py +12 -12
- alita_sdk/tools/google/bigquery/tool.py +5 -1
- alita_sdk/tools/google_places/__init__.py +9 -8
- alita_sdk/tools/jira/__init__.py +15 -10
- alita_sdk/tools/keycloak/__init__.py +10 -8
- alita_sdk/tools/localgit/__init__.py +8 -3
- alita_sdk/tools/localgit/local_git.py +62 -54
- alita_sdk/tools/localgit/tool.py +5 -1
- alita_sdk/tools/memory/__init__.py +11 -3
- alita_sdk/tools/ocr/__init__.py +10 -8
- alita_sdk/tools/openapi/__init__.py +6 -2
- alita_sdk/tools/pandas/__init__.py +9 -7
- alita_sdk/tools/postman/__init__.py +10 -11
- alita_sdk/tools/pptx/__init__.py +9 -9
- alita_sdk/tools/qtest/__init__.py +9 -8
- alita_sdk/tools/rally/__init__.py +9 -8
- alita_sdk/tools/report_portal/__init__.py +11 -9
- alita_sdk/tools/salesforce/__init__.py +9 -9
- alita_sdk/tools/servicenow/__init__.py +10 -8
- alita_sdk/tools/sharepoint/__init__.py +9 -8
- alita_sdk/tools/sharepoint/api_wrapper.py +2 -2
- alita_sdk/tools/slack/__init__.py +8 -7
- alita_sdk/tools/sql/__init__.py +9 -8
- alita_sdk/tools/testio/__init__.py +9 -8
- alita_sdk/tools/testrail/__init__.py +10 -8
- alita_sdk/tools/utils/__init__.py +9 -4
- alita_sdk/tools/utils/text_operations.py +254 -0
- alita_sdk/tools/vector_adapters/VectorStoreAdapter.py +16 -18
- alita_sdk/tools/xray/__init__.py +10 -8
- alita_sdk/tools/yagmail/__init__.py +8 -3
- alita_sdk/tools/zephyr/__init__.py +8 -7
- alita_sdk/tools/zephyr_enterprise/__init__.py +10 -8
- alita_sdk/tools/zephyr_essential/__init__.py +9 -8
- alita_sdk/tools/zephyr_scale/__init__.py +9 -8
- alita_sdk/tools/zephyr_squad/__init__.py +9 -8
- {alita_sdk-0.3.486.dist-info → alita_sdk-0.3.515.dist-info}/METADATA +1 -1
- {alita_sdk-0.3.486.dist-info → alita_sdk-0.3.515.dist-info}/RECORD +124 -119
- {alita_sdk-0.3.486.dist-info → alita_sdk-0.3.515.dist-info}/WHEEL +0 -0
- {alita_sdk-0.3.486.dist-info → alita_sdk-0.3.515.dist-info}/entry_points.txt +0 -0
- {alita_sdk-0.3.486.dist-info → alita_sdk-0.3.515.dist-info}/licenses/LICENSE +0 -0
- {alita_sdk-0.3.486.dist-info → alita_sdk-0.3.515.dist-info}/top_level.txt +0 -0
alita_sdk/community/inventory/toolkit_utils.py
ADDED
@@ -0,0 +1,176 @@
+"""
+Toolkit configuration and instantiation utilities for inventory ingestion.
+
+This module provides functions to load toolkit configurations, instantiate source
+toolkits from various sources (filesystem, GitHub, ADO), and get LLM instances
+for entity extraction.
+"""
+
+import json
+import os
+import re
+from pathlib import Path
+from typing import Any, Dict, Optional
+
+from alita_sdk.alita_client import AlitaClient
+
+
+def load_toolkit_config(toolkit_path: str) -> Dict[str, Any]:
+    """
+    Load and parse a toolkit config JSON file.
+
+    Supports environment variable substitution for values like ${GITHUB_PAT}.
+
+    Args:
+        toolkit_path: Path to the toolkit configuration JSON file
+
+    Returns:
+        Dictionary containing the parsed and environment-resolved configuration
+
+    Example:
+        >>> config = load_toolkit_config("configs/github_toolkit.json")
+        >>> config['type']
+        'github'
+    """
+    with open(toolkit_path, 'r') as f:
+        config = json.load(f)
+
+    # Recursively resolve environment variables
+    def resolve_env_vars(obj):
+        if isinstance(obj, str):
+            # Match ${VAR_NAME} pattern
+            pattern = r'\$\{([^}]+)\}'
+            matches = re.findall(pattern, obj)
+            for var_name in matches:
+                env_value = os.environ.get(var_name, '')
+                obj = obj.replace(f'${{{var_name}}}', env_value)
+            return obj
+        elif isinstance(obj, dict):
+            return {k: resolve_env_vars(v) for k, v in obj.items()}
+        elif isinstance(obj, list):
+            return [resolve_env_vars(item) for item in obj]
+        return obj
+
+    return resolve_env_vars(config)
+
+
+def get_llm_for_config(
+    client: AlitaClient,
+    model: Optional[str] = None,
+    temperature: float = 0.0
+):
+    """
+    Get LLM instance from Alita client for entity extraction.
+
+    Args:
+        client: AlitaClient instance
+        model: Model name (defaults to 'gpt-4o-mini' if not specified)
+        temperature: Temperature for the model (default 0.0 for deterministic output)
+
+    Returns:
+        LLM instance configured with the specified model and parameters
+
+    Example:
+        >>> client = AlitaClient(...)
+        >>> llm = get_llm_for_config(client, model='gpt-4o', temperature=0.0)
+    """
+    model_name = model or 'gpt-4o-mini'
+
+    return client.get_llm(
+        model_name=model_name,
+        model_config={
+            'temperature': temperature,
+            'max_tokens': 4096
+        }
+    )
+
+
+def get_source_toolkit(toolkit_config: Dict[str, Any]):
+    """
+    Instantiate a source toolkit from configuration.
+
+    Supports filesystem, GitHub, and Azure DevOps (ADO) toolkit types. For SDK-based
+    toolkits (GitHub, ADO), automatically handles configuration mapping and toolkit
+    instantiation from the registry.
+
+    Args:
+        toolkit_config: Toolkit configuration dictionary with 'type' key
+            and type-specific parameters
+
+    Returns:
+        Instantiated toolkit object ready for ingestion
+
+    Raises:
+        ValueError: If toolkit type is unsupported or configuration is invalid
+
+    Example:
+        >>> # Filesystem toolkit
+        >>> config = {'type': 'filesystem', 'base_path': '/path/to/code'}
+        >>> toolkit = get_source_toolkit(config)
+
+        >>> # GitHub toolkit
+        >>> config = {
+        ...     'type': 'github',
+        ...     'github_token': 'ghp_...',
+        ...     'github_repository': 'owner/repo',
+        ...     'github_branch': 'main'
+        ... }
+        >>> toolkit = get_source_toolkit(config)
+    """
+    from alita_sdk.community.inventory.filesystem_toolkit import FilesystemToolkit
+    from alita_sdk.community.toolkits import AVAILABLE_TOOLS
+
+    toolkit_type = toolkit_config.get('type')
+
+    if toolkit_type == 'filesystem':
+        base_path = toolkit_config.get('base_path')
+        if not base_path:
+            raise ValueError("Filesystem toolkit requires 'base_path' configuration")
+        return FilesystemToolkit(base_path=Path(base_path))
+
+    # Handle SDK toolkits (GitHub, ADO)
+    if toolkit_type not in AVAILABLE_TOOLS:
+        raise ValueError(
+            f"Unknown toolkit type: {toolkit_type}. "
+            f"Available types: filesystem, {', '.join(AVAILABLE_TOOLS.keys())}"
+        )
+
+    toolkit_class = AVAILABLE_TOOLS[toolkit_type]
+
+    # Flatten nested config if needed
+    config_for_init = {}
+    for key, value in toolkit_config.items():
+        if key == 'type':
+            continue
+        if isinstance(value, dict):
+            # Flatten nested dicts
+            config_for_init.update(value)
+        else:
+            config_for_init[key] = value
+
+    # Map field names for specific toolkit types
+    if toolkit_type == 'github':
+        field_mapping = {
+            'github_token': 'token',
+            'github_repository': 'repository',
+            'github_branch': 'branch'
+        }
+        config_for_init = {
+            field_mapping.get(k, k): v
+            for k, v in config_for_init.items()
+        }
+    elif toolkit_type == 'ado':
+        field_mapping = {
+            'ado_token': 'token',
+            'ado_organization': 'organization',
+            'ado_project': 'project',
+            'ado_repository': 'repository',
+            'ado_branch': 'branch'
+        }
+        config_for_init = {
+            field_mapping.get(k, k): v
+            for k, v in config_for_init.items()
+        }
+
+    # Instantiate toolkit
+    return toolkit_class(**config_for_init)
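A minimal usage sketch for the new module, assuming the functions behave as shown in the diff above; the config file path and its contents are illustrative placeholders, not part of this release:

    from alita_sdk.community.inventory.toolkit_utils import (
        load_toolkit_config,
        get_source_toolkit,
    )

    # ${GITHUB_PAT}-style placeholders in the JSON are resolved from os.environ
    config = load_toolkit_config("configs/github_toolkit.json")  # hypothetical path
    print(config["type"])  # e.g. 'github'

    # Keys like 'github_token'/'github_repository'/'github_branch' are remapped to
    # the SDK toolkit's 'token'/'repository'/'branch' arguments before instantiation
    toolkit = get_source_toolkit(config)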
alita_sdk/configurations/ado.py
CHANGED
@@ -1,5 +1,8 @@
+import re
 from typing import Optional
+from urllib.parse import quote
 
+import requests
 from pydantic import BaseModel, ConfigDict, Field, SecretStr
 
 
@@ -19,6 +22,147 @@ class AdoConfiguration(BaseModel):
     project: str = Field(description="ADO project")
     token: Optional[SecretStr] = Field(description="ADO Token")
 
+    @staticmethod
+    def check_connection(settings: dict) -> str | None:
+        """
+        Test the connection to Azure DevOps API.
+
+        Args:
+            settings: Dictionary containing 'organization_url', 'project', and optionally 'token'
+
+        Returns:
+            None if connection is successful, error message string otherwise
+        """
+        organization_url = settings.get("organization_url")
+        if organization_url is None or organization_url == "":
+            if organization_url == "":
+                return "Organization URL cannot be empty"
+            return "Organization URL is required"
+
+        # Validate organization URL format
+        if not isinstance(organization_url, str):
+            return "Organization URL must be a string"
+
+        organization_url = organization_url.strip()
+        if not organization_url:
+            return "Organization URL cannot be empty"
+
+        if not organization_url.startswith(("http://", "https://")):
+            return "Organization URL must start with http:// or https://"
+
+        # Remove trailing slash for consistency
+        organization_url = organization_url.rstrip("/")
+
+        project = settings.get("project")
+        if project is None or project == "":
+            if project == "":
+                return "Project cannot be empty"
+            return "Project is required"
+
+        # Validate project format
+        if not isinstance(project, str):
+            return "Project must be a string"
+
+        project = project.strip()
+        if not project:
+            return "Project cannot be empty"
+
+        token = settings.get("token")
+
+        # Extract secret value if it's a SecretStr
+        if token is not None and hasattr(token, "get_secret_value"):
+            token = token.get_secret_value()
+
+        # Validate token if provided
+        if token is not None and (not token or not token.strip()):
+            return "Token cannot be empty if provided"
+
+        # NOTE on verification strategy:
+        # - Project endpoints can work anonymously for public projects.
+        #   That makes them a weak signal for detecting a bad/expired token.
+        # - If a token is provided, first validate it against a profile endpoint
+        #   that requires authentication, then check project access.
+
+        # Strictly require a canonical organization URL so we can build reliable API URLs.
+        # Supported formats:
+        #   - https://dev.azure.com/<org>
+        #   - https://<org>.visualstudio.com
+        org_name: str | None = None
+        org_url_kind: str | None = None  # 'dev.azure.com' | '*.visualstudio.com'
+        m = re.match(r"^https?://dev\.azure\.com/(?P<org>[^/]+)$", organization_url, flags=re.IGNORECASE)
+        if m:
+            org_name = m.group('org')
+            org_url_kind = 'dev.azure.com'
+        else:
+            m = re.match(r"^https?://(?P<org>[^/.]+)\.visualstudio\.com$", organization_url, flags=re.IGNORECASE)
+            if m:
+                org_name = m.group('org')
+                org_url_kind = '*.visualstudio.com'
+
+        if org_name is None:
+            return (
+                "Organization URL format is invalid. Use 'https://dev.azure.com/<org>' "
+                "(recommended) or 'https://<org>.visualstudio.com'."
+            )
+
+        project_encoded = quote(project, safe="")
+        project_url = f"{organization_url}/_apis/projects/{project_encoded}?api-version=7.0"
+        # Auth-required endpoint to validate PAT (works regardless of project visibility)
+        if org_url_kind == 'dev.azure.com':
+            profile_url = f"https://vssps.dev.azure.com/{org_name}/_apis/profile/profiles/me?api-version=7.1-preview.3"
+        else:
+            # For legacy org URLs, use the matching vssps host
+            profile_url = f"https://{org_name}.vssps.visualstudio.com/_apis/profile/profiles/me?api-version=7.1-preview.3"
+
+        try:
+            headers = {}
+            if token:
+                # Use Basic Auth with PAT token (username can be empty)
+                from requests.auth import HTTPBasicAuth
+                auth = HTTPBasicAuth("", token)
+
+                # 1) Validate token first (strong signal)
+                profile_resp = requests.get(profile_url, auth=auth, timeout=10)
+                if profile_resp.status_code == 200:
+                    pass
+                elif profile_resp.status_code == 401:
+                    return "Invalid or expired token (PAT). Please generate a new token and try again."
+                elif profile_resp.status_code == 403:
+                    return "Token is valid but lacks permission to access profile. Check PAT scopes/permissions."
+                elif profile_resp.status_code == 404:
+                    return "Organization not found. Verify the Organization URL."
+                else:
+                    return f"Token validation failed (HTTP {profile_resp.status_code})."
+
+                # 2) Validate project access
+                response = requests.get(project_url, auth=auth, timeout=10)
+            else:
+                # Try without authentication (works for public projects)
+                response = requests.get(project_url, headers=headers, timeout=10)
+
+            if response.status_code == 200:
+                return None  # Connection successful
+            elif response.status_code == 401:
+                if token:
+                    return "Not authorized. Token may be invalid for this organization or expired."
+                else:
+                    return "Authentication required - project may be private"
+            elif response.status_code == 403:
+                return "Access forbidden - token may lack required permissions for this project"
+            elif response.status_code == 404:
+                return f"Project '{project}' not found or not accessible. Check project name and organization URL."
+            else:
+                return f"Connection failed (HTTP {response.status_code})."
+
+        except requests.exceptions.Timeout:
+            return "Connection timeout - Azure DevOps did not respond within 10 seconds"
+        except requests.exceptions.ConnectionError:
+            return "Connection error - unable to reach Azure DevOps. Check the Organization URL and your network."
+        except requests.exceptions.RequestException as e:
+            return f"Request failed: {str(e)}"
+        except Exception:
+            return "Unexpected error during Azure DevOps connection check"
+
 
 class AdoReposConfiguration(BaseModel):
     model_config = ConfigDict(
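A short sketch of how the new AdoConfiguration.check_connection helper might be called, assuming only the settings keys read in the diff above; the organization, project, and PAT values are placeholders:

    from alita_sdk.configurations.ado import AdoConfiguration

    error = AdoConfiguration.check_connection({
        "organization_url": "https://dev.azure.com/my-org",  # or https://<org>.visualstudio.com
        "project": "MyProject",
        "token": "<personal-access-token>",  # optional; public projects can be checked anonymously
    })
    if error is None:
        print("Azure DevOps connection OK")
    else:
        print(f"Azure DevOps connection failed: {error}")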
alita_sdk/configurations/confluence.py
CHANGED
@@ -1,5 +1,7 @@
 from typing import Optional
+from urllib.parse import urlparse, urlunparse
 
+from atlassian import Confluence
 from pydantic import BaseModel, ConfigDict, Field, SecretStr
 
 
@@ -55,25 +57,53 @@ class ConfluenceConfiguration(BaseModel):
         from requests.auth import HTTPBasicAuth
 
         # Validate base_url
-
+        base_url_input = settings.get("base_url", "")
+        base_url = base_url_input.strip() if isinstance(base_url_input, str) else ""
         if not base_url:
             return "Confluence URL is required"
 
-        # Normalize URL - remove trailing slashes
-        base_url = base_url.rstrip("/")
-
         # Basic URL validation
         if not base_url.startswith(("http://", "https://")):
             return "Confluence URL must start with http:// or https://"
+
+        # Normalize URL - remove trailing slashes
+        base_url = base_url.rstrip("/")
+
+        # Build candidate base URLs.
+        # Confluence Cloud REST API is typically under /wiki. Users often paste
+        # https://<site>.atlassian.net and shouldn't be forced to know about /wiki.
+        parsed = urlparse(base_url)
+        host = (parsed.hostname or "").lower()
+        path = parsed.path or ""
+
+        def with_wiki_path(url: str) -> str:
+            p = urlparse(url)
+            # Keep existing path if it already starts with /wiki
+            if (p.path or "").startswith("/wiki"):
+                return url
+            # Append /wiki, preserving any existing path (rare but safe)
+            new_path = (p.path or "") + "/wiki"
+            return urlunparse(p._replace(path=new_path.rstrip("/")))
+
+        candidate_base_urls: list[str] = []
+        if host.endswith(".atlassian.net"):
+            # For Atlassian Cloud, prefer the /wiki variant first
+            candidate_base_urls.append(with_wiki_path(base_url))
+        candidate_base_urls.append(base_url)
+        # De-duplicate while preserving order
+        candidate_base_urls = list(dict.fromkeys(candidate_base_urls))
 
         # Check authentication credentials
         username = settings.get("username")
         api_key = settings.get("api_key")
         token = settings.get("token")
 
+        api_key_value = api_key.get_secret_value() if hasattr(api_key, 'get_secret_value') else api_key
+        token_value = token.get_secret_value() if hasattr(token, 'get_secret_value') else token
+
         # Validate authentication - at least one method must be provided
-        has_basic_auth = bool(username and
-        has_token = bool(
+        has_basic_auth = bool(username and api_key_value and str(api_key_value).strip())
+        has_token = bool(token_value and str(token_value).strip())
 
         # Determine authentication method
         auth_headers = {}
@@ -81,52 +111,56 @@ class ConfluenceConfiguration(BaseModel):
 
         if has_token:
             # Bearer token authentication
-            token_value = token.get_secret_value() if hasattr(token, 'get_secret_value') else token
             auth_headers["Authorization"] = f"Bearer {token_value}"
         elif has_basic_auth:
             # Basic authentication
-            api_key_value = api_key.get_secret_value() if hasattr(api_key, 'get_secret_value') else api_key
             auth = HTTPBasicAuth(username, api_key_value)
         else:
             return "Authentication required: provide either token or both username and api_key"
 
-        # Test connection using /rest/api/user/current endpoint
-        # This endpoint returns current user info and validates authentication
-        test_url = f"{base_url}/rest/api/user/current"
-
         try:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            if
-                return
-
-
-
-
-
-
-
-
+            # Test connection using /rest/api/user/current endpoint
+            # This endpoint returns current user info and validates authentication
+            last_status = None
+            for candidate_base in candidate_base_urls:
+                test_url = f"{candidate_base}/rest/api/user/current"
+                response = requests.get(
+                    test_url,
+                    auth=auth,
+                    headers=auth_headers,
+                    timeout=10
+                )
+                last_status = response.status_code
+
+                if response.status_code == 200:
+                    return None
+
+                # If we get 404 on the first candidate, try the next one
+                if response.status_code == 404:
+                    continue
+
+                if response.status_code == 401:
+                    return "Invalid credentials (401) - check token or username/api_key"
+                if response.status_code == 403:
+                    return "Access forbidden (403) - credentials lack Confluence permissions"
+                if response.status_code == 429:
+                    return "Rate limited (429) - please try again later"
+                if 500 <= response.status_code <= 599:
+                    return f"Confluence service error (HTTP {response.status_code})"
+                return f"Confluence request failed (HTTP {response.status_code})"
+
+            # All candidates returned 404
+            return "Confluence API endpoint not found (404) - verify the Confluence URL"
 
         except requests.exceptions.SSLError as e:
-
+            if 'Hostname mismatch' in str(e):
+                return "SSL error - hostname mismatch. Verify the Confluence URL"
+            return "SSL error - certificate verification failed"
         except requests.exceptions.ConnectionError:
-            return
+            return "Connection error - unable to reach Confluence. Check URL and network."
         except requests.exceptions.Timeout:
-            return
+            return "Connection timeout - Confluence did not respond within 10 seconds. Check URL and network."
         except requests.exceptions.RequestException as e:
-            return f"
-        except Exception
-            return
+            return f"Request failed: {str(e)}"
+        except Exception:
+            return "Unexpected error during Confluence connection check"
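A comparable sketch for the Confluence check, assuming the surrounding method keeps the check_connection(settings) signature used by the other configuration classes in this diff; the site URL and credentials are placeholders:

    from alita_sdk.configurations.confluence import ConfluenceConfiguration

    # For *.atlassian.net sites the /wiki suffix is now tried automatically,
    # so the plain site URL is enough here.
    error = ConfluenceConfiguration.check_connection({
        "base_url": "https://example.atlassian.net",
        "username": "user@example.com",
        "api_key": "<api-token>",  # or pass "token" instead for Bearer auth
    })
    print("OK" if error is None else f"Confluence check failed: {error}")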
alita_sdk/configurations/figma.py
CHANGED
@@ -1,8 +1,30 @@
+from json import JSONDecodeError
 from typing import Optional
 
+import requests
 from pydantic import BaseModel, ConfigDict, Field, SecretStr
 
 
+def _parse_error_response(response: requests.Response) -> Optional[str]:
+    """
+    Parse error response from Figma API to extract detailed error message.
+
+    Args:
+        response: Response object from requests
+
+    Returns:
+        Detailed error message if found, None otherwise
+    """
+    try:
+        json_response = response.json()
+        error = json_response.get("err") or json_response.get("error")
+        if error and 'Invalid token' in str(error):
+            return "Invalid token. Please verify the Figma token and try again."
+    except (JSONDecodeError, KeyError, AttributeError):
+        pass
+    return None
+
+
 class FigmaConfiguration(BaseModel):
     model_config = ConfigDict(
         json_schema_extra={
@@ -28,3 +50,57 @@ class FigmaConfiguration(BaseModel):
         }
     )
     token: Optional[SecretStr] = Field(description="Figma Token", json_schema_extra={"secret": True}, default=None)
+
+    @staticmethod
+    def check_connection(settings: dict) -> str | None:
+        """
+        Test the connection to Figma API.
+
+        Args:
+            settings: Dictionary containing 'token' (required)
+
+        Returns:
+            None if connection is successful, error message string otherwise
+        """
+        token = settings.get("token")
+        if token is None:
+            return "Token is required"
+
+        # Extract secret value if it's a SecretStr
+        if hasattr(token, "get_secret_value"):
+            token = token.get_secret_value()
+
+        # Validate token is not empty
+        if not token or not token.strip():
+            return "Token cannot be empty"
+
+        # Figma API endpoint
+        base_url = "https://api.figma.com"
+        endpoint = f"{base_url}/v1/me"
+
+        try:
+            response = requests.get(
+                endpoint,
+                headers={"X-Figma-Token": token},
+                timeout=10,
+            )
+
+            if response.status_code == 200:
+                return None  # Connection successful
+            elif response.status_code == 401:
+                detailed_error = _parse_error_response(response)
+                return detailed_error if detailed_error else "Invalid token"
+            elif response.status_code == 403:
+                detailed_error = _parse_error_response(response)
+                return detailed_error if detailed_error else "Access forbidden - token may lack required permissions"
+            else:
+                return f"Connection failed with status {response.status_code}"
+
+        except requests.exceptions.Timeout:
+            return "Connection timeout - Figma API is not responding"
+        except requests.exceptions.ConnectionError:
+            return "Connection error - unable to reach Figma API"
+        except requests.exceptions.RequestException as e:
+            return f"Request failed: {str(e)}"
+        except Exception as e:
+            return f"Unexpected error: {str(e)}"
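A minimal call sketch for the new Figma check, with a placeholder token:

    from alita_sdk.configurations.figma import FigmaConfiguration

    error = FigmaConfiguration.check_connection({"token": "<figma-personal-access-token>"})
    print("Figma OK" if error is None else f"Figma check failed: {error}")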
alita_sdk/configurations/gitlab.py
CHANGED
@@ -99,6 +99,8 @@ class GitlabConfiguration(BaseModel):
                 return f"GitLab API returned status code {response.status_code}"
 
         except requests.exceptions.SSLError as e:
+            if 'Hostname mismatch' in str(e):
+                return "GitLab API endpoint not found: verify the GitLab URL"
             return f"SSL certificate verification failed: {str(e)}"
         except requests.exceptions.ConnectionError:
             return f"Cannot connect to GitLab at {url}: connection refused"
alita_sdk/configurations/qtest.py
CHANGED
@@ -1,3 +1,6 @@
+import re
+
+import requests
 from pydantic import BaseModel, ConfigDict, Field, SecretStr
 
 
@@ -14,6 +17,74 @@ class QtestConfiguration(BaseModel):
             }
         }
     )
-    base_url: str = Field(description="QTest base
+    base_url: str = Field(description="QTest base URL")
     qtest_api_token: SecretStr = Field(description="QTest API token")
 
+    @staticmethod
+    def check_connection(settings: dict) -> str | None:
+        """Check connectivity and credentials for qTest.
+
+        Strategy:
+        - Validate token against an auth-required endpoint (so an incorrect token is detected).
+
+        Returns:
+            None if successful, otherwise a short actionable error message.
+        """
+        base_url_input = settings.get("base_url")
+        base_url = base_url_input.strip() if isinstance(base_url_input, str) else ""
+        if not base_url:
+            return "QTest base URL is required"
+
+        if not base_url.startswith(("http://", "https://")):
+            return "QTest base URL must start with http:// or https://"
+
+        base_url = base_url.rstrip("/")
+        # If user pasted /api/v3 (or similar), strip it so we can build canonical API URLs.
+        base_url = re.sub(r"/api/v\d+/?$", "", base_url, flags=re.IGNORECASE)
+
+        token = settings.get("qtest_api_token")
+        if token is None:
+            return "QTest API token is required"
+        token_value = token.get_secret_value() if hasattr(token, "get_secret_value") else str(token)
+        if not token_value or not token_value.strip():
+            return "QTest API token cannot be empty"
+
+        headers = {
+            "Authorization": f"Bearer {token_value}",
+            "Content-Type": "application/json",
+        }
+
+        # Auth-required endpoint to validate the token.
+        # /projects works on v3 and requires auth in typical qTest deployments.
+        token_check_url = f"{base_url}/api/v3/projects?pageSize=1&page=1"
+
+        try:
+            resp = requests.get(token_check_url, headers=headers, timeout=10)
+            if resp.status_code == 200:
+                return None
+            elif resp.status_code == 401:
+                return "Invalid or expired QTest API token"
+            elif resp.status_code == 403:
+                return "Access forbidden - token lacks required permissions"
+            elif resp.status_code == 404:
+                return "QTest API not found (404) - verify base URL (do not include /api/v3)"
+            elif resp.status_code == 429:
+                return "Rate limited (429) - please try again later"
+            elif 500 <= resp.status_code <= 599:
+                return f"QTest service error (HTTP {resp.status_code})"
+            else:
+                return f"QTest connection failed (HTTP {resp.status_code})"
+
+        except requests.exceptions.Timeout:
+            return "Connection timeout - qTest did not respond within 10 seconds"
+        except requests.exceptions.ConnectionError:
+            return "Connection error - unable to reach qTest. Check base URL and network."
+        except requests.exceptions.SSLError:
+            return "SSL error - certificate verification failed"
+        except requests.exceptions.RequestException as e:
+            return f"Request failed: {str(e)}"
+        except Exception:
+            return "Unexpected error during qTest connection check"
+
+
+