alita-sdk 0.3.227__py3-none-any.whl → 0.3.229__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alita_sdk/configurations/__init__.py +66 -0
- alita_sdk/configurations/ado.py +41 -0
- alita_sdk/configurations/azure_search.py +21 -0
- alita_sdk/configurations/bigquery.py +23 -0
- alita_sdk/configurations/bitbucket.py +31 -0
- alita_sdk/configurations/confluence.py +36 -0
- alita_sdk/configurations/delta_lake.py +24 -0
- alita_sdk/configurations/github.py +45 -0
- alita_sdk/configurations/gitlab.py +31 -0
- alita_sdk/configurations/jira.py +36 -0
- alita_sdk/configurations/pgvector.py +18 -0
- alita_sdk/configurations/postman.py +30 -0
- alita_sdk/configurations/qtest.py +20 -0
- alita_sdk/configurations/service_now.py +31 -0
- alita_sdk/configurations/slack.py +35 -0
- alita_sdk/configurations/testrail.py +20 -0
- alita_sdk/runtime/toolkits/configurations.py +4 -0
- alita_sdk/runtime/toolkits/tools.py +1 -1
- alita_sdk/tools/ado/__init__.py +7 -7
- alita_sdk/tools/ado/repos/__init__.py +22 -31
- alita_sdk/tools/ado/wiki/ado_wrapper.py +13 -7
- alita_sdk/tools/aws/delta_lake/__init__.py +9 -13
- alita_sdk/tools/azure_ai/search/__init__.py +9 -15
- alita_sdk/tools/bitbucket/__init__.py +8 -11
- alita_sdk/tools/confluence/__init__.py +20 -20
- alita_sdk/tools/confluence/api_wrapper.py +11 -4
- alita_sdk/tools/elitea_base.py +1 -1
- alita_sdk/tools/github/__init__.py +22 -45
- alita_sdk/tools/gitlab/__init__.py +8 -8
- alita_sdk/tools/google/bigquery/__init__.py +5 -26
- alita_sdk/tools/jira/__init__.py +18 -18
- alita_sdk/tools/postman/__init__.py +5 -5
- alita_sdk/tools/qtest/__init__.py +3 -2
- alita_sdk/tools/servicenow/__init__.py +7 -5
- alita_sdk/tools/slack/__init__.py +6 -7
- alita_sdk/tools/testrail/__init__.py +8 -11
- alita_sdk/tools/vector_adapters/VectorStoreAdapter.py +6 -5
- {alita_sdk-0.3.227.dist-info → alita_sdk-0.3.229.dist-info}/METADATA +1 -1
- {alita_sdk-0.3.227.dist-info → alita_sdk-0.3.229.dist-info}/RECORD +42 -25
- {alita_sdk-0.3.227.dist-info → alita_sdk-0.3.229.dist-info}/WHEEL +0 -0
- {alita_sdk-0.3.227.dist-info → alita_sdk-0.3.229.dist-info}/licenses/LICENSE +0 -0
- {alita_sdk-0.3.227.dist-info → alita_sdk-0.3.229.dist-info}/top_level.txt +0 -0
import logging

logger = logging.getLogger(__name__)

# Registries populated by _safe_import_configuration below:
#   AVAILABLE_CONFIGURATIONS       name -> JSON schema (model_json_schema())
#   AVAILABLE_CLASS_CONFIGURATIONS name -> configuration class object
#   FAILED_IMPORTS                 name -> error string for imports that failed
AVAILABLE_CONFIGURATIONS = {}
AVAILABLE_CLASS_CONFIGURATIONS = {}
FAILED_IMPORTS = {}


def _safe_import_configuration(
    configuration_name, module_path, configuration_class_name
):
    """Safely import a configuration module and register available functions/classes.

    On success the class and its JSON schema are registered; on any failure the
    error is recorded in FAILED_IMPORTS instead of propagating, so optional
    integrations never break package import.
    """
    try:
        # fromlist=[''] makes __import__ return the leaf submodule rather than
        # the top-level 'alita_sdk' package.
        module = __import__(f'alita_sdk.configurations.{module_path}', fromlist=[''])
        configuration_class = getattr(module, configuration_class_name)
        AVAILABLE_CONFIGURATIONS[configuration_name] = configuration_class.model_json_schema()
        AVAILABLE_CLASS_CONFIGURATIONS[configuration_name] = configuration_class
        logger.debug(f"Successfully imported {configuration_name}")
    except Exception as e:
        FAILED_IMPORTS[configuration_name] = str(e)
        logger.debug(f"Failed to import {configuration_name}: {e}")

# Safe imports for all tools
_safe_import_configuration('github', 'github', 'GithubConfiguration')
_safe_import_configuration('pgvector', 'pgvector', 'PgVectorConfiguration')
_safe_import_configuration('ado', 'ado', 'AdoConfiguration')
_safe_import_configuration('ado_repos', 'ado', 'AdoReposConfiguration')
_safe_import_configuration('gitlab', 'gitlab', 'GitlabConfiguration')
_safe_import_configuration('qtest', 'qtest', 'QtestConfiguration')
_safe_import_configuration('bitbucket', 'bitbucket', 'BitbucketConfiguration')
_safe_import_configuration('confluence', 'confluence', 'ConfluenceConfiguration')
_safe_import_configuration('jira', 'jira', 'JiraConfiguration')
_safe_import_configuration('postman', 'postman', 'PostmanConfiguration')
_safe_import_configuration('service_now', 'service_now', 'ServiceNowConfiguration')
_safe_import_configuration('testrail', 'testrail', 'TestRailConfiguration')
_safe_import_configuration('slack', 'slack', 'SlackConfiguration')
_safe_import_configuration('azure_search', 'azure_search', 'AzureSearchConfiguration')
_safe_import_configuration('delta_lake', 'delta_lake', 'DeltaLakeConfiguration')
_safe_import_configuration('bigquery', 'bigquery', 'BigQueryConfiguration')

# Log import summary
available_count = len(AVAILABLE_CONFIGURATIONS)
total_attempted = len(AVAILABLE_CONFIGURATIONS) + len(FAILED_IMPORTS)
logger.info(f"Configuration imports completed: {available_count}/{total_attempted} successful")


def get_configurations():
    """Return all available configuration schemas (name -> JSON schema)."""
    return AVAILABLE_CONFIGURATIONS.copy()


def get_class_configurations():
    """Return all available configuration classes (name -> class)."""
    return AVAILABLE_CLASS_CONFIGURATIONS.copy()


def get_available_configurations():
    """Return list of available configuration class names."""
    return list(AVAILABLE_CONFIGURATIONS.keys())


# Fix: get_class_configurations is public API used alongside the other two
# accessors but was missing from __all__, so `from ... import *` dropped it.
__all__ = [
    'get_configurations',
    'get_class_configurations',
    'get_available_configurations',
]
|
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class AdoConfiguration(BaseModel):
    """Azure DevOps credentials: organization URL, project and PAT token.

    The ``metadata`` embedded in the JSON schema tags this model for the
    configuration registry (section/type/label).
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "Ado",
                "icon_url": None,
                "section": "credentials",
                "type": "ado"
            }
        }
    )
    # Fix: default=None added to organization_url and token. Without a default,
    # pydantic v2 treats Optional fields as *required*, so the
    # default_factory=AdoConfiguration below (AdoReposConfiguration) raised a
    # ValidationError whenever the nested configuration was omitted.
    organization_url: Optional[str] = Field(description="Base API URL", default=None)
    project: Optional[str] = Field(description="ADO project", default=None)
    token: Optional[SecretStr] = Field(description="ADO Token", default=None)


class AdoReposConfiguration(BaseModel):
    """ADO Repos credentials: a repository id plus a nested AdoConfiguration."""
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "ADO repos",
                "icon_url": "ado-repos-icon.svg",
                "section": "credentials",
                "type": "ado_repos"
            }
        }
    )
    repository_id: Optional[str] = Field(description="ADO repository ID", default=None)

    # Nested base ADO credentials; 'configuration_types' lets consumers link
    # this field to an existing 'ado' configuration.
    ado_configuration: AdoConfiguration = Field(
        default_factory=AdoConfiguration,
        description="ADO configuration",
        json_schema_extra={
            'configuration_types': ['ado']
        }
    )
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class AzureSearchConfiguration(BaseModel):
    """Azure AI Search credentials (endpoint, API base and API key).

    The ``metadata`` in the JSON schema tags this model for the configuration
    registry (section/type/label/categories).
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "Azure Search",
                "icon_url": "azure-search.svg",
                "section": "credentials",
                "type": "azure_search",
                "categories": ["search"],
                "extra_categories": ["azure", "cognitive search", "vector database", "knowledge base"],
            }
        }
    )
    # NOTE(review): these fields are annotated Optional but carry no
    # default=None, which makes them required under pydantic v2 — confirm
    # whether that is intended (sibling configurations set default=None).
    endpoint: Optional[str] = Field(description="Azure Search endpoint")
    api_base: Optional[str] = Field(description="Azure Search API base")
    api_key: Optional[SecretStr] = Field(description="API key")
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class BigQueryConfiguration(BaseModel):
    """Google BigQuery credentials and target location (project/dataset/table).

    The ``metadata`` in the JSON schema tags this model for the configuration
    registry (section/type/label/categories).
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "Google BigQuery",
                "icon_url": "google.svg",
                "section": "credentials",
                "type": "bigquery",
                "categories": ["database"],
                "extra_categories": ["google", "gcp", "data warehouse", "analytics"],
            }
        }
    )
    # NOTE(review): fields are Optional but have no default=None, so pydantic
    # v2 treats them as required — confirm intent.
    api_key: Optional[SecretStr] = Field(description="GCP API key")
    project: Optional[str] = Field(description="BigQuery project ID")
    location: Optional[str] = Field(description="BigQuery location")
    dataset: Optional[str] = Field(description="BigQuery dataset name")
    table: Optional[str] = Field(description="BigQuery table name")
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class BitbucketConfiguration(BaseModel):
    """Bitbucket credentials (username + password/app password).

    The ``metadata`` in the JSON schema tags this model for the configuration
    registry; its ``sections.auth.subsections`` lists the field group that
    forms the single supported auth mode.
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "Bitbucket",
                "icon_url": "bitbucket-icon.svg",
                "sections": {
                    "auth": {
                        "required": False,
                        "subsections": [
                            {
                                "name": "Username & Password",
                                "fields": ["username", "password"]
                            }
                        ]
                    },
                },
                "section": "credentials",
                "type": "bitbucket",
                "categories": ["code repositories"],
                "extra_categories": ["bitbucket", "git", "repository", "code", "version control"],
            }
        }
    )
    username: Optional[str] = Field(description="Bitbucket Username", default=None)
    password: Optional[SecretStr] = Field(description="Bitbucket Password/App Password", default=None)
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class ConfluenceConfiguration(BaseModel):
    """Confluence credentials supporting two auth modes:
    username + API key, or a bearer token.

    The ``metadata`` in the JSON schema tags this model for the configuration
    registry; ``sections.auth.subsections`` maps each auth mode to its fields.
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "Confluence",
                "icon_url": "confluence.svg",
                "sections": {
                    "auth": {
                        "required": False,
                        "subsections": [
                            {
                                "name": "API Key",
                                "fields": ["username", "api_key"]
                            },
                            {
                                "name": "Token",
                                "fields": ["token"]
                            }
                        ]
                    },
                },
                "section": "credentials",
                "type": "confluence",
                "categories": ["documentation"],
                "extra_categories": ["confluence", "wiki", "documentation", "knowledge base"],
            }
        }
    )
    # API-key auth pair
    username: Optional[str] = Field(description="Confluence Username", default=None)
    api_key: Optional[SecretStr] = Field(description="Confluence API Key", default=None)
    # Token auth
    token: Optional[SecretStr] = Field(description="Confluence Token", default=None)
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class DeltaLakeConfiguration(BaseModel):
    """AWS Delta Lake credentials and data location (S3 path or table path).

    The ``metadata`` in the JSON schema tags this model for the configuration
    registry (section/type/label/categories).
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "AWS Delta Lake",
                "icon_url": "delta-lake.svg",
                "section": "credentials",
                "type": "delta_lake",
                "categories": ["database"],
                "extra_categories": ["aws", "data lake", "analytics", "storage"],
            }
        }
    )
    # NOTE(review): fields are Optional but have no default=None, so pydantic
    # v2 treats them as required — confirm intent.
    aws_access_key_id: Optional[SecretStr] = Field(description="AWS access key ID")
    aws_secret_access_key: Optional[SecretStr] = Field(description="AWS secret access key")
    aws_session_token: Optional[SecretStr] = Field(description="AWS session token (optional)")
    aws_region: Optional[str] = Field(description="AWS region for Delta Lake storage")
    # Either an explicit S3 path or a table path identifies the data location.
    s3_path: Optional[str] = Field(description="S3 path to Delta Lake data (e.g., s3://bucket/path)")
    table_path: Optional[str] = Field(description="Delta Lake table path (if not using s3_path)")
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class GithubConfiguration(BaseModel):
    """GitHub credentials supporting three auth modes: access token,
    username + password, or GitHub App (app_id + private key).

    The ``metadata`` in the JSON schema tags this model for the configuration
    registry; ``sections.auth.subsections`` maps each auth mode to its fields.
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "GitHub",
                "icon_url": None,
                "sections": {
                    "auth": {
                        "required": False,
                        "subsections": [
                            {
                                "name": "Token",
                                "fields": ["access_token"]
                            },
                            {
                                "name": "Password",
                                "fields": ["username", "password"]
                            },
                            {
                                "name": "App private key",
                                "fields": ["app_id", "app_private_key"]
                            }
                        ]
                    },
                },
                "section": "credentials",
                "type": "github",
                "categories": ["code repositories"],
                "extra_categories": ["github", "git", "repository", "code", "version control"],
            }
        }
    )
    base_url: Optional[str] = Field(description="Base API URL", default="https://api.github.com")
    # GitHub App auth pair
    app_id: Optional[str] = Field(description="Github APP ID", default=None)
    app_private_key: Optional[SecretStr] = Field(description="Github APP private key", default=None)

    # Personal access token auth
    access_token: Optional[SecretStr] = Field(description="Github Access Token", default=None)

    # Basic auth pair
    username: Optional[str] = Field(description="Github Username", default=None)
    password: Optional[SecretStr] = Field(description="Github Password", default=None)
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class GitlabConfiguration(BaseModel):
    """GitLab credentials (private token only).

    The ``metadata`` in the JSON schema tags this model for the configuration
    registry; here ``auth.required`` is True, unlike most sibling
    configurations.
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "GitLab",
                "icon_url": None,
                "sections": {
                    "auth": {
                        "required": True,
                        "subsections": [
                            {
                                "name": "GitLab private token",
                                "fields": ["private_token"]
                            }
                        ]
                    }
                },
                "section": "credentials",
                "type": "gitlab",
                "categories": ["code repositories"],
                "extra_categories": ["gitlab", "git", "repository", "code", "version control"],
            }
        }
    )
    private_token: Optional[SecretStr] = Field(description="GitLab private token", default=None)
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class JiraConfiguration(BaseModel):
    """Jira credentials supporting two auth modes:
    username + API key, or a bearer token.

    The ``metadata`` in the JSON schema tags this model for the configuration
    registry; ``sections.auth.subsections`` maps each auth mode to its fields.
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "Jira",
                "icon_url": "jira.svg",
                "sections": {
                    "auth": {
                        "required": False,
                        "subsections": [
                            {
                                "name": "API Key",
                                "fields": ["username", "api_key"]
                            },
                            {
                                "name": "Token",
                                "fields": ["token"]
                            }
                        ]
                    },
                },
                "section": "credentials",
                "type": "jira",
                "categories": ["project management"],
                "extra_categories": ["jira", "issue tracking", "project management", "agile"],
            }
        }
    )
    # API-key auth pair
    username: Optional[str] = Field(description="Jira Username", default=None)
    api_key: Optional[SecretStr] = Field(description="Jira API Key", default=None)
    # Token auth
    token: Optional[SecretStr] = Field(description="Jira Token", default=None)
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class PgVectorConfiguration(BaseModel):
    """Connection settings for a PgVector vector storage backend.

    The ``metadata`` in the JSON schema tags this model for the configuration
    registry (note: section is "vectorstorage", not "credentials").
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "PgVector",
                "icon_url": None,
                "section": "vectorstorage",
                "type": "pgvector"
            }
        }
    )
    # Fix: annotated Optional[SecretStr]. The original declared a plain
    # SecretStr with default=None, so an unconfigured instance carried a None
    # value that contradicted its own annotation (defaults are not validated
    # by pydantic v2 unless validate_default is set).
    connection_string: Optional[SecretStr] = Field(
        description="Connection string for PgVector database",
        default=None
    )
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class PostmanConfiguration(BaseModel):
    """Postman credentials (API key only).

    The ``metadata`` in the JSON schema tags this model for the configuration
    registry; ``sections.auth.subsections`` lists the single API-key mode.
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "Postman",
                "icon_url": "postman.svg",
                "sections": {
                    "auth": {
                        "required": False,
                        "subsections": [
                            {
                                "name": "API Key",
                                "fields": ["api_key"]
                            }
                        ]
                    },
                },
                "section": "credentials",
                "type": "postman",
                "categories": ["api testing"],
                "extra_categories": ["postman", "api", "testing", "collection"],
            }
        }
    )
    api_key: Optional[SecretStr] = Field(description="Postman API Key", default=None)
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class QtestConfiguration(BaseModel):
    """QTest credentials (API token only).

    The ``metadata`` in the JSON schema tags this model for the configuration
    registry (section/type/label/categories).
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "QTest",
                "icon_url": "qtest.svg",
                "categories": ["test management"],
                "section": "credentials",
                "type": "qtest",
                "extra_categories": ["quality assurance", "test case management", "test planning"]
            }
        }
    )
    qtest_api_token: Optional[SecretStr] = Field(description="QTest API token", default=None)
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class ServiceNowConfiguration(BaseModel):
    """ServiceNow credentials (username + password).

    The ``metadata`` in the JSON schema tags this model for the configuration
    registry; ``sections.auth.subsections`` lists the field group for the
    single supported auth mode.
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "ServiceNow",
                "icon_url": "servicenow.svg",
                "sections": {
                    "auth": {
                        "required": False,
                        "subsections": [
                            {
                                "name": "Username & Password",
                                "fields": ["username", "password"]
                            }
                        ]
                    },
                },
                "section": "credentials",
                "type": "service_now",
                "categories": ["service management"],
                "extra_categories": ["servicenow", "itsm", "service management", "incident"],
            }
        }
    )
    username: Optional[str] = Field(description="ServiceNow Username", default=None)
    password: Optional[SecretStr] = Field(description="ServiceNow Password", default=None)
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class SlackConfiguration(BaseModel):
    """Slack credentials.

    NOTE(review): the ``metadata`` below references fields ``bot_token`` and
    ``user_token`` in its auth subsections, but the model declares ``name``
    and ``slack_token`` instead — these do not match and should be reconciled
    (either rename the fields or fix the subsection lists). Also, ``name`` is
    typed SecretStr and described as a bot token, which looks unintended.
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "Slack",
                "icon_url": "slack.svg",
                "sections": {
                    "auth": {
                        "required": False,
                        "subsections": [
                            {
                                "name": "Bot Token",
                                "fields": ["bot_token"]
                            },
                            {
                                "name": "User Token",
                                "fields": ["user_token"]
                            }
                        ]
                    },
                },
                "section": "credentials",
                "type": "slack",
                "categories": ["communication"],
                "extra_categories": ["slack", "chat", "messaging", "collaboration"],
            }
        }
    )
    # NOTE(review): no default=None here, so these are required under pydantic
    # v2 despite the Optional annotation — confirm intent.
    name: Optional[SecretStr] = Field(description="Slack Bot Token")
    slack_token: Optional[SecretStr] = Field(description="Slack Token like XOXB-*****-*****-*****-*****")
from typing import Optional

from pydantic import BaseModel, ConfigDict, Field, SecretStr


class TestRailConfiguration(BaseModel):
    """TestRail credentials (email + password).

    The ``metadata`` in the JSON schema tags this model for the configuration
    registry (section/type/label/categories).
    """
    model_config = ConfigDict(
        json_schema_extra={
            "metadata": {
                "label": "TestRail",
                "icon_url": "testrail.svg",
                "section": "credentials",
                "type": "testrail",
                "categories": ["test management"],
                "extra_categories": ["testrail", "test management", "quality assurance", "testing"],
            }
        }
    )
    email: Optional[str] = Field(description="TestRail Email", default=None)
    password: Optional[SecretStr] = Field(description="TestRail Password", default=None)
@@ -75,7 +75,7 @@ def get_tools(tools_list: list, alita_client, llm, memory_store: BaseStore = Non
|
|
75
75
|
llm=tool['settings'].get('llm'),
|
76
76
|
# indexer settings
|
77
77
|
connection_string=tool['settings'].get('connection_string', None),
|
78
|
-
collection_name=
|
78
|
+
collection_name=tool.get('toolkit_name'),
|
79
79
|
embedding_model=tool['settings'].get('embedding_model', None),
|
80
80
|
embedding_model_params=tool['settings'].get('embedding_model_params', None),
|
81
81
|
vectorstore_type="PGVector"
|
alita_sdk/tools/ado/__init__.py
CHANGED
@@ -15,13 +15,13 @@ def get_tools(tool_type, tool):
|
|
15
15
|
"limit": tool['settings'].get('limit', 5),
|
16
16
|
"toolkit_name": tool.get('toolkit_name', ''),
|
17
17
|
# indexer settings
|
18
|
-
"llm":tool['settings'].get('llm', None),
|
19
|
-
"connection_string":tool['settings'].get('connection_string', None),
|
20
|
-
"collection_name":
|
21
|
-
"doctype":'doc',
|
22
|
-
"embedding_model":"HuggingFaceEmbeddings",
|
23
|
-
"embedding_model_params":{"model_name": "sentence-transformers/all-MiniLM-L6-v2"},
|
24
|
-
"vectorstore_type":"PGVector"
|
18
|
+
"llm": tool['settings'].get('llm', None),
|
19
|
+
"connection_string": tool['settings'].get('connection_string', None),
|
20
|
+
"collection_name": tool['toolkit_name'],
|
21
|
+
"doctype": 'doc',
|
22
|
+
"embedding_model": "HuggingFaceEmbeddings",
|
23
|
+
"embedding_model_params": {"model_name": "sentence-transformers/all-MiniLM-L6-v2"},
|
24
|
+
"vectorstore_type": "PGVector"
|
25
25
|
}
|
26
26
|
if tool_type == 'ado_plans':
|
27
27
|
return AzureDevOpsPlansToolkit().get_toolkit(**config_dict).get_tools()
|