alita-sdk 0.3.228__py3-none-any.whl → 0.3.230__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alita_sdk/configurations/__init__.py +66 -0
- alita_sdk/configurations/ado.py +41 -0
- alita_sdk/configurations/azure_search.py +21 -0
- alita_sdk/configurations/bigquery.py +23 -0
- alita_sdk/configurations/bitbucket.py +31 -0
- alita_sdk/configurations/confluence.py +36 -0
- alita_sdk/configurations/delta_lake.py +24 -0
- alita_sdk/configurations/github.py +45 -0
- alita_sdk/configurations/gitlab.py +31 -0
- alita_sdk/configurations/jira.py +36 -0
- alita_sdk/configurations/pgvector.py +18 -0
- alita_sdk/configurations/postman.py +30 -0
- alita_sdk/configurations/qtest.py +20 -0
- alita_sdk/configurations/service_now.py +31 -0
- alita_sdk/configurations/slack.py +35 -0
- alita_sdk/configurations/testrail.py +20 -0
- alita_sdk/runtime/langchain/document_loaders/AlitaDocLoader.py +29 -0
- alita_sdk/runtime/langchain/document_loaders/constants.py +6 -0
- alita_sdk/runtime/langchain/document_loaders/utils.py +22 -1
- alita_sdk/runtime/toolkits/configurations.py +4 -0
- alita_sdk/tools/ado/__init__.py +2 -3
- alita_sdk/tools/ado/repos/__init__.py +26 -33
- alita_sdk/tools/ado/repos/repos_wrapper.py +1 -0
- alita_sdk/tools/ado/test_plan/__init__.py +17 -15
- alita_sdk/tools/ado/test_plan/test_plan_wrapper.py +1 -0
- alita_sdk/tools/ado/wiki/__init__.py +18 -20
- alita_sdk/tools/ado/wiki/ado_wrapper.py +1 -0
- alita_sdk/tools/ado/work_item/__init__.py +18 -19
- alita_sdk/tools/ado/work_item/ado_wrapper.py +1 -0
- alita_sdk/tools/aws/delta_lake/__init__.py +9 -13
- alita_sdk/tools/azure_ai/search/__init__.py +13 -16
- alita_sdk/tools/bitbucket/__init__.py +14 -12
- alita_sdk/tools/confluence/__init__.py +25 -21
- alita_sdk/tools/figma/__init__.py +8 -5
- alita_sdk/tools/figma/api_wrapper.py +37 -12
- alita_sdk/tools/github/__init__.py +22 -46
- alita_sdk/tools/gitlab/__init__.py +14 -9
- alita_sdk/tools/google/bigquery/__init__.py +11 -27
- alita_sdk/tools/jira/__init__.py +23 -19
- alita_sdk/tools/postman/__init__.py +11 -6
- alita_sdk/tools/qtest/__init__.py +9 -3
- alita_sdk/tools/servicenow/__init__.py +12 -6
- alita_sdk/tools/sharepoint/__init__.py +8 -5
- alita_sdk/tools/slack/__init__.py +12 -9
- alita_sdk/tools/testrail/__init__.py +14 -12
- alita_sdk/tools/utils/content_parser.py +16 -26
- {alita_sdk-0.3.228.dist-info → alita_sdk-0.3.230.dist-info}/METADATA +2 -2
- {alita_sdk-0.3.228.dist-info → alita_sdk-0.3.230.dist-info}/RECORD +51 -33
- {alita_sdk-0.3.228.dist-info → alita_sdk-0.3.230.dist-info}/WHEEL +0 -0
- {alita_sdk-0.3.228.dist-info → alita_sdk-0.3.230.dist-info}/licenses/LICENSE +0 -0
- {alita_sdk-0.3.228.dist-info → alita_sdk-0.3.230.dist-info}/top_level.txt +0 -0
alita_sdk/configurations/__init__.py ADDED

```diff
@@ -0,0 +1,66 @@
+import logging
+
+logger = logging.getLogger(__name__)
+
+AVAILABLE_CONFIGURATIONS = {}
+AVAILABLE_CLASS_CONFIGURATIONS = {}
+FAILED_IMPORTS = {}
+
+
+def _safe_import_configuration(
+        configuration_name, module_path, configuration_class_name
+):
+    """Safely import a configuration module and register available functions/classes."""
+    try:
+        module = __import__(f'alita_sdk.configurations.{module_path}', fromlist=[''])
+        configuration_class = getattr(module, configuration_class_name)
+        AVAILABLE_CONFIGURATIONS[configuration_name] = configuration_class.model_json_schema()
+        AVAILABLE_CLASS_CONFIGURATIONS[configuration_name] = configuration_class
+        logger.debug(f"Successfully imported {configuration_name}")
+    except Exception as e:
+        FAILED_IMPORTS[configuration_name] = str(e)
+        logger.debug(f"Failed to import {configuration_name}: {e}")
+
+# Safe imports for all tools
+_safe_import_configuration('github', 'github', 'GithubConfiguration')
+_safe_import_configuration('pgvector', 'pgvector', 'PgVectorConfiguration')
+_safe_import_configuration('ado', 'ado', 'AdoConfiguration')
+_safe_import_configuration('ado_repos', 'ado', 'AdoReposConfiguration')
+_safe_import_configuration('gitlab', 'gitlab', 'GitlabConfiguration')
+_safe_import_configuration('qtest', 'qtest', 'QtestConfiguration')
+_safe_import_configuration('bitbucket', 'bitbucket', 'BitbucketConfiguration')
+_safe_import_configuration('confluence', 'confluence', 'ConfluenceConfiguration')
+_safe_import_configuration('jira', 'jira', 'JiraConfiguration')
+_safe_import_configuration('postman', 'postman', 'PostmanConfiguration')
+_safe_import_configuration('service_now', 'service_now', 'ServiceNowConfiguration')
+_safe_import_configuration('testrail', 'testrail', 'TestRailConfiguration')
+_safe_import_configuration('slack', 'slack', 'SlackConfiguration')
+_safe_import_configuration('azure_search', 'azure_search', 'AzureSearchConfiguration')
+_safe_import_configuration('delta_lake', 'delta_lake', 'DeltaLakeConfiguration')
+_safe_import_configuration('bigquery', 'bigquery', 'BigQueryConfiguration')
+
+# Log import summary
+available_count = len(AVAILABLE_CONFIGURATIONS)
+total_attempted = len(AVAILABLE_CONFIGURATIONS) + len(FAILED_IMPORTS)
+logger.info(f"Configuration imports completed: {available_count}/{total_attempted} successful")
+
+
+def get_configurations():
+    """Return all available configuration schemas."""
+    return AVAILABLE_CONFIGURATIONS.copy()
+
+
+def get_class_configurations():
+    """Return all available configuration classes."""
+    return AVAILABLE_CLASS_CONFIGURATIONS.copy()
+
+
+def get_available_configurations():
+    """Return list of available configuration class names."""
+    return list(AVAILABLE_CONFIGURATIONS.keys())
+
+
+__all__ = [
+    'get_configurations',
+    'get_available_configurations',
+]
```
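The registry fails soft: a missing optional dependency removes just that one configuration from the registry instead of breaking the whole package import. A minimal sketch of how a consumer might read it back (assuming alita-sdk 0.3.230 is installed; the printing loop is illustrative, not SDK API):

```python
from alita_sdk.configurations import (
    get_configurations,
    get_available_configurations,
)

# Schemas are plain JSON-schema dicts from model_json_schema(), so the
# metadata from json_schema_extra is available for a config-form UI.
for name, schema in get_configurations().items():
    metadata = schema.get("metadata", {})
    print(name, "->", metadata.get("label"), metadata.get("section"))

# Names whose import failed are simply absent here; the reasons are kept
# in alita_sdk.configurations.FAILED_IMPORTS for debugging.
print(get_available_configurations())
```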
alita_sdk/configurations/ado.py ADDED

```diff
@@ -0,0 +1,41 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class AdoConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "Ado",
+                "icon_url": None,
+                "section": "credentials",
+                "type": "ado"
+            }
+        }
+    )
+    organization_url: Optional[str] = Field(description="Base API URL")
+    project: Optional[str] = Field(description="ADO project", default=None)
+    token: Optional[SecretStr] = Field(description="ADO Token")
+
+
+class AdoReposConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "ADO repos",
+                "icon_url": "ado-repos-icon.svg",
+                "section": "credentials",
+                "type": "ado_repos"
+            }
+        }
+    )
+    repository_id: Optional[str] = Field(description="ADO repository ID", default=None)
+
+    ado_configuration: AdoConfiguration = Field(
+        default_factory=AdoConfiguration,
+        description="ADO configuration",
+        json_schema_extra={
+            'configuration_types': ['ado']
+        }
+    )
```
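A hedged sketch of how the nested model composes: `AdoReposConfiguration` embeds an `AdoConfiguration`, so validation and secret masking carry through. All values below are placeholders, not real credentials:

```python
from alita_sdk.configurations.ado import AdoConfiguration, AdoReposConfiguration

repos_config = AdoReposConfiguration(
    repository_id="my-repo",  # placeholder values for illustration
    ado_configuration=AdoConfiguration(
        organization_url="https://dev.azure.com/my-org",
        project="MyProject",
        token="dummy-pat",
    ),
)

# SecretStr masks the token in reprs and dumps...
print(repos_config.ado_configuration.token)  # -> **********
# ...and the json_schema_extra metadata flows into the generated schema.
print(AdoReposConfiguration.model_json_schema()["metadata"]["type"])  # -> ado_repos
```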
alita_sdk/configurations/azure_search.py ADDED

```diff
@@ -0,0 +1,21 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class AzureSearchConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "Azure Search",
+                "icon_url": "azure-search.svg",
+                "section": "credentials",
+                "type": "azure_search",
+                "categories": ["search"],
+                "extra_categories": ["azure", "cognitive search", "vector database", "knowledge base"],
+            }
+        }
+    )
+    endpoint: Optional[str] = Field(description="Azure Search endpoint")
+    api_base: Optional[str] = Field(description="Azure Search API base")
+    api_key: Optional[SecretStr] = Field(description="API key")
```
alita_sdk/configurations/bigquery.py ADDED

```diff
@@ -0,0 +1,23 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class BigQueryConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "Google BigQuery",
+                "icon_url": "google.svg",
+                "section": "credentials",
+                "type": "bigquery",
+                "categories": ["database"],
+                "extra_categories": ["google", "gcp", "data warehouse", "analytics"],
+            }
+        }
+    )
+    api_key: Optional[SecretStr] = Field(description="GCP API key")
+    project: Optional[str] = Field(description="BigQuery project ID")
+    location: Optional[str] = Field(description="BigQuery location")
+    dataset: Optional[str] = Field(description="BigQuery dataset name")
+    table: Optional[str] = Field(description="BigQuery table name")
```
alita_sdk/configurations/bitbucket.py ADDED

```diff
@@ -0,0 +1,31 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class BitbucketConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "Bitbucket",
+                "icon_url": "bitbucket-icon.svg",
+                "sections": {
+                    "auth": {
+                        "required": False,
+                        "subsections": [
+                            {
+                                "name": "Username & Password",
+                                "fields": ["username", "password"]
+                            }
+                        ]
+                    },
+                },
+                "section": "credentials",
+                "type": "bitbucket",
+                "categories": ["code repositories"],
+                "extra_categories": ["bitbucket", "git", "repository", "code", "version control"],
+            }
+        }
+    )
+    username: Optional[str] = Field(description="Bitbucket Username", default=None)
+    password: Optional[SecretStr] = Field(description="Bitbucket Password/App Password", default=None)
```
alita_sdk/configurations/confluence.py ADDED

```diff
@@ -0,0 +1,36 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class ConfluenceConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "Confluence",
+                "icon_url": "confluence.svg",
+                "sections": {
+                    "auth": {
+                        "required": False,
+                        "subsections": [
+                            {
+                                "name": "API Key",
+                                "fields": ["username", "api_key"]
+                            },
+                            {
+                                "name": "Token",
+                                "fields": ["token"]
+                            }
+                        ]
+                    },
+                },
+                "section": "credentials",
+                "type": "confluence",
+                "categories": ["documentation"],
+                "extra_categories": ["confluence", "wiki", "documentation", "knowledge base"],
+            }
+        }
+    )
+    username: Optional[str] = Field(description="Confluence Username", default=None)
+    api_key: Optional[SecretStr] = Field(description="Confluence API Key", default=None)
+    token: Optional[SecretStr] = Field(description="Confluence Token", default=None)
```
alita_sdk/configurations/delta_lake.py ADDED

```diff
@@ -0,0 +1,24 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class DeltaLakeConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "AWS Delta Lake",
+                "icon_url": "delta-lake.svg",
+                "section": "credentials",
+                "type": "delta_lake",
+                "categories": ["database"],
+                "extra_categories": ["aws", "data lake", "analytics", "storage"],
+            }
+        }
+    )
+    aws_access_key_id: Optional[SecretStr] = Field(description="AWS access key ID")
+    aws_secret_access_key: Optional[SecretStr] = Field(description="AWS secret access key")
+    aws_session_token: Optional[SecretStr] = Field(description="AWS session token (optional)")
+    aws_region: Optional[str] = Field(description="AWS region for Delta Lake storage")
+    s3_path: Optional[str] = Field(description="S3 path to Delta Lake data (e.g., s3://bucket/path)")
+    table_path: Optional[str] = Field(description="Delta Lake table path (if not using s3_path)")
```
alita_sdk/configurations/github.py ADDED

```diff
@@ -0,0 +1,45 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class GithubConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "GitHub",
+                "icon_url": None,
+                "sections": {
+                    "auth": {
+                        "required": False,
+                        "subsections": [
+                            {
+                                "name": "Token",
+                                "fields": ["access_token"]
+                            },
+                            {
+                                "name": "Password",
+                                "fields": ["username", "password"]
+                            },
+                            {
+                                "name": "App private key",
+                                "fields": ["app_id", "app_private_key"]
+                            }
+                        ]
+                    },
+                },
+                "section": "credentials",
+                "type": "github",
+                "categories": ["code repositories"],
+                "extra_categories": ["github", "git", "repository", "code", "version control"],
+            }
+        }
+    )
+    base_url: Optional[str] = Field(description="Base API URL", default="https://api.github.com")
+    app_id: Optional[str] = Field(description="Github APP ID", default=None)
+    app_private_key: Optional[SecretStr] = Field(description="Github APP private key", default=None)
+
+    access_token: Optional[SecretStr] = Field(description="Github Access Token", default=None)
+
+    username: Optional[str] = Field(description="Github Username", default=None)
+    password: Optional[SecretStr] = Field(description="Github Password", default=None)
```
alita_sdk/configurations/gitlab.py ADDED

```diff
@@ -0,0 +1,31 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class GitlabConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "GitLab",
+                "icon_url": None,
+                "sections": {
+                    "auth": {
+                        "required": True,
+                        "subsections": [
+                            {
+                                "name": "GitLab private token",
+                                "fields": ["private_token"]
+                            }
+                        ]
+                    }
+                },
+                "section": "credentials",
+                "type": "gitlab",
+                "categories": ["code repositories"],
+                "extra_categories": ["gitlab", "git", "repository", "code", "version control"],
+            }
+        }
+    )
+    private_token: Optional[SecretStr] = Field(description="GitLab private token", default=None)
+
```
alita_sdk/configurations/jira.py ADDED

```diff
@@ -0,0 +1,36 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class JiraConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "Jira",
+                "icon_url": "jira.svg",
+                "sections": {
+                    "auth": {
+                        "required": False,
+                        "subsections": [
+                            {
+                                "name": "API Key",
+                                "fields": ["username", "api_key"]
+                            },
+                            {
+                                "name": "Token",
+                                "fields": ["token"]
+                            }
+                        ]
+                    },
+                },
+                "section": "credentials",
+                "type": "jira",
+                "categories": ["project management"],
+                "extra_categories": ["jira", "issue tracking", "project management", "agile"],
+            }
+        }
+    )
+    username: Optional[str] = Field(description="Jira Username", default=None)
+    api_key: Optional[SecretStr] = Field(description="Jira API Key", default=None)
+    token: Optional[SecretStr] = Field(description="Jira Token", default=None)
```
alita_sdk/configurations/pgvector.py ADDED

```diff
@@ -0,0 +1,18 @@
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class PgVectorConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "PgVector",
+                "icon_url": None,
+                "section": "vectorstorage",
+                "type": "pgvector"
+            }
+        }
+    )
+    connection_string: SecretStr = Field(
+        description="Connection string for PgVector database",
+        default=None
+    )
```
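A short sketch of the `SecretStr` behavior this model relies on: the connection string is masked in reprs and dumps, and `get_secret_value()` is pydantic's explicit escape hatch. The DSN below is a placeholder:

```python
from alita_sdk.configurations.pgvector import PgVectorConfiguration

config = PgVectorConfiguration(
    connection_string="postgresql+psycopg://user:pass@localhost:5432/vectors"  # placeholder DSN
)

print(config)                                       # connection_string=SecretStr('**********')
print(config.connection_string.get_secret_value())  # raw DSN, only on explicit request
```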
alita_sdk/configurations/postman.py ADDED

```diff
@@ -0,0 +1,30 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class PostmanConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "Postman",
+                "icon_url": "postman.svg",
+                "sections": {
+                    "auth": {
+                        "required": False,
+                        "subsections": [
+                            {
+                                "name": "API Key",
+                                "fields": ["api_key"]
+                            }
+                        ]
+                    },
+                },
+                "section": "credentials",
+                "type": "postman",
+                "categories": ["api testing"],
+                "extra_categories": ["postman", "api", "testing", "collection"],
+            }
+        }
+    )
+    api_key: Optional[SecretStr] = Field(description="Postman API Key", default=None)
```
alita_sdk/configurations/qtest.py ADDED

```diff
@@ -0,0 +1,20 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class QtestConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "QTest",
+                "icon_url": "qtest.svg",
+                "categories": ["test management"],
+                "section": "credentials",
+                "type": "qtest",
+                "extra_categories": ["quality assurance", "test case management", "test planning"]
+            }
+        }
+    )
+    qtest_api_token: Optional[SecretStr] = Field(description="QTest API token", default=None)
+
```
alita_sdk/configurations/service_now.py ADDED

```diff
@@ -0,0 +1,31 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class ServiceNowConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "ServiceNow",
+                "icon_url": "servicenow.svg",
+                "sections": {
+                    "auth": {
+                        "required": False,
+                        "subsections": [
+                            {
+                                "name": "Username & Password",
+                                "fields": ["username", "password"]
+                            }
+                        ]
+                    },
+                },
+                "section": "credentials",
+                "type": "service_now",
+                "categories": ["service management"],
+                "extra_categories": ["servicenow", "itsm", "service management", "incident"],
+            }
+        }
+    )
+    username: Optional[str] = Field(description="ServiceNow Username", default=None)
+    password: Optional[SecretStr] = Field(description="ServiceNow Password", default=None)
```
alita_sdk/configurations/slack.py ADDED

```diff
@@ -0,0 +1,35 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class SlackConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "Slack",
+                "icon_url": "slack.svg",
+                "sections": {
+                    "auth": {
+                        "required": False,
+                        "subsections": [
+                            {
+                                "name": "Bot Token",
+                                "fields": ["bot_token"]
+                            },
+                            {
+                                "name": "User Token",
+                                "fields": ["user_token"]
+                            }
+                        ]
+                    },
+                },
+                "section": "credentials",
+                "type": "slack",
+                "categories": ["communication"],
+                "extra_categories": ["slack", "chat", "messaging", "collaboration"],
+            }
+        }
+    )
+    name: Optional[SecretStr] = Field(description="Slack Bot Token")
+    slack_token: Optional[SecretStr] = Field(description="Slack Token like XOXB-*****-*****-*****-*****")
```
alita_sdk/configurations/testrail.py ADDED

```diff
@@ -0,0 +1,20 @@
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
+
+
+class TestRailConfiguration(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "metadata": {
+                "label": "TestRail",
+                "icon_url": "testrail.svg",
+                "section": "credentials",
+                "type": "testrail",
+                "categories": ["test management"],
+                "extra_categories": ["testrail", "test management", "quality assurance", "testing"],
+            }
+        }
+    )
+    email: Optional[str] = Field(description="TestRail Email", default=None)
+    password: Optional[SecretStr] = Field(description="TestRail Password", default=None)
```
alita_sdk/runtime/langchain/document_loaders/AlitaDocLoader.py ADDED

```diff
@@ -0,0 +1,29 @@
+from langchain_core.document_loaders import BaseLoader
+from langchain_core.documents import Document
+
+from .utils import file_to_bytes
+
+
+class AlitaDocLoader(BaseLoader):
+
+    def __init__(self, **kwargs):
+        if kwargs.get('file_path'):
+            self.file_path = kwargs['file_path']
+        elif kwargs.get('file_content'):
+            self.file_content = kwargs['file_content']
+            self.file_name = kwargs['file_name']
+        else:
+            raise ValueError(
+                "Path parameter is required (either as 'file_path' positional argument or 'path' keyword argument)")
+
+    def load(self):
+        result_content = self.get_content()
+        return [Document(page_content=result_content, metadata={'source': str(self.file_path if hasattr(self, 'file_path') else self.file_name)})]
+
+    def get_content(self):
+        try:
+            import textract
+            content = textract.process(None, extension='doc', input_data=self.file_content if hasattr(self, 'file_content') else file_to_bytes(self.file_path)).decode('utf-8')
+        except Exception as e:
+            content = f"[Error extracting doc: {str(e)}]"
+        return content
```
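A minimal usage sketch of the new loader in both input modes it accepts, assuming `textract` is installed; paths and file names are placeholders:

```python
from alita_sdk.runtime.langchain.document_loaders.AlitaDocLoader import AlitaDocLoader

# Path-based: the loader reads the bytes itself via file_to_bytes().
docs = AlitaDocLoader(file_path="reports/legacy.doc").load()  # placeholder path
print(docs[0].metadata["source"], len(docs[0].page_content))

# Bytes-based: useful when the file came from an API rather than disk;
# file_name is required here because it becomes the source metadata.
with open("reports/legacy.doc", "rb") as fh:
    docs = AlitaDocLoader(file_content=fh.read(), file_name="legacy.doc").load()
```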
alita_sdk/runtime/langchain/document_loaders/constants.py CHANGED

```diff
@@ -18,6 +18,7 @@ from langchain_community.document_loaders import (
     UnstructuredPowerPointLoader, PythonLoader)
 
 from .AlitaCSVLoader import AlitaCSVLoader
+from .AlitaDocLoader import AlitaDocLoader
 from .AlitaDocxMammothLoader import AlitaDocxMammothLoader
 from .AlitaExcelLoader import AlitaExcelLoader
 from .AlitaImageLoader import AlitaImageLoader
@@ -124,6 +125,11 @@ loaders_map = {
         'is_multimodal_processing': True,
         'kwargs': {}
     },
+    '.doc': {
+        'class': AlitaDocLoader,
+        'is_multimodal_processing': True,
+        'kwargs': {}
+    },
     '.json': {
         'class': AlitaTextLoader,
         'is_multimodal_processing': False,
```
alita_sdk/runtime/langchain/document_loaders/utils.py CHANGED

```diff
@@ -60,4 +60,25 @@ def create_temp_file(file_content: bytes):
     temp_file.flush()  # Ensure data is written
 
     # Get the file path for operations
-    return temp_file.name
+    return temp_file.name
+
+def file_to_bytes(filepath):
+    """
+    Reads a file and returns its content as a bytes object.
+
+    Args:
+        filepath (str): The path to the file.
+
+    Returns:
+        bytes: The content of the file as a bytes object.
+    """
+    try:
+        with open(filepath, "rb") as f:
+            file_content_bytes = f.read()
+        return file_content_bytes
+    except FileNotFoundError:
+        logger.error(f"File not found: {filepath}")
+        return None
+    except Exception as e:
+        logger.error(f"Error reading file {filepath}: {e}")
+        return None
```
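Note that `file_to_bytes` logs and returns `None` instead of raising, so callers need an explicit guard; a minimal sketch with a placeholder path:

```python
from alita_sdk.runtime.langchain.document_loaders.utils import file_to_bytes

data = file_to_bytes("somewhere/report.doc")  # placeholder path
if data is None:
    # The helper already logged the reason (missing file, permissions, ...).
    raise RuntimeError("could not read file; see logs")
print(f"read {len(data)} bytes")
```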
alita_sdk/tools/ado/__init__.py CHANGED

```diff
@@ -10,13 +10,12 @@ def get_tools(tool_type, tool):
         # common
         "selected_tools": tool['settings'].get('selected_tools', []),
         "organization_url": tool['settings']['organization_url'],
-        "
-        "token": tool['settings'].get('token', None),
+        "ado_configuration": tool['settings']['ado_configuration'],
         "limit": tool['settings'].get('limit', 5),
         "toolkit_name": tool.get('toolkit_name', ''),
         # indexer settings
         "llm": tool['settings'].get('llm', None),
-        "
+        "pgvector_configuration": tool['settings'].get('pgvector_configuration', {}),
         "collection_name": tool['toolkit_name'],
         "doctype": 'doc',
         "embedding_model": "HuggingFaceEmbeddings",
```
|