altimate-engine 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77) hide show
  1. altimate_engine-0.1.0/.gitignore +36 -0
  2. altimate_engine-0.1.0/PKG-INFO +76 -0
  3. altimate_engine-0.1.0/README.md +31 -0
  4. altimate_engine-0.1.0/pyproject.toml +66 -0
  5. altimate_engine-0.1.0/src/altimate_engine/__init__.py +3 -0
  6. altimate_engine-0.1.0/src/altimate_engine/__main__.py +5 -0
  7. altimate_engine-0.1.0/src/altimate_engine/app/__init__.py +0 -0
  8. altimate_engine-0.1.0/src/altimate_engine/ci/__init__.py +0 -0
  9. altimate_engine-0.1.0/src/altimate_engine/ci/cost_gate.py +162 -0
  10. altimate_engine-0.1.0/src/altimate_engine/connections.py +281 -0
  11. altimate_engine-0.1.0/src/altimate_engine/connectors/__init__.py +21 -0
  12. altimate_engine-0.1.0/src/altimate_engine/connectors/base.py +46 -0
  13. altimate_engine-0.1.0/src/altimate_engine/connectors/bigquery.py +188 -0
  14. altimate_engine-0.1.0/src/altimate_engine/connectors/databricks.py +200 -0
  15. altimate_engine-0.1.0/src/altimate_engine/connectors/duckdb.py +91 -0
  16. altimate_engine-0.1.0/src/altimate_engine/connectors/mysql.py +129 -0
  17. altimate_engine-0.1.0/src/altimate_engine/connectors/postgres.py +109 -0
  18. altimate_engine-0.1.0/src/altimate_engine/connectors/redshift.py +106 -0
  19. altimate_engine-0.1.0/src/altimate_engine/connectors/snowflake.py +150 -0
  20. altimate_engine-0.1.0/src/altimate_engine/connectors/sqlserver.py +201 -0
  21. altimate_engine-0.1.0/src/altimate_engine/credential_store.py +123 -0
  22. altimate_engine-0.1.0/src/altimate_engine/dbt/__init__.py +1 -0
  23. altimate_engine-0.1.0/src/altimate_engine/dbt/lineage.py +168 -0
  24. altimate_engine-0.1.0/src/altimate_engine/dbt/manifest.py +112 -0
  25. altimate_engine-0.1.0/src/altimate_engine/dbt/profiles.py +164 -0
  26. altimate_engine-0.1.0/src/altimate_engine/dbt/runner.py +68 -0
  27. altimate_engine-0.1.0/src/altimate_engine/docker_discovery.py +118 -0
  28. altimate_engine-0.1.0/src/altimate_engine/finops/__init__.py +0 -0
  29. altimate_engine-0.1.0/src/altimate_engine/finops/credit_analyzer.py +346 -0
  30. altimate_engine-0.1.0/src/altimate_engine/finops/query_history.py +218 -0
  31. altimate_engine-0.1.0/src/altimate_engine/finops/role_access.py +255 -0
  32. altimate_engine-0.1.0/src/altimate_engine/finops/unused_resources.py +226 -0
  33. altimate_engine-0.1.0/src/altimate_engine/finops/warehouse_advisor.py +245 -0
  34. altimate_engine-0.1.0/src/altimate_engine/local/__init__.py +1 -0
  35. altimate_engine-0.1.0/src/altimate_engine/local/schema_sync.py +242 -0
  36. altimate_engine-0.1.0/src/altimate_engine/local/test_local.py +74 -0
  37. altimate_engine-0.1.0/src/altimate_engine/models.py +1082 -0
  38. altimate_engine-0.1.0/src/altimate_engine/py.typed +0 -0
  39. altimate_engine-0.1.0/src/altimate_engine/schema/__init__.py +1 -0
  40. altimate_engine-0.1.0/src/altimate_engine/schema/cache.py +394 -0
  41. altimate_engine-0.1.0/src/altimate_engine/schema/inspector.py +122 -0
  42. altimate_engine-0.1.0/src/altimate_engine/schema/pii_detector.py +234 -0
  43. altimate_engine-0.1.0/src/altimate_engine/schema/tags.py +151 -0
  44. altimate_engine-0.1.0/src/altimate_engine/server.py +973 -0
  45. altimate_engine-0.1.0/src/altimate_engine/sql/__init__.py +1 -0
  46. altimate_engine-0.1.0/src/altimate_engine/sql/autocomplete.py +152 -0
  47. altimate_engine-0.1.0/src/altimate_engine/sql/diff.py +63 -0
  48. altimate_engine-0.1.0/src/altimate_engine/sql/executor.py +116 -0
  49. altimate_engine-0.1.0/src/altimate_engine/sql/explainer.py +116 -0
  50. altimate_engine-0.1.0/src/altimate_engine/sql/feedback_store.py +392 -0
  51. altimate_engine-0.1.0/src/altimate_engine/sql/guard.py +657 -0
  52. altimate_engine-0.1.0/src/altimate_engine/ssh_tunnel.py +108 -0
  53. altimate_engine-0.1.0/tests/__init__.py +0 -0
  54. altimate_engine-0.1.0/tests/test_autocomplete.py +159 -0
  55. altimate_engine-0.1.0/tests/test_connections.py +501 -0
  56. altimate_engine-0.1.0/tests/test_connectors.py +86 -0
  57. altimate_engine-0.1.0/tests/test_cost_gate.py +186 -0
  58. altimate_engine-0.1.0/tests/test_credential_store.py +178 -0
  59. altimate_engine-0.1.0/tests/test_dbt_profiles.py +277 -0
  60. altimate_engine-0.1.0/tests/test_diff.py +197 -0
  61. altimate_engine-0.1.0/tests/test_docker_discovery.py +256 -0
  62. altimate_engine-0.1.0/tests/test_enterprise_connectors.py +188 -0
  63. altimate_engine-0.1.0/tests/test_executor.py +240 -0
  64. altimate_engine-0.1.0/tests/test_explainer.py +62 -0
  65. altimate_engine-0.1.0/tests/test_feedback_store.py +414 -0
  66. altimate_engine-0.1.0/tests/test_finops.py +580 -0
  67. altimate_engine-0.1.0/tests/test_guard.py +207 -0
  68. altimate_engine-0.1.0/tests/test_guard_new.py +786 -0
  69. altimate_engine-0.1.0/tests/test_local.py +194 -0
  70. altimate_engine-0.1.0/tests/test_manifest.py +422 -0
  71. altimate_engine-0.1.0/tests/test_pii_detector.py +381 -0
  72. altimate_engine-0.1.0/tests/test_schema_cache.py +239 -0
  73. altimate_engine-0.1.0/tests/test_server.py +456 -0
  74. altimate_engine-0.1.0/tests/test_server_guard.py +178 -0
  75. altimate_engine-0.1.0/tests/test_server_guard_new.py +684 -0
  76. altimate_engine-0.1.0/tests/test_ssh_tunnel.py +180 -0
  77. altimate_engine-0.1.0/tests/test_tags.py +259 -0
@@ -0,0 +1,36 @@
1
+ # Dependencies
2
+ node_modules/
3
+ .venv/
4
+
5
+ # Build artifacts
6
+ .turbo/
7
+ dist/
8
+ *.tsbuildinfo
9
+
10
+ # OS files
11
+ .DS_Store
12
+ Thumbs.db
13
+
14
+ # IDE
15
+ .idea/
16
+ .vscode/
17
+ *.swp
18
+ *.swo
19
+
20
+ # Environment
21
+ .env
22
+ .env.local
23
+
24
+ # Python
25
+ __pycache__/
26
+ *.pyc
27
+ *.pyo
28
+ *.egg-info/
29
+
30
+ # SQLite databases (feedback store creates these at runtime)
31
+ *.db
32
+
33
+ # Large intermediate files at repo root (generated during benchmark runs)
34
+ /queries.json
35
+ /queries_1k.json
36
+ /results/
@@ -0,0 +1,76 @@
1
+ Metadata-Version: 2.4
2
+ Name: altimate-engine
3
+ Version: 0.1.0
4
+ Summary: Python engine for Altimate Code — SQL analysis, lineage, dbt integration
5
+ Project-URL: Homepage, https://github.com/AltimateAI/altimate-code
6
+ Project-URL: Documentation, https://altimate-code.sh
7
+ Project-URL: Repository, https://github.com/AltimateAI/altimate-code
8
+ Project-URL: Issues, https://github.com/AltimateAI/altimate-code/issues
9
+ Author-email: Altimate Inc <info@altimate.ai>
10
+ License-Expression: MIT
11
+ Keywords: bigquery,data-engineering,dbt,lineage,snowflake,sql
12
+ Classifier: Development Status :: 4 - Beta
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3.10
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Topic :: Database
19
+ Classifier: Topic :: Software Development :: Libraries
20
+ Requires-Python: >=3.10
21
+ Requires-Dist: pydantic>=2.0
22
+ Requires-Dist: pyyaml>=6.0
23
+ Provides-Extra: dev
24
+ Requires-Dist: duckdb>=0.9; extra == 'dev'
25
+ Requires-Dist: pytest>=7.0; extra == 'dev'
26
+ Requires-Dist: ruff>=0.4; extra == 'dev'
27
+ Provides-Extra: docker
28
+ Requires-Dist: docker>=7.0; extra == 'docker'
29
+ Provides-Extra: security
30
+ Requires-Dist: keyring>=24.0; extra == 'security'
31
+ Provides-Extra: tunneling
32
+ Requires-Dist: paramiko>=3.0; extra == 'tunneling'
33
+ Requires-Dist: sshtunnel>=0.4; extra == 'tunneling'
34
+ Provides-Extra: warehouses
35
+ Requires-Dist: boto3>=1.28; extra == 'warehouses'
36
+ Requires-Dist: cryptography>=41.0; extra == 'warehouses'
37
+ Requires-Dist: databricks-sql-connector>=3.0; extra == 'warehouses'
38
+ Requires-Dist: duckdb>=0.9; extra == 'warehouses'
39
+ Requires-Dist: google-cloud-bigquery>=3.0; extra == 'warehouses'
40
+ Requires-Dist: mysql-connector-python>=8.0; extra == 'warehouses'
41
+ Requires-Dist: psycopg2-binary>=2.9; extra == 'warehouses'
42
+ Requires-Dist: pyodbc>=5.0; extra == 'warehouses'
43
+ Requires-Dist: snowflake-connector-python>=3.0; extra == 'warehouses'
44
+ Description-Content-Type: text/markdown
45
+
46
+ # altimate-engine
47
+
48
+ Python engine for [Altimate Code](https://github.com/AltimateAI/altimate-code) — SQL analysis, column-level lineage, and dbt integration.
49
+
50
+ ## Installation
51
+
52
+ ```
53
+ pip install altimate-engine
54
+ ```
55
+
56
+ For warehouse connectivity (Snowflake, BigQuery, Databricks, etc.):
57
+
58
+ ```
59
+ pip install altimate-engine[warehouses]
60
+ ```
61
+
62
+ ## Usage
63
+
64
+ This package is designed to be used as a sidecar process for the Altimate Code CLI. It communicates via JSON-RPC over stdio.
65
+
66
+ ```bash
67
+ python -m altimate_engine.server
68
+ ```
69
+
70
+ ## Documentation
71
+
72
+ See the main repository for full documentation: https://github.com/AltimateAI/altimate-code
73
+
74
+ ## License
75
+
76
+ MIT
@@ -0,0 +1,31 @@
1
+ # altimate-engine
2
+
3
+ Python engine for [Altimate Code](https://github.com/AltimateAI/altimate-code) — SQL analysis, column-level lineage, and dbt integration.
4
+
5
+ ## Installation
6
+
7
+ ```
8
+ pip install altimate-engine
9
+ ```
10
+
11
+ For warehouse connectivity (Snowflake, BigQuery, Databricks, etc.):
12
+
13
+ ```
14
+ pip install altimate-engine[warehouses]
15
+ ```
16
+
17
+ ## Usage
18
+
19
+ This package is designed to be used as a sidecar process for the Altimate Code CLI. It communicates via JSON-RPC over stdio.
20
+
21
+ ```bash
22
+ python -m altimate_engine.server
23
+ ```
24
+
25
+ ## Documentation
26
+
27
+ See the main repository for full documentation: https://github.com/AltimateAI/altimate-code
28
+
29
+ ## License
30
+
31
+ MIT
@@ -0,0 +1,66 @@
1
+ [build-system]
2
+ requires = ["hatchling"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [project]
6
+ name = "altimate-engine"
7
+ version = "0.1.0"
8
+ description = "Python engine for Altimate Code — SQL analysis, lineage, dbt integration"
9
+ readme = "README.md"
10
+ license = "MIT"
11
+ requires-python = ">=3.10"
12
+ authors = [
13
+ { name = "Altimate Inc", email = "info@altimate.ai" },
14
+ ]
15
+ keywords = ["sql", "dbt", "data-engineering", "lineage", "snowflake", "bigquery"]
16
+ classifiers = [
17
+ "Development Status :: 4 - Beta",
18
+ "License :: OSI Approved :: MIT License",
19
+ "Programming Language :: Python :: 3",
20
+ "Programming Language :: Python :: 3.10",
21
+ "Programming Language :: Python :: 3.11",
22
+ "Programming Language :: Python :: 3.12",
23
+ "Topic :: Database",
24
+ "Topic :: Software Development :: Libraries",
25
+ ]
26
+ dependencies = [
27
+ "pydantic>=2.0",
28
+ "pyyaml>=6.0",
29
+ ]
30
+
31
+ [project.urls]
32
+ Homepage = "https://github.com/AltimateAI/altimate-code"
33
+ Documentation = "https://altimate-code.sh"
34
+ Repository = "https://github.com/AltimateAI/altimate-code"
35
+ Issues = "https://github.com/AltimateAI/altimate-code/issues"
36
+
37
+ [project.optional-dependencies]
38
+ warehouses = [
39
+ "psycopg2-binary>=2.9",
40
+ "snowflake-connector-python>=3.0",
41
+ "duckdb>=0.9",
42
+ "cryptography>=41.0",
43
+ "google-cloud-bigquery>=3.0",
44
+ "databricks-sql-connector>=3.0",
45
+ "boto3>=1.28",
46
+ "mysql-connector-python>=8.0",
47
+ "pyodbc>=5.0",
48
+ ]
49
+ security = ["keyring>=24.0"]
50
+ docker = ["docker>=7.0"]
51
+ tunneling = ["sshtunnel>=0.4", "paramiko>=3.0"]
52
+ dev = [
53
+ "pytest>=7.0",
54
+ "ruff>=0.4",
55
+ "duckdb>=0.9",
56
+ ]
57
+
58
+ [tool.hatch.build.targets.wheel]
59
+ packages = ["src/altimate_engine"]
60
+
61
+ [tool.pytest.ini_options]
62
+ testpaths = ["tests"]
63
+
64
+ [tool.ruff]
65
+ target-version = "py310"
66
+ line-length = 120
@@ -0,0 +1,3 @@
1
"""Altimate Engine — Python sidecar for the Altimate Code CLI."""

# NOTE: keep in sync with the `version` field in pyproject.toml (currently 0.1.0).
__version__ = "0.1.0"
@@ -0,0 +1,5 @@
1
"""Entry point for `python -m altimate_engine`."""

from altimate_engine.server import main

# Guard the call so importing this module (tooling, docs builders, accidental
# `import altimate_engine.__main__`) does not start the server.  Under
# `python -m altimate_engine` the module runs as __main__, so behavior for the
# intended invocation is unchanged.
if __name__ == "__main__":
    main()
@@ -0,0 +1,162 @@
1
+ """CI cost gate — scan changed SQL files for critical issues.
2
+
3
+ Reads SQL files, runs lint analysis, and returns
4
+ pass/fail based on whether CRITICAL severity issues are found.
5
+
6
+ Skips:
7
+ - Jinja templates ({{ }}, {% %})
8
+ - Parse errors (likely Jinja or non-standard SQL)
9
+ - Non-SQL files
10
+ """
11
+
12
+ from __future__ import annotations
13
+
14
+ import os
15
+ import re
16
+ from typing import Any
17
+
18
+ from altimate_engine.sql.guard import guard_lint
19
+
20
+
21
# Jinja pattern: {{ ... }} or {% ... %} or {# ... #}
# Bodies are non-greedy; DOTALL lets a single marker span multiple lines.
_JINJA_PATTERN = re.compile(r"\{\{.*?\}\}|\{%.*?%\}|\{#.*?#\}", re.DOTALL)
23
+
24
+
25
+ def _has_jinja(sql: str) -> bool:
26
+ """Check if SQL contains Jinja template syntax."""
27
+ return bool(_JINJA_PATTERN.search(sql))
28
+
29
+
30
+ def _split_statements(sql: str) -> list[str]:
31
+ """Split SQL on semicolons, filtering empty statements."""
32
+ statements = []
33
+ for stmt in sql.split(";"):
34
+ stmt = stmt.strip()
35
+ if stmt:
36
+ statements.append(stmt)
37
+ return statements
38
+
39
+
40
def scan_files(
    file_paths: list[str],
    dialect: str = "snowflake",
) -> dict[str, Any]:
    """Scan SQL files for critical issues.

    Each ``.sql`` file is read, split on semicolons, and every statement is
    run through ``guard_lint``.  A finding with severity ``error`` or
    ``critical`` (compared case-insensitively) fails the gate.

    Args:
        file_paths: List of SQL file paths to scan.
        dialect: SQL dialect for analysis (default: snowflake).
            NOTE(review): currently not forwarded to guard_lint — confirm
            whether the linter is meant to receive it.

    Returns:
        Dict with ``passed``/``exit_code``, per-file results in
        ``file_results``, and summary counters.
    """

    def _skipped(path: str, reason: str) -> dict[str, Any]:
        # Uniform record for files that are not analyzed.
        return {"file": path, "status": "skipped", "reason": reason, "issues": []}

    file_results: list[dict[str, Any]] = []
    total_issues = 0
    critical_count = 0
    files_scanned = 0
    files_skipped = 0

    for path in file_paths:
        # Skip non-SQL files.
        if not path.endswith(".sql"):
            files_skipped += 1
            file_results.append(_skipped(path, "not a SQL file"))
            continue

        if not os.path.isfile(path):
            files_skipped += 1
            file_results.append(_skipped(path, "file not found"))
            continue

        try:
            with open(path, "r", encoding="utf-8") as f:
                content = f.read()
        except Exception as e:
            files_skipped += 1
            file_results.append(_skipped(path, f"read error: {e}"))
            continue

        # Jinja-templated SQL cannot be parsed as plain SQL — skip whole file.
        if _has_jinja(content):
            files_skipped += 1
            file_results.append(_skipped(path, "contains Jinja templates"))
            continue

        statements = _split_statements(content)
        if not statements:
            files_skipped += 1
            file_results.append(_skipped(path, "empty file"))
            continue

        files_scanned += 1
        file_issues: list[dict[str, Any]] = []
        file_has_critical = False

        for stmt in statements:
            lint_result = guard_lint(stmt)
            if lint_result.get("error"):
                # Parse error — skip this statement (likely incomplete SQL).
                continue

            # guard_lint results may use either "findings" or "issues".
            for finding in lint_result.get("findings", lint_result.get("issues", [])):
                severity = finding.get("severity", "warning")
                file_issues.append({
                    "type": finding.get("rule", finding.get("type", "UNKNOWN")),
                    "severity": severity,
                    "message": finding.get("message", ""),
                    "source": "lint",
                })
                total_issues += 1
                # Fix: compare case-insensitively so "CRITICAL"/"ERROR" from
                # the linter also trip the gate (original matched lowercase only).
                if str(severity).lower() in ("error", "critical"):
                    critical_count += 1
                    file_has_critical = True

        file_results.append({
            "file": path,
            "status": "fail" if file_has_critical else "pass",
            "issues": file_issues,
        })

    passed = critical_count == 0

    return {
        "success": True,
        "passed": passed,
        "exit_code": 0 if passed else 1,
        "files_scanned": files_scanned,
        "files_skipped": files_skipped,
        "total_issues": total_issues,
        "critical_count": critical_count,
        "file_results": file_results,
        "error": None,
    }
@@ -0,0 +1,281 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import os
5
+ from pathlib import Path
6
+ from typing import Any
7
+
8
+ from altimate_engine.connectors.base import Connector
9
+ from altimate_engine.credential_store import resolve_config
10
+ from altimate_engine.ssh_tunnel import start, stop
11
+
12
# Config keys that describe an optional SSH tunnel.  ConnectionRegistry.get()
# pops these out of the connection config before the remaining keys are handed
# to the concrete Connector.
SSH_FIELDS = {
    "ssh_host",
    "ssh_port",
    "ssh_user",
    "ssh_auth_type",
    "ssh_key_path",
    "ssh_password",
}
20
+
21
+
22
class ConnectionRegistry:
    """Process-wide registry of named warehouse connections.

    Connections are merged from three layers, later layers winning:
    the global ``~/.altimate-code/connections.json``, the project-local
    ``./.altimate-code/connections.json``, and ``ALTIMATE_CODE_CONN_<NAME>``
    environment variables whose values are JSON objects.
    """

    # Cached name -> raw config mapping, populated lazily by load().
    _connections: dict[str, dict[str, Any]] = {}
    _loaded: bool = False

    @classmethod
    def load(cls) -> None:
        """Populate the cache from config files and environment (idempotent)."""
        if cls._loaded:
            return

        global_config = Path.home() / ".altimate-code" / "connections.json"
        if global_config.exists():
            with open(global_config) as f:
                cls._connections.update(json.load(f))

        # Project config overrides global entries of the same name.
        project_config = Path.cwd() / ".altimate-code" / "connections.json"
        if project_config.exists():
            with open(project_config) as f:
                cls._connections.update(json.load(f))

        # Env vars override both file layers.
        for key, value in os.environ.items():
            if key.startswith("ALTIMATE_CODE_CONN_"):
                name = key[len("ALTIMATE_CODE_CONN_") :].lower()
                try:
                    cls._connections[name] = json.loads(value)
                except json.JSONDecodeError:
                    # Malformed env var — ignore rather than break startup.
                    pass

        cls._loaded = True

    @classmethod
    def get(cls, name: str) -> Connector:
        """Build a Connector for the named connection.

        Resolves stored credentials, optionally starts an SSH tunnel, and
        dispatches on the config's ``type`` field (default ``duckdb``).

        Raises:
            ValueError: unknown connection name, unsupported type, or SSH
                tunneling combined with ``connection_string``.
        """
        cls.load()

        if name not in cls._connections:
            raise ValueError(f"Connection '{name}' not found in registry")

        # Copy so credential resolution / SSH stripping never mutate the cache.
        config = dict(cls._connections[name])
        config = resolve_config(name, config)

        ssh_host = config.get("ssh_host")
        if ssh_host:
            if config.get("connection_string"):
                raise ValueError(
                    "SSH tunneling requires explicit host/port — "
                    "cannot be used with connection_string"
                )
            # Strip SSH-only keys so they are not passed to the connector.
            ssh_config = {
                k: config.pop(k) for k in list(config.keys()) if k in SSH_FIELDS
            }
            local_port = start(
                name=name,
                ssh_host=ssh_config.get("ssh_host", ""),
                remote_host=config.get("host", "localhost"),
                remote_port=config.get("port", 5432),
                ssh_port=ssh_config.get("ssh_port", 22),
                ssh_user=ssh_config.get("ssh_user"),
                ssh_auth_type=ssh_config.get("ssh_auth_type", "key"),
                ssh_key_path=ssh_config.get("ssh_key_path"),
                ssh_password=ssh_config.get("ssh_password"),
            )
            # Point the connector at the local end of the tunnel.
            config["host"] = "127.0.0.1"
            config["port"] = local_port

        dialect = config.get("type", "duckdb")

        if dialect == "duckdb":
            from altimate_engine.connectors.duckdb import DuckDBConnector

            return DuckDBConnector(
                path=config.get("path", ":memory:"),
                **{k: v for k, v in config.items() if k not in ("type", "path")},
            )
        elif dialect == "postgres":
            from altimate_engine.connectors.postgres import PostgresConnector

            return PostgresConnector(
                connection_string=config.get("connection_string", ""),
                **{
                    k: v
                    for k, v in config.items()
                    if k not in ("type", "connection_string")
                },
            )
        elif dialect == "snowflake":
            from altimate_engine.connectors.snowflake import SnowflakeConnector

            _snowflake_keys = {
                "type",
                "account",
                "user",
                "password",
                "private_key_path",
                "private_key_passphrase",
                "warehouse",
                "database",
                "schema",
                "role",
            }
            return SnowflakeConnector(
                account=config.get("account", ""),
                user=config.get("user", ""),
                password=config.get("password"),
                private_key_path=config.get("private_key_path"),
                private_key_passphrase=config.get("private_key_passphrase"),
                warehouse=config.get("warehouse"),
                database=config.get("database"),
                schema=config.get("schema"),
                role=config.get("role"),
                **{k: v for k, v in config.items() if k not in _snowflake_keys},
            )
        elif dialect == "bigquery":
            from altimate_engine.connectors.bigquery import BigQueryConnector

            _bigquery_keys = {"type", "project", "credentials_path", "location"}
            return BigQueryConnector(
                project=config.get("project", ""),
                credentials_path=config.get("credentials_path"),
                location=config.get("location", "US"),
                **{k: v for k, v in config.items() if k not in _bigquery_keys},
            )
        elif dialect == "databricks":
            from altimate_engine.connectors.databricks import DatabricksConnector

            _databricks_keys = {
                "type",
                "server_hostname",
                "http_path",
                "access_token",
                "catalog",
                "schema",
            }
            return DatabricksConnector(
                server_hostname=config.get("server_hostname", ""),
                http_path=config.get("http_path", ""),
                access_token=config.get("access_token"),
                catalog=config.get("catalog"),
                schema=config.get("schema"),
                **{k: v for k, v in config.items() if k not in _databricks_keys},
            )
        elif dialect == "redshift":
            from altimate_engine.connectors.redshift import RedshiftConnector

            _redshift_keys = {
                "type",
                "host",
                "port",
                "database",
                "user",
                "password",
                "connection_string",
                "iam_role",
                "region",
                "cluster_identifier",
            }
            return RedshiftConnector(
                host=config.get("host", ""),
                port=config.get("port", 5439),
                database=config.get("database", "dev"),
                user=config.get("user"),
                password=config.get("password"),
                connection_string=config.get("connection_string"),
                iam_role=config.get("iam_role"),
                region=config.get("region"),
                cluster_identifier=config.get("cluster_identifier"),
                **{k: v for k, v in config.items() if k not in _redshift_keys},
            )
        elif dialect == "mysql":
            from altimate_engine.connectors.mysql import MySQLConnector

            _mysql_keys = {
                "type",
                "host",
                "port",
                "database",
                "user",
                "password",
                "ssl_ca",
                "ssl_cert",
                "ssl_key",
            }
            return MySQLConnector(
                host=config.get("host", "localhost"),
                port=config.get("port", 3306),
                database=config.get("database"),
                user=config.get("user"),
                password=config.get("password"),
                ssl_ca=config.get("ssl_ca"),
                ssl_cert=config.get("ssl_cert"),
                ssl_key=config.get("ssl_key"),
                **{k: v for k, v in config.items() if k not in _mysql_keys},
            )
        elif dialect == "sqlserver":
            from altimate_engine.connectors.sqlserver import SQLServerConnector

            _sqlserver_keys = {
                "type",
                "host",
                "port",
                "database",
                "user",
                "password",
                "driver",
                "azure_auth",
                "trust_server_certificate",
            }
            return SQLServerConnector(
                host=config.get("host", "localhost"),
                port=config.get("port", 1433),
                database=config.get("database"),
                user=config.get("user"),
                password=config.get("password"),
                driver=config.get("driver", "ODBC Driver 18 for SQL Server"),
                azure_auth=config.get("azure_auth", False),
                trust_server_certificate=config.get("trust_server_certificate", False),
                **{k: v for k, v in config.items() if k not in _sqlserver_keys},
            )
        else:
            raise ValueError(f"Unsupported connector type: {dialect}")

    @classmethod
    def list(cls) -> list[dict[str, Any]]:
        """Return ``{"name", "type"}`` summaries for all known connections."""
        cls.load()
        return [
            {"name": name, "type": config.get("type", "unknown")}
            for name, config in cls._connections.items()
        ]

    @classmethod
    def test(cls, name: str) -> dict[str, Any]:
        """Try connecting and running ``SELECT 1``; never raises.

        Returns:
            ``{"connected": bool, "error": str | None}``.
        """
        try:
            connector = cls.get(name)
            connector.connect()
            connector.execute("SELECT 1")
            connector.close()
            return {"connected": True, "error": None}
        except Exception as e:
            return {"connected": False, "error": str(e)}
        finally:
            # Tear down any tunnel opened by get().  NOTE(review): assumes
            # ssh_tunnel.stop() is a no-op when no tunnel exists — confirm.
            stop(name)

    @classmethod
    def add(cls, name: str, config: dict[str, Any]) -> dict[str, Any]:
        """Persist a connection and invalidate the in-memory cache."""
        from altimate_engine.credential_store import save_connection

        result = save_connection(name, config)
        # Drop the cache entirely so the next load() reflects the store.
        cls.reload()
        return result

    @classmethod
    def remove(cls, name: str) -> bool:
        """Delete a stored connection and invalidate the in-memory cache.

        Bug fix: previously only ``_loaded`` was reset, but ``load()`` merges
        into ``_connections`` without clearing it, so the removed entry
        lingered in the cache and remained retrievable via get()/list().
        """
        from altimate_engine.credential_store import remove_connection

        result = remove_connection(name)
        cls.reload()
        return result

    @classmethod
    def reload(cls) -> None:
        """Clear the cache so the next access re-reads files and environment."""
        cls._loaded = False
        cls._connections.clear()
@@ -0,0 +1,21 @@
1
# Re-export the abstract base plus every concrete connector so callers can
# simply `from altimate_engine.connectors import SnowflakeConnector`, etc.
from altimate_engine.connectors.base import Connector
from altimate_engine.connectors.duckdb import DuckDBConnector
from altimate_engine.connectors.postgres import PostgresConnector
from altimate_engine.connectors.snowflake import SnowflakeConnector
from altimate_engine.connectors.bigquery import BigQueryConnector
from altimate_engine.connectors.databricks import DatabricksConnector
from altimate_engine.connectors.redshift import RedshiftConnector
from altimate_engine.connectors.mysql import MySQLConnector
from altimate_engine.connectors.sqlserver import SQLServerConnector

# Explicit public API of the package.
__all__ = [
    "Connector",
    "DuckDBConnector",
    "PostgresConnector",
    "SnowflakeConnector",
    "BigQueryConnector",
    "DatabricksConnector",
    "RedshiftConnector",
    "MySQLConnector",
    "SQLServerConnector",
]