sql_glider-0.1.2-py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in those public registries.
- sql_glider-0.1.2.dist-info/METADATA +721 -0
- sql_glider-0.1.2.dist-info/RECORD +26 -0
- sql_glider-0.1.2.dist-info/WHEEL +4 -0
- sql_glider-0.1.2.dist-info/entry_points.txt +6 -0
- sql_glider-0.1.2.dist-info/licenses/LICENSE +201 -0
- sqlglider/__init__.py +3 -0
- sqlglider/_version.py +34 -0
- sqlglider/cli.py +1137 -0
- sqlglider/global_models.py +17 -0
- sqlglider/graph/__init__.py +42 -0
- sqlglider/graph/builder.py +310 -0
- sqlglider/graph/merge.py +136 -0
- sqlglider/graph/models.py +289 -0
- sqlglider/graph/query.py +287 -0
- sqlglider/graph/serialization.py +107 -0
- sqlglider/lineage/__init__.py +10 -0
- sqlglider/lineage/analyzer.py +1183 -0
- sqlglider/lineage/formatters.py +335 -0
- sqlglider/templating/__init__.py +51 -0
- sqlglider/templating/base.py +103 -0
- sqlglider/templating/jinja.py +163 -0
- sqlglider/templating/registry.py +124 -0
- sqlglider/templating/variables.py +295 -0
- sqlglider/utils/__init__.py +11 -0
- sqlglider/utils/config.py +130 -0
- sqlglider/utils/file_utils.py +38 -0
sqlglider/templating/registry.py
@@ -0,0 +1,124 @@
+"""Templater registry with plugin discovery via entry points.
+
+This module handles discovering and instantiating templaters from
+Python entry points, allowing third-party packages to register
+custom templaters.
+"""
+
+import sys
+from typing import Dict, List, Type
+
+from sqlglider.templating.base import Templater, TemplaterError
+
+# Cache for discovered templaters
+_templater_cache: Dict[str, Type[Templater]] = {}
+_discovery_done: bool = False
+
+
+def _discover_templaters() -> None:
+    """Discover templaters from entry points.
+
+    Uses importlib.metadata to find all registered templaters
+    in the 'sqlglider.templaters' entry point group.
+    """
+    global _discovery_done, _templater_cache
+
+    if _discovery_done:
+        return
+
+    if sys.version_info >= (3, 10):
+        from importlib.metadata import entry_points
+
+        eps = entry_points(group="sqlglider.templaters")
+    else:
+        from importlib.metadata import entry_points
+
+        all_eps = entry_points()
+        eps = all_eps.get("sqlglider.templaters", [])
+
+    for ep in eps:
+        try:
+            templater_class = ep.load()
+            if isinstance(templater_class, type) and issubclass(
+                templater_class, Templater
+            ):
+                _templater_cache[ep.name] = templater_class
+        except Exception:
+            # Skip templaters that fail to load
+            # This allows graceful handling of missing optional dependencies
+            pass
+
+    _discovery_done = True
+
+
+def get_templater(name: str) -> Templater:
+    """Get a templater instance by name.
+
+    Args:
+        name: The name of the templater (e.g., "jinja", "none").
+
+    Returns:
+        An instance of the requested templater.
+
+    Raises:
+        TemplaterError: If the templater is not found.
+
+    Example:
+        >>> templater = get_templater("jinja")
+        >>> rendered = templater.render("SELECT * FROM {{ table }}", {"table": "users"})
+    """
+    _discover_templaters()
+
+    if name not in _templater_cache:
+        available = ", ".join(sorted(_templater_cache.keys()))
+        raise TemplaterError(
+            f"Unknown templater '{name}'. Available templaters: {available or 'none'}"
+        )
+
+    return _templater_cache[name]()
+
+
+def list_templaters() -> List[str]:
+    """List all available templater names.
+
+    Returns:
+        A sorted list of available templater names.
+
+    Example:
+        >>> templaters = list_templaters()
+        >>> print(templaters)
+        ['jinja', 'none']
+    """
+    _discover_templaters()
+    return sorted(_templater_cache.keys())
+
+
+def register_templater(name: str, templater_class: Type[Templater]) -> None:
+    """Register a templater programmatically.
+
+    This is primarily useful for testing or for registering templaters
+    that aren't installed via entry points.
+
+    Args:
+        name: The name to register the templater under.
+        templater_class: The templater class to register.
+
+    Raises:
+        ValueError: If templater_class is not a subclass of Templater.
+    """
+    if not isinstance(templater_class, type) or not issubclass(
+        templater_class, Templater
+    ):
+        raise ValueError(f"{templater_class} must be a subclass of Templater")
+
+    _templater_cache[name] = templater_class
+
+
+def clear_registry() -> None:
+    """Clear the templater registry.
+
+    This is primarily useful for testing.
+    """
+    global _discovery_done, _templater_cache
+    _templater_cache.clear()
+    _discovery_done = False
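The registry's public surface is just get_templater, list_templaters, register_templater, and clear_registry. As a rough sketch of how a caller might exercise it (assuming, per the get_templater docstring, that Templater subclasses implement a render(sql, variables) method, and using a made-up UppercaseTemplater class that is not part of the package), programmatic registration could look like this:

# Hypothetical sketch; UppercaseTemplater and the render() signature are
# assumptions drawn from the docstrings above, not package code.
from typing import Any, Dict

from sqlglider.templating.base import Templater
from sqlglider.templating.registry import get_templater, list_templaters, register_templater


class UppercaseTemplater(Templater):
    """Toy templater that upper-cases the SQL and ignores variables."""

    def render(self, sql: str, variables: Dict[str, Any]) -> str:
        return sql.upper()


register_templater("uppercase", UppercaseTemplater)
print(list_templaters())                                  # e.g. ['jinja', 'none', 'uppercase']
print(get_templater("uppercase").render("select 1", {}))  # SELECT 1

Note that get_templater instantiates the cached class with no arguments, so a custom templater needs a zero-argument constructor.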
sqlglider/templating/variables.py
@@ -0,0 +1,295 @@
+"""Variable loading utilities for SQL templating.
+
+This module provides functions for loading template variables from
+multiple sources with a defined priority order:
+1. CLI arguments (highest priority)
+2. Variables file (JSON/YAML/TOML)
+3. Config file inline variables
+4. Environment variables (lowest priority)
+"""
+
+import json
+import os
+import tomllib
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+from rich.console import Console
+
+console = Console(stderr=True)
+
+
+def load_variables_file(path: Path) -> Dict[str, Any]:
+    """Load variables from a JSON, YAML, or TOML file.
+
+    Args:
+        path: Path to the variables file. Must have .json, .yaml, .yml, or .toml extension.
+
+    Returns:
+        A dictionary of variables loaded from the file.
+
+    Raises:
+        FileNotFoundError: If the file does not exist.
+        ValueError: If the file format is not supported or cannot be parsed.
+    """
+    if not path.exists():
+        raise FileNotFoundError(f"Variables file not found: {path}")
+
+    suffix = path.suffix.lower()
+
+    if suffix == ".json":
+        return _load_json_file(path)
+    elif suffix in (".yaml", ".yml"):
+        return _load_yaml_file(path)
+    elif suffix == ".toml":
+        return _load_toml_file(path)
+    else:
+        raise ValueError(
+            f"Unsupported variables file format: {suffix}. "
+            "Use .json, .yaml, .yml, or .toml"
+        )
+
+
+def _load_json_file(path: Path) -> Dict[str, Any]:
+    """Load variables from a JSON file."""
+    try:
+        with open(path, "r", encoding="utf-8") as f:
+            data = json.load(f)
+
+        if not isinstance(data, dict):
+            raise ValueError(
+                f"Variables file {path} must contain a JSON object, "
+                f"got {type(data).__name__}"
+            )
+
+        return data
+
+    except json.JSONDecodeError as e:
+        raise ValueError(f"Invalid JSON in {path}: {e}") from e
+
+
+def _load_yaml_file(path: Path) -> Dict[str, Any]:
+    """Load variables from a YAML file.
+
+    Requires PyYAML to be installed.
+    """
+    try:
+        import yaml
+    except ImportError:
+        raise ValueError(
+            f"Cannot load YAML file {path}: PyYAML is not installed. "
+            "Install it with: uv add pyyaml"
+        )
+
+    try:
+        with open(path, "r", encoding="utf-8") as f:
+            data = yaml.safe_load(f)
+
+        if data is None:
+            return {}
+
+        if not isinstance(data, dict):
+            raise ValueError(
+                f"Variables file {path} must contain a YAML mapping, "
+                f"got {type(data).__name__}"
+            )
+
+        return data
+
+    except yaml.YAMLError as e:
+        raise ValueError(f"Invalid YAML in {path}: {e}") from e
+
+
+def _load_toml_file(path: Path) -> Dict[str, Any]:
+    """Load variables from a TOML file.
+
+    Uses Python 3.11+ built-in tomllib.
+    """
+    try:
+        with open(path, "rb") as f:
+            data = tomllib.load(f)
+
+        if not isinstance(data, dict):
+            raise ValueError(
+                f"Variables file {path} must contain a TOML table, "
+                f"got {type(data).__name__}"
+            )
+
+        return data
+
+    except tomllib.TOMLDecodeError as e:
+        raise ValueError(f"Invalid TOML in {path}: {e}") from e
+
+
+def parse_cli_variables(var_args: Optional[List[str]]) -> Dict[str, Any]:
+    """Parse CLI variable arguments in key=value format.
+
+    Supports basic type inference:
+    - Integers: 123
+    - Floats: 12.34
+    - Booleans: true, false (case-insensitive)
+    - Strings: everything else
+
+    Args:
+        var_args: List of variable strings in "key=value" format.
+
+    Returns:
+        A dictionary of parsed variables.
+
+    Raises:
+        ValueError: If a variable string is not in key=value format.
+    """
+    if not var_args:
+        return {}
+
+    variables: Dict[str, Any] = {}
+
+    for var_str in var_args:
+        if "=" not in var_str:
+            raise ValueError(
+                f"Invalid variable format: '{var_str}'. Expected 'key=value'"
+            )
+
+        key, value = var_str.split("=", 1)
+        key = key.strip()
+        value = value.strip()
+
+        if not key:
+            raise ValueError(f"Empty variable name in: '{var_str}'")
+
+        variables[key] = _infer_type(value)
+
+    return variables
+
+
+def _infer_type(value: str) -> Any:
+    """Infer the type of a string value.
+
+    Args:
+        value: The string value to parse.
+
+    Returns:
+        The value converted to the inferred type.
+    """
+    # Check for boolean
+    if value.lower() == "true":
+        return True
+    if value.lower() == "false":
+        return False
+
+    # Check for integer
+    try:
+        return int(value)
+    except ValueError:
+        pass
+
+    # Check for float
+    try:
+        return float(value)
+    except ValueError:
+        pass
+
+    # Default to string
+    return value
+
+
+def load_env_variables(prefix: str = "SQLGLIDER_VAR_") -> Dict[str, Any]:
+    """Load variables from environment variables.
+
+    Environment variables with the specified prefix are loaded and the
+    prefix is stripped from the key. For example, SQLGLIDER_VAR_SCHEMA
+    becomes the variable "schema" (lowercased).
+
+    Args:
+        prefix: The prefix to look for in environment variable names.
+
+    Returns:
+        A dictionary of variables from environment variables.
+    """
+    variables: Dict[str, Any] = {}
+
+    for key, value in os.environ.items():
+        if key.startswith(prefix):
+            # Remove prefix and lowercase the key
+            var_name = key[len(prefix) :].lower()
+            if var_name:
+                variables[var_name] = _infer_type(value)
+
+    return variables
+
+
+def merge_variables(*sources: Optional[Dict[str, Any]]) -> Dict[str, Any]:
+    """Merge variables from multiple sources with priority.
+
+    Later sources in the argument list have higher priority and will
+    override values from earlier sources.
+
+    Args:
+        *sources: Variable dictionaries to merge, from lowest to highest priority.
+            None values are skipped.
+
+    Returns:
+        A merged dictionary of variables.
+
+    Example:
+        >>> env_vars = {"schema": "default", "table": "users"}
+        >>> config_vars = {"schema": "config_schema"}
+        >>> cli_vars = {"schema": "cli_schema", "column": "id"}
+        >>> merged = merge_variables(env_vars, config_vars, cli_vars)
+        >>> print(merged)
+        {"schema": "cli_schema", "table": "users", "column": "id"}
+    """
+    result: Dict[str, Any] = {}
+
+    for source in sources:
+        if source is not None:
+            result.update(source)
+
+    return result
+
+
+def load_all_variables(
+    cli_vars: Optional[List[str]] = None,
+    vars_file: Optional[Path] = None,
+    config_vars: Optional[Dict[str, Any]] = None,
+    use_env: bool = True,
+) -> Dict[str, Any]:
+    """Load and merge variables from all sources.
+
+    Priority order (highest to lowest):
+    1. CLI arguments
+    2. Variables file
+    3. Config file inline variables
+    4. Environment variables
+
+    Args:
+        cli_vars: List of CLI variable strings in "key=value" format.
+        vars_file: Path to a variables file (JSON, YAML, or TOML).
+        config_vars: Variables from the configuration file.
+        use_env: Whether to load environment variables.
+
+    Returns:
+        A merged dictionary of variables from all sources.
+    """
+    sources: List[Optional[Dict[str, Any]]] = []
+
+    # Load in priority order (lowest first)
+    if use_env:
+        sources.append(load_env_variables())
+
+    if config_vars:
+        sources.append(config_vars)
+
+    if vars_file:
+        try:
+            sources.append(load_variables_file(vars_file))
+        except (FileNotFoundError, ValueError) as e:
+            console.print(f"[yellow]Warning:[/yellow] {e}")
+
+    if cli_vars:
+        try:
+            sources.append(parse_cli_variables(cli_vars))
+        except ValueError as e:
+            console.print(f"[yellow]Warning:[/yellow] {e}")
+
+    return merge_variables(*sources)
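The precedence described in this module can be checked with its public helpers. The sketch below is illustrative only; the variable names and values are invented, and load_env_variables will also pick up any other SQLGLIDER_VAR_* variables present in the environment:

# Illustrative sketch of the documented precedence: env < config < file < CLI.
import os

from sqlglider.templating.variables import (
    load_all_variables,
    load_env_variables,
    merge_variables,
    parse_cli_variables,
)

os.environ["SQLGLIDER_VAR_SCHEMA"] = "env_schema"

env_vars = load_env_variables()      # contains at least {'schema': 'env_schema'}
cli_vars = parse_cli_variables(["schema=analytics", "limit=10", "dry_run=true"])
# type inference yields {'schema': 'analytics', 'limit': 10, 'dry_run': True}

print(merge_variables(env_vars, cli_vars))
# later sources win: {'schema': 'analytics', 'limit': 10, 'dry_run': True}

merged = load_all_variables(
    cli_vars=["schema=analytics"],
    config_vars={"schema": "config_schema", "table": "users"},
)
print(merged["schema"], merged["table"])   # analytics users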
sqlglider/utils/__init__.py
@@ -0,0 +1,11 @@
+"""Utility functions for SQL Glider."""
+
+from sqlglider.utils.config import ConfigSettings, find_config_file, load_config
+from sqlglider.utils.file_utils import read_sql_file
+
+__all__ = [
+    "ConfigSettings",
+    "find_config_file",
+    "load_config",
+    "read_sql_file",
+]
sqlglider/utils/config.py
@@ -0,0 +1,130 @@
+"""Configuration management for SQL Glider.
+
+Loads configuration from sqlglider.toml in the current working directory.
+"""
+
+import tomllib
+from pathlib import Path
+from typing import Any, Dict, Optional
+
+from pydantic import BaseModel
+from rich.console import Console
+
+console = Console(stderr=True)
+
+
+class TemplatingConfig(BaseModel):
+    """Configuration for the templating system.
+
+    All fields are optional.
+    """
+
+    variables_file: Optional[str] = None
+    variables: Optional[Dict[str, Any]] = None
+
+
+class ConfigSettings(BaseModel):
+    """Configuration settings for SQL Glider.
+
+    All fields are optional. None values indicate the setting was not
+    specified in the config file.
+    """
+
+    dialect: Optional[str] = None
+    level: Optional[str] = None
+    output_format: Optional[str] = None
+    templater: Optional[str] = None
+    templating: Optional[TemplatingConfig] = None
+
+
+def find_config_file(start_path: Optional[Path] = None) -> Optional[Path]:
+    """Find sqlglider.toml in the current working directory.
+
+    Args:
+        start_path: Starting directory to search for config file.
+            Defaults to current working directory.
+
+    Returns:
+        Path to config file if found, None otherwise.
+    """
+    if start_path is None:
+        start_path = Path.cwd()
+
+    config_path = start_path / "sqlglider.toml"
+
+    if config_path.exists() and config_path.is_file():
+        return config_path
+
+    return None
+
+
+def load_config(config_path: Optional[Path] = None) -> ConfigSettings:
+    """Load configuration from sqlglider.toml.
+
+    Priority order:
+    1. Explicit config_path parameter
+    2. sqlglider.toml in current working directory
+    3. Empty ConfigSettings (all None)
+
+    Args:
+        config_path: Optional explicit path to config file.
+            If not provided, searches current working directory.
+
+    Returns:
+        ConfigSettings with values from TOML file or None for unset fields.
+        Always returns a valid ConfigSettings object, even on errors.
+
+    Error Handling:
+        - Missing file: Returns empty ConfigSettings (silent)
+        - Malformed TOML: Warns user and returns empty ConfigSettings
+        - Invalid values: Warns user and sets affected fields to None
+        - Unknown keys: Ignored (forward compatibility)
+    """
+    # Find config file
+    if config_path is None:
+        config_path = find_config_file()
+
+    # No config file found - return empty settings
+    if config_path is None:
+        return ConfigSettings()
+
+    try:
+        # Read and parse TOML file
+        with open(config_path, "rb") as f:
+            toml_data = tomllib.load(f)
+
+        # Extract sqlglider section
+        sqlglider_config = toml_data.get("sqlglider", {})
+
+        # Validate and create ConfigSettings
+        # Pydantic will validate types and ignore unknown fields
+        try:
+            return ConfigSettings(**sqlglider_config)
+        except Exception as e:
+            console.print(
+                f"[yellow]Warning:[/yellow] Invalid configuration in {config_path}: {e}",
+            )
+            console.print("[yellow]Using default settings[/yellow]")
+            return ConfigSettings()
+
+    except tomllib.TOMLDecodeError as e:
+        console.print(
+            f"[yellow]Warning:[/yellow] Failed to parse {config_path}: {e}",
+        )
+        console.print("[yellow]Using default settings[/yellow]")
+        return ConfigSettings()
+
+    except (OSError, IOError) as e:
+        console.print(
+            f"[yellow]Warning:[/yellow] Could not read {config_path}: {e}",
+        )
+        console.print("[yellow]Using default settings[/yellow]")
+        return ConfigSettings()
+
+    except Exception as e:
+        # Catch-all for unexpected errors
+        console.print(
+            f"[yellow]Warning:[/yellow] Unexpected error loading config: {e}",
+        )
+        console.print("[yellow]Using default settings[/yellow]")
+        return ConfigSettings()
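Since load_config never raises, the quickest way to see what it returns is to point it at a throwaway sqlglider.toml. The [sqlglider] table and its keys below mirror the ConfigSettings and TemplatingConfig fields defined above; the concrete values are invented for illustration:

# Sketch: write a temporary sqlglider.toml and load it (values are made up).
import tempfile
from pathlib import Path

from sqlglider.utils.config import load_config

toml_text = """
[sqlglider]
dialect = "snowflake"
templater = "jinja"

[sqlglider.templating]
variables_file = "vars.yaml"
variables = { schema = "analytics" }
"""

with tempfile.TemporaryDirectory() as tmp:
    cfg_path = Path(tmp) / "sqlglider.toml"
    cfg_path.write_text(toml_text, encoding="utf-8")

    settings = load_config(cfg_path)
    print(settings.dialect)               # snowflake
    print(settings.templating.variables)  # {'schema': 'analytics'}
    print(settings.level)                 # None (not set in the file)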
sqlglider/utils/file_utils.py
@@ -0,0 +1,38 @@
+"""File utility functions for SQL Glider."""
+
+from pathlib import Path
+
+
+def read_sql_file(file_path: Path) -> str:
+    """
+    Read a SQL file and return its contents as a string.
+
+    Args:
+        file_path: Path to the SQL file to read
+
+    Returns:
+        The contents of the SQL file as a string
+
+    Raises:
+        FileNotFoundError: If the file does not exist
+        PermissionError: If the file cannot be read
+        UnicodeDecodeError: If the file encoding is not UTF-8
+    """
+    if not file_path.exists():
+        raise FileNotFoundError(f"SQL file not found: {file_path}")
+
+    if not file_path.is_file():
+        raise ValueError(f"Path is not a file: {file_path}")
+
+    try:
+        return file_path.read_text(encoding="utf-8")
+    except PermissionError as e:
+        raise PermissionError(f"Cannot read file {file_path}: {e}") from e
+    except UnicodeDecodeError as e:
+        raise UnicodeDecodeError(
+            e.encoding,
+            e.object,
+            e.start,
+            e.end,
+            f"File {file_path} is not valid UTF-8: {e.reason}",
+        ) from e
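Taken together, these utilities suggest a small read-and-render pipeline. The sketch below wires read_sql_file, load_all_variables, and the templater registry together; the file path is illustrative and the render(sql, variables) call is an assumption based on the get_templater docstring, not verbatim package usage:

# Hypothetical end-to-end sketch: read a templated SQL file, resolve variables
# from the documented sources, and render it with the "jinja" templater.
from pathlib import Path

from sqlglider.templating.registry import get_templater
from sqlglider.templating.variables import load_all_variables
from sqlglider.utils.file_utils import read_sql_file

sql_path = Path("queries/daily_orders.sql")   # illustrative path
raw_sql = read_sql_file(sql_path)             # e.g. "SELECT * FROM {{ table }}"

variables = load_all_variables(cli_vars=["table=orders"])
templater = get_templater("jinja")            # assumes the jinja templater is installed
print(templater.render(raw_sql, variables))   # SELECT * FROM orders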