anysite-cli 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Potentially problematic release.
This version of anysite-cli might be problematic.
- anysite/__init__.py +4 -0
- anysite/__main__.py +6 -0
- anysite/api/__init__.py +21 -0
- anysite/api/client.py +271 -0
- anysite/api/errors.py +137 -0
- anysite/api/schemas.py +333 -0
- anysite/batch/__init__.py +1 -0
- anysite/batch/executor.py +176 -0
- anysite/batch/input.py +160 -0
- anysite/batch/rate_limiter.py +98 -0
- anysite/cli/__init__.py +1 -0
- anysite/cli/config.py +176 -0
- anysite/cli/executor.py +388 -0
- anysite/cli/options.py +249 -0
- anysite/config/__init__.py +11 -0
- anysite/config/paths.py +46 -0
- anysite/config/settings.py +187 -0
- anysite/dataset/__init__.py +37 -0
- anysite/dataset/analyzer.py +268 -0
- anysite/dataset/cli.py +644 -0
- anysite/dataset/collector.py +686 -0
- anysite/dataset/db_loader.py +248 -0
- anysite/dataset/errors.py +30 -0
- anysite/dataset/exporters.py +121 -0
- anysite/dataset/history.py +153 -0
- anysite/dataset/models.py +245 -0
- anysite/dataset/notifications.py +87 -0
- anysite/dataset/scheduler.py +107 -0
- anysite/dataset/storage.py +171 -0
- anysite/dataset/transformer.py +213 -0
- anysite/db/__init__.py +38 -0
- anysite/db/adapters/__init__.py +1 -0
- anysite/db/adapters/base.py +158 -0
- anysite/db/adapters/postgres.py +201 -0
- anysite/db/adapters/sqlite.py +183 -0
- anysite/db/cli.py +687 -0
- anysite/db/config.py +92 -0
- anysite/db/manager.py +166 -0
- anysite/db/operations/__init__.py +1 -0
- anysite/db/operations/insert.py +199 -0
- anysite/db/operations/query.py +43 -0
- anysite/db/schema/__init__.py +1 -0
- anysite/db/schema/inference.py +213 -0
- anysite/db/schema/types.py +71 -0
- anysite/db/utils/__init__.py +1 -0
- anysite/db/utils/sanitize.py +99 -0
- anysite/main.py +498 -0
- anysite/models/__init__.py +1 -0
- anysite/output/__init__.py +11 -0
- anysite/output/console.py +45 -0
- anysite/output/formatters.py +301 -0
- anysite/output/templates.py +76 -0
- anysite/py.typed +0 -0
- anysite/streaming/__init__.py +1 -0
- anysite/streaming/progress.py +121 -0
- anysite/streaming/writer.py +130 -0
- anysite/utils/__init__.py +1 -0
- anysite/utils/fields.py +242 -0
- anysite/utils/retry.py +109 -0
- anysite_cli-0.1.0.dist-info/METADATA +437 -0
- anysite_cli-0.1.0.dist-info/RECORD +64 -0
- anysite_cli-0.1.0.dist-info/WHEEL +4 -0
- anysite_cli-0.1.0.dist-info/entry_points.txt +2 -0
- anysite_cli-0.1.0.dist-info/licenses/LICENSE +21 -0
anysite/db/config.py
ADDED
@@ -0,0 +1,92 @@
"""Database connection configuration."""

from __future__ import annotations

import os
from enum import Enum
from typing import Any

from pydantic import BaseModel, model_validator


class DatabaseType(str, Enum):
    """Supported database types."""

    SQLITE = "sqlite"
    POSTGRES = "postgres"
    MYSQL = "mysql"
    DUCKDB = "duckdb"


class ConnectionConfig(BaseModel):
    """Configuration for a database connection."""

    name: str
    type: DatabaseType
    host: str | None = None
    port: int | None = None
    database: str | None = None
    user: str | None = None
    password_env: str | None = None
    url_env: str | None = None
    path: str | None = None
    ssl: bool = False
    options: dict[str, Any] = {}

    @model_validator(mode="after")
    def validate_config(self) -> ConnectionConfig:
        """Validate that required fields are present for the database type."""
        if self.type in (DatabaseType.SQLITE, DatabaseType.DUCKDB) and not self.path:
            raise ValueError(f"{self.type.value} requires 'path'")
        if self.type in (DatabaseType.POSTGRES, DatabaseType.MYSQL) and not self.url_env and not self.host:
            raise ValueError(f"{self.type.value} requires 'host' or 'url_env'")
        return self

    def get_password(self) -> str | None:
        """Resolve password from environment variable."""
        if self.password_env:
            value = os.environ.get(self.password_env)
            if value is None:
                raise ValueError(
                    f"Environment variable '{self.password_env}' is not set"
                )
            return value
        return None

    def get_url(self) -> str | None:
        """Resolve connection URL from environment variable."""
        if self.url_env:
            value = os.environ.get(self.url_env)
            if value is None:
                raise ValueError(
                    f"Environment variable '{self.url_env}' is not set"
                )
            return value
        return None

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary for YAML serialization, omitting None values."""
        data: dict[str, Any] = {"type": self.type.value}
        for field in ("host", "port", "database", "user", "password_env", "url_env", "path"):
            value = getattr(self, field)
            if value is not None:
                data[field] = value
        if self.ssl:
            data["ssl"] = True
        if self.options:
            data["options"] = self.options
        return data

    @classmethod
    def from_dict(cls, name: str, data: dict[str, Any]) -> ConnectionConfig:
        """Create a ConnectionConfig from a dictionary."""
        return cls(name=name, **data)


class OnConflict(str, Enum):
    """Conflict resolution strategy for inserts."""

    ERROR = "error"
    IGNORE = "ignore"
    REPLACE = "replace"
    UPDATE = "update"
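A rough usage sketch of this config model (not taken from the package; the connection name, host, and environment variable below are invented for illustration). The point it shows is that secrets are resolved from the environment at call time rather than stored in the config:

import os

from anysite.db.config import ConnectionConfig, DatabaseType

# Hypothetical values, for illustration only.
os.environ["ANALYTICS_DB_PASSWORD"] = "s3cret"

cfg = ConnectionConfig(
    name="analytics",
    type=DatabaseType.POSTGRES,
    host="db.example.com",
    port=5432,
    database="analytics",
    user="reader",
    password_env="ANALYTICS_DB_PASSWORD",
)

print(cfg.get_password())  # "s3cret", read from the env var when called
print(cfg.to_dict())       # None fields omitted; 'name' is used as the YAML key, not serialized here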
anysite/db/manager.py
ADDED
@@ -0,0 +1,166 @@
"""Connection manager for database connections."""

from __future__ import annotations

from pathlib import Path
from typing import Any

import yaml

from anysite.config.paths import ensure_config_dir, get_config_dir
from anysite.db.adapters.base import DatabaseAdapter
from anysite.db.config import ConnectionConfig, DatabaseType


def get_connections_path() -> Path:
    """Get the path to the connections YAML file."""
    return get_config_dir() / "connections.yaml"


class ConnectionManager:
    """Manages named database connections stored in YAML."""

    def __init__(self, path: Path | None = None) -> None:
        self.path = path or get_connections_path()
        self._connections: dict[str, ConnectionConfig] | None = None

    def _load(self) -> dict[str, ConnectionConfig]:
        """Load connections from YAML file."""
        if self._connections is not None:
            return self._connections

        connections: dict[str, ConnectionConfig] = {}
        if self.path.exists():
            with open(self.path) as f:
                data = yaml.safe_load(f) or {}
            raw = data.get("connections", {})
            for name, config_data in raw.items():
                connections[name] = ConnectionConfig.from_dict(name, config_data)

        self._connections = connections
        return connections

    def _save(self) -> None:
        """Save connections to YAML file."""
        connections = self._load()
        data: dict[str, Any] = {
            "connections": {
                name: config.to_dict()
                for name, config in connections.items()
            }
        }
        ensure_config_dir()
        with open(self.path, "w") as f:
            yaml.dump(data, f, default_flow_style=False, sort_keys=False)

    def add(self, config: ConnectionConfig) -> None:
        """Add or update a connection.

        Args:
            config: Connection configuration.
        """
        connections = self._load()
        connections[config.name] = config
        self._save()

    def remove(self, name: str) -> bool:
        """Remove a connection by name.

        Args:
            name: Connection name.

        Returns:
            True if the connection was removed, False if not found.
        """
        connections = self._load()
        if name not in connections:
            return False
        del connections[name]
        self._save()
        return True

    def get(self, name: str) -> ConnectionConfig | None:
        """Get a connection config by name.

        Args:
            name: Connection name.

        Returns:
            ConnectionConfig or None if not found.
        """
        connections = self._load()
        return connections.get(name)

    def list(self) -> list[ConnectionConfig]:
        """List all connections.

        Returns:
            List of connection configs.
        """
        connections = self._load()
        return list(connections.values())

    def test(self, name: str) -> dict[str, str]:
        """Test a connection by connecting and getting server info.

        Args:
            name: Connection name.

        Returns:
            Server info dictionary.

        Raises:
            ValueError: If connection not found.
            Exception: If connection fails.
        """
        config = self.get(name)
        if config is None:
            raise ValueError(f"Connection '{name}' not found")

        adapter = self.get_adapter(config)
        with adapter:
            return adapter.get_server_info()

    def get_adapter(self, config: ConnectionConfig) -> DatabaseAdapter:
        """Get a database adapter for the given config.

        Args:
            config: Connection configuration.

        Returns:
            Appropriate DatabaseAdapter instance.

        Raises:
            ValueError: If database type is not supported.
        """
        if config.type == DatabaseType.SQLITE:
            from anysite.db.adapters.sqlite import SQLiteAdapter

            return SQLiteAdapter(config)

        elif config.type == DatabaseType.POSTGRES:
            from anysite.db import check_db_deps
            from anysite.db.adapters.postgres import PostgresAdapter

            check_db_deps("postgres")
            return PostgresAdapter(config)

        else:
            raise ValueError(f"Unsupported database type: {config.type.value}")

    def get_adapter_by_name(self, name: str) -> DatabaseAdapter:
        """Get a database adapter by connection name.

        Args:
            name: Connection name.

        Returns:
            Appropriate DatabaseAdapter instance.

        Raises:
            ValueError: If connection not found or type unsupported.
        """
        config = self.get(name)
        if config is None:
            raise ValueError(f"Connection '{name}' not found")
        return self.get_adapter(config)
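A minimal sketch of driving the manager from Python, assuming only the methods shown above (the explicit YAML path and the SQLite file path are made up for the example):

from pathlib import Path

from anysite.db.config import ConnectionConfig, DatabaseType
from anysite.db.manager import ConnectionManager

# Point the manager at an explicit file instead of the default config dir (hypothetical path).
manager = ConnectionManager(path=Path("/tmp/connections.yaml"))

manager.add(
    ConnectionConfig(name="local", type=DatabaseType.SQLITE, path="/tmp/anysite.db")
)

for conn in manager.list():
    print(conn.name, conn.type.value)

adapter = manager.get_adapter_by_name("local")  # resolves to the SQLite adapter
manager.remove("local")

Note the design choice visible in get_adapter: adapter modules are imported lazily per database type, so optional drivers (checked via check_db_deps for Postgres) are only required when that backend is actually used.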
anysite/db/operations/__init__.py
ADDED
@@ -0,0 +1 @@
"""Database operations."""
anysite/db/operations/insert.py
ADDED
@@ -0,0 +1,199 @@
"""Insert operations for streaming JSON data into database tables."""

from __future__ import annotations

import json
import sys
from pathlib import Path
from typing import IO, Any, TextIO

from anysite.db.adapters.base import DatabaseAdapter
from anysite.db.config import OnConflict
from anysite.db.schema.inference import infer_table_schema
from anysite.db.utils.sanitize import sanitize_table_name


def insert_from_stream(
    adapter: DatabaseAdapter,
    table: str,
    stream: TextIO | IO[str],
    on_conflict: OnConflict = OnConflict.ERROR,
    conflict_columns: list[str] | None = None,
    auto_create: bool = False,
    primary_key: str | None = None,
    batch_size: int = 100,
    quiet: bool = False,
) -> int:
    """Read JSONL from a stream and insert rows into a database table.

    Each line is parsed as a JSON object. If the input is a JSON array,
    the array elements are used as rows.

    Args:
        adapter: Connected database adapter.
        table: Target table name.
        stream: Input stream (stdin or file).
        on_conflict: Conflict resolution strategy.
        conflict_columns: Columns for upsert conflict detection.
        auto_create: Create the table automatically if it doesn't exist.
        primary_key: Primary key column for auto-created tables.
        batch_size: Number of rows per batch insert.
        quiet: Suppress progress output.

    Returns:
        Total number of rows inserted.
    """
    rows = _read_json_stream(stream)
    if not rows:
        return 0

    # Auto-create table if requested
    if auto_create and not adapter.table_exists(table):
        schema = infer_table_schema(table, rows)
        dialect = _get_dialect(adapter)
        sql_types = schema.to_sql_types(dialect)
        adapter.create_table(table, sql_types, primary_key=primary_key)
        if not quiet:
            import typer

            safe = sanitize_table_name(table)
            typer.echo(f"Created table {safe} with {len(sql_types)} columns", err=True)

    total = 0
    for i in range(0, len(rows), batch_size):
        batch = rows[i : i + batch_size]
        count = adapter.insert_batch(
            table, batch, on_conflict=on_conflict, conflict_columns=conflict_columns
        )
        total += count

    return total


def _read_json_stream(stream: TextIO | IO[str]) -> list[dict[str, Any]]:
    """Read JSON or JSONL data from a stream.

    Handles three formats:
    1. JSON array: [{"a": 1}, {"b": 2}]
    2. JSONL: one JSON object per line
    3. Single JSON object: {"a": 1}

    Args:
        stream: Input stream.

    Returns:
        List of row dictionaries.
    """
    content = stream.read().strip()
    if not content:
        return []

    # Try parsing as a JSON array or single object first
    try:
        data = json.loads(content)
        if isinstance(data, list):
            return [row for row in data if isinstance(row, dict)]
        elif isinstance(data, dict):
            return [data]
    except json.JSONDecodeError:
        pass

    # Parse as JSONL (one JSON object per line)
    rows: list[dict[str, Any]] = []
    for line in content.split("\n"):
        line = line.strip()
        if not line:
            continue
        try:
            obj = json.loads(line)
            if isinstance(obj, dict):
                rows.append(obj)
        except json.JSONDecodeError:
            continue

    return rows


def insert_from_file(
    adapter: DatabaseAdapter,
    table: str,
    file_path: Path,
    on_conflict: OnConflict = OnConflict.ERROR,
    conflict_columns: list[str] | None = None,
    auto_create: bool = False,
    primary_key: str | None = None,
    batch_size: int = 100,
    quiet: bool = False,
) -> int:
    """Read JSONL from a file and insert rows into a database table.

    Args:
        adapter: Connected database adapter.
        table: Target table name.
        file_path: Path to JSONL/JSON file.
        on_conflict: Conflict resolution strategy.
        conflict_columns: Columns for upsert conflict detection.
        auto_create: Create the table automatically if it doesn't exist.
        primary_key: Primary key column for auto-created tables.
        batch_size: Number of rows per batch insert.
        quiet: Suppress progress output.

    Returns:
        Total number of rows inserted.
    """
    with open(file_path) as f:
        return insert_from_stream(
            adapter,
            table,
            f,
            on_conflict=on_conflict,
            conflict_columns=conflict_columns,
            auto_create=auto_create,
            primary_key=primary_key,
            batch_size=batch_size,
            quiet=quiet,
        )


def insert_from_stdin(
    adapter: DatabaseAdapter,
    table: str,
    on_conflict: OnConflict = OnConflict.ERROR,
    conflict_columns: list[str] | None = None,
    auto_create: bool = False,
    primary_key: str | None = None,
    batch_size: int = 100,
    quiet: bool = False,
) -> int:
    """Read JSONL from stdin and insert rows.

    Args:
        adapter: Connected database adapter.
        table: Target table name.
        on_conflict: Conflict resolution strategy.
        conflict_columns: Columns for upsert conflict detection.
        auto_create: Create the table automatically if it doesn't exist.
        primary_key: Primary key column for auto-created tables.
        batch_size: Number of rows per batch insert.
        quiet: Suppress progress output.

    Returns:
        Total number of rows inserted.
    """
    return insert_from_stream(
        adapter,
        table,
        sys.stdin,
        on_conflict=on_conflict,
        conflict_columns=conflict_columns,
        auto_create=auto_create,
        primary_key=primary_key,
        batch_size=batch_size,
        quiet=quiet,
    )


def _get_dialect(adapter: DatabaseAdapter) -> str:
    """Get the SQL dialect name from an adapter."""
    info = adapter.get_server_info()
    return info.get("type", "sqlite")
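A hedged end-to-end sketch of the insert path (the 'local' connection, the 'people' table, and the JSONL payload are assumptions for illustration; an in-memory stream stands in for stdin or a file):

import io

from anysite.db.config import OnConflict
from anysite.db.manager import ConnectionManager
from anysite.db.operations.insert import insert_from_stream

# Two JSONL rows; a JSON array or a single object would also be accepted by _read_json_stream.
jsonl = io.StringIO('{"id": 1, "name": "ada"}\n{"id": 2, "name": "grace"}\n')

manager = ConnectionManager()
adapter = manager.get_adapter_by_name("local")  # assumes a 'local' connection is configured

with adapter:
    inserted = insert_from_stream(
        adapter,
        "people",
        jsonl,
        on_conflict=OnConflict.IGNORE,
        auto_create=True,   # infer a schema from the rows and create the table if missing
        primary_key="id",
        quiet=True,
    )

print(f"inserted {inserted} rows")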
anysite/db/operations/query.py
ADDED
@@ -0,0 +1,43 @@
"""Query operations for database tables."""

from __future__ import annotations

from pathlib import Path
from typing import Any

from anysite.db.adapters.base import DatabaseAdapter


def execute_query(
    adapter: DatabaseAdapter,
    sql: str,
    params: tuple[Any, ...] | None = None,
) -> list[dict[str, Any]]:
    """Execute a SQL query and return results.

    Args:
        adapter: Connected database adapter.
        sql: SQL query string.
        params: Optional query parameters.

    Returns:
        List of row dictionaries.
    """
    return adapter.fetch_all(sql, params)


def execute_query_from_file(
    adapter: DatabaseAdapter,
    file_path: Path,
) -> list[dict[str, Any]]:
    """Execute a SQL query from a file.

    Args:
        adapter: Connected database adapter.
        file_path: Path to SQL file.

    Returns:
        List of row dictionaries.
    """
    sql = file_path.read_text().strip()
    return execute_query(adapter, sql)
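And a brief sketch of reading the data back, assuming the 'local' connection and 'people' table from the insert example above:

from anysite.db.manager import ConnectionManager
from anysite.db.operations.query import execute_query

manager = ConnectionManager()
adapter = manager.get_adapter_by_name("local")

with adapter:
    # Placeholder syntax depends on the underlying driver; '?' is shown here
    # on the assumption of the SQLite adapter.
    rows = execute_query(adapter, "SELECT id, name FROM people WHERE id > ?", (0,))

for row in rows:
    print(row["id"], row["name"])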
anysite/db/schema/__init__.py
ADDED
@@ -0,0 +1 @@
"""Schema inference for database tables."""