krons-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kronos/__init__.py +0 -0
- kronos/core/__init__.py +145 -0
- kronos/core/broadcaster.py +116 -0
- kronos/core/element.py +225 -0
- kronos/core/event.py +316 -0
- kronos/core/eventbus.py +116 -0
- kronos/core/flow.py +356 -0
- kronos/core/graph.py +442 -0
- kronos/core/node.py +982 -0
- kronos/core/pile.py +575 -0
- kronos/core/processor.py +494 -0
- kronos/core/progression.py +296 -0
- kronos/enforcement/__init__.py +57 -0
- kronos/enforcement/common/__init__.py +34 -0
- kronos/enforcement/common/boolean.py +85 -0
- kronos/enforcement/common/choice.py +97 -0
- kronos/enforcement/common/mapping.py +118 -0
- kronos/enforcement/common/model.py +102 -0
- kronos/enforcement/common/number.py +98 -0
- kronos/enforcement/common/string.py +140 -0
- kronos/enforcement/context.py +129 -0
- kronos/enforcement/policy.py +80 -0
- kronos/enforcement/registry.py +153 -0
- kronos/enforcement/rule.py +312 -0
- kronos/enforcement/service.py +370 -0
- kronos/enforcement/validator.py +198 -0
- kronos/errors.py +146 -0
- kronos/operations/__init__.py +32 -0
- kronos/operations/builder.py +228 -0
- kronos/operations/flow.py +398 -0
- kronos/operations/node.py +101 -0
- kronos/operations/registry.py +92 -0
- kronos/protocols.py +414 -0
- kronos/py.typed +0 -0
- kronos/services/__init__.py +81 -0
- kronos/services/backend.py +286 -0
- kronos/services/endpoint.py +608 -0
- kronos/services/hook.py +471 -0
- kronos/services/imodel.py +465 -0
- kronos/services/registry.py +115 -0
- kronos/services/utilities/__init__.py +36 -0
- kronos/services/utilities/header_factory.py +87 -0
- kronos/services/utilities/rate_limited_executor.py +271 -0
- kronos/services/utilities/rate_limiter.py +180 -0
- kronos/services/utilities/resilience.py +414 -0
- kronos/session/__init__.py +41 -0
- kronos/session/exchange.py +258 -0
- kronos/session/message.py +60 -0
- kronos/session/session.py +411 -0
- kronos/specs/__init__.py +25 -0
- kronos/specs/adapters/__init__.py +0 -0
- kronos/specs/adapters/_utils.py +45 -0
- kronos/specs/adapters/dataclass_field.py +246 -0
- kronos/specs/adapters/factory.py +56 -0
- kronos/specs/adapters/pydantic_adapter.py +309 -0
- kronos/specs/adapters/sql_ddl.py +946 -0
- kronos/specs/catalog/__init__.py +36 -0
- kronos/specs/catalog/_audit.py +39 -0
- kronos/specs/catalog/_common.py +43 -0
- kronos/specs/catalog/_content.py +59 -0
- kronos/specs/catalog/_enforcement.py +70 -0
- kronos/specs/factory.py +120 -0
- kronos/specs/operable.py +314 -0
- kronos/specs/phrase.py +405 -0
- kronos/specs/protocol.py +140 -0
- kronos/specs/spec.py +506 -0
- kronos/types/__init__.py +60 -0
- kronos/types/_sentinel.py +311 -0
- kronos/types/base.py +369 -0
- kronos/types/db_types.py +260 -0
- kronos/types/identity.py +66 -0
- kronos/utils/__init__.py +40 -0
- kronos/utils/_hash.py +234 -0
- kronos/utils/_json_dump.py +392 -0
- kronos/utils/_lazy_init.py +63 -0
- kronos/utils/_to_list.py +165 -0
- kronos/utils/_to_num.py +85 -0
- kronos/utils/_utils.py +375 -0
- kronos/utils/concurrency/__init__.py +205 -0
- kronos/utils/concurrency/_async_call.py +333 -0
- kronos/utils/concurrency/_cancel.py +122 -0
- kronos/utils/concurrency/_errors.py +96 -0
- kronos/utils/concurrency/_patterns.py +363 -0
- kronos/utils/concurrency/_primitives.py +328 -0
- kronos/utils/concurrency/_priority_queue.py +135 -0
- kronos/utils/concurrency/_resource_tracker.py +110 -0
- kronos/utils/concurrency/_run_async.py +67 -0
- kronos/utils/concurrency/_task.py +95 -0
- kronos/utils/concurrency/_utils.py +79 -0
- kronos/utils/fuzzy/__init__.py +14 -0
- kronos/utils/fuzzy/_extract_json.py +90 -0
- kronos/utils/fuzzy/_fuzzy_json.py +288 -0
- kronos/utils/fuzzy/_fuzzy_match.py +149 -0
- kronos/utils/fuzzy/_string_similarity.py +187 -0
- kronos/utils/fuzzy/_to_dict.py +396 -0
- kronos/utils/sql/__init__.py +13 -0
- kronos/utils/sql/_sql_validation.py +142 -0
- krons-0.1.0.dist-info/METADATA +70 -0
- krons-0.1.0.dist-info/RECORD +101 -0
- krons-0.1.0.dist-info/WHEEL +4 -0
- krons-0.1.0.dist-info/licenses/LICENSE +201 -0
kronos/specs/adapters/sql_ddl.py

@@ -0,0 +1,946 @@

# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
# SPDX-License-Identifier: Apache-2.0

"""SQL DDL SpecAdapter: Spec -> SQL column definitions, Operable -> CREATE TABLE.

Generates SQL DDL statements from Spec/Operable definitions:
- FK[Model]: Foreign key references (UUID with REFERENCES constraint)
- Vector[dim]: pgvector VECTOR(dim) for embeddings
- Type mapping: Python types -> SQL types (TEXT, INTEGER, JSONB, etc.)

Schema Specifications (frozen dataclasses for diffing/introspection):
- ColumnSpec, IndexSpec, TriggerSpec, CheckConstraintSpec, UniqueConstraintSpec
- ForeignKeySpec: Full FK constraint with deferrable support
- TableSpec: Complete table schema representation
- SchemaSpec: Multi-table database schema

Enums for type-safe specification:
- OnAction: FK ON DELETE/UPDATE actions (CASCADE, SET NULL, etc.)
- IndexMethod: Index access methods (BTREE, GIN, HNSW, etc.)
"""

from __future__ import annotations

from dataclasses import dataclass
from datetime import date, datetime
from enum import StrEnum
from typing import TYPE_CHECKING, Annotated, Any, get_args, get_origin
from uuid import UUID

from kronos.types._sentinel import Unset, UnsetType, is_sentinel
from kronos.types.db_types import FK, FKMeta, Vector, VectorMeta, extract_kron_db_meta
from kronos.utils.sql import validate_identifier

from ..protocol import SpecAdapter
from ._utils import resolve_annotation_to_base_types

if TYPE_CHECKING:
    from kronos.specs.operable import Operable
    from kronos.specs.spec import Spec

__all__ = (
    # Enums
    "OnAction",
    "IndexMethod",
    # Metadata classes
    "FK",
    "FKMeta",
    "Vector",
    "VectorMeta",
    # Spec dataclasses
    "ColumnSpec",
    "ForeignKeySpec",
    "IndexSpec",
    "TriggerSpec",
    "CheckConstraintSpec",
    "UniqueConstraintSpec",
    "TableSpec",
    "SchemaSpec",
    # Extraction helpers
    "extract_kron_db_meta",
    # Adapter
    "SQLSpecAdapter",
)


# =============================================================================
# Enums for Type-Safe Specification
# =============================================================================


class OnAction(StrEnum):
    """FK ON DELETE/ON UPDATE actions."""

    CASCADE = "CASCADE"
    SET_NULL = "SET NULL"
    SET_DEFAULT = "SET DEFAULT"
    RESTRICT = "RESTRICT"
    NO_ACTION = "NO ACTION"


class IndexMethod(StrEnum):
    """Index access methods."""

    BTREE = "btree"
    HASH = "hash"
    GIST = "gist"
    GIN = "gin"
    SPGIST = "spgist"
    BRIN = "brin"
    IVFFLAT = "ivfflat"  # pgvector
    HNSW = "hnsw"  # pgvector


# =============================================================================
# Type Mapping
# =============================================================================

PYTHON_TO_SQL: dict[type, str] = {
    str: "TEXT",
    int: "INTEGER",
    float: "DOUBLE PRECISION",
    bool: "BOOLEAN",
    UUID: "UUID",
    datetime: "TIMESTAMP WITH TIME ZONE",
    date: "DATE",
    bytes: "BYTEA",
    dict: "JSONB",
    list: "JSONB",
}


def python_type_to_sql(
    annotation: Any,
) -> tuple[str, bool, FKMeta | None, VectorMeta | None]:
    """Convert Python type to (sql_type, nullable, fk_meta, vector_meta)."""
    fk_raw, vec_raw = extract_kron_db_meta(annotation, metas="BOTH")
    fk = fk_raw if isinstance(fk_raw, FKMeta) else None
    vec = vec_raw if isinstance(vec_raw, VectorMeta) else None

    resolved = resolve_annotation_to_base_types(annotation)
    nullable = resolved["nullable"]
    annotation = resolved["base_type"]

    if fk is not None:
        return "UUID", nullable, fk, None

    if vec is not None:
        return f"VECTOR({vec.dim})", nullable, None, vec

    if get_origin(annotation) is Annotated:
        args = get_args(annotation)
        if args:
            annotation = args[0]
            for arg in args[1:]:
                if isinstance(arg, FKMeta):
                    return "UUID", nullable, arg, None
                if isinstance(arg, VectorMeta):
                    return f"VECTOR({arg.dim})", nullable, None, arg

    if annotation in PYTHON_TO_SQL:
        return PYTHON_TO_SQL[annotation], nullable, None, None

    if get_origin(annotation) in (dict, list):
        return "JSONB", nullable, None, None

    if hasattr(annotation, "__members__"):
        return "TEXT", nullable, None, None

    try:
        from pydantic import BaseModel

        if isinstance(annotation, type) and issubclass(annotation, BaseModel):
            return "JSONB", nullable, None, None
    except ImportError:
        pass

    return "TEXT", nullable, None, None
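
# Usage sketch (illustrative, not part of the packaged module): expected
# results of python_type_to_sql for a few annotations. The nullable flag comes
# from resolve_annotation_to_base_types, so the values below assume the usual
# convention that only Optional/union-with-None annotations are nullable.
#
#   python_type_to_sql(str)             # ("TEXT", False, None, None)
#   python_type_to_sql(int | None)      # ("INTEGER", True, None, None)
#   python_type_to_sql(dict[str, int])  # ("JSONB", False, None, None)
#   python_type_to_sql(Vector[1536])    # ("VECTOR(1536)", False, None, vec_meta)
#   python_type_to_sql(FK[User])        # ("UUID", False, fk_meta, None); User is hypothetical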


# =============================================================================
# Schema Specification Dataclasses (Frozen for Hashability/Diffing)
# =============================================================================


@dataclass(frozen=True, slots=True)
class ColumnSpec:
    """Specification for a database column.

    Frozen dataclass for hashability and immutability, enabling schema diffing.

    Attributes:
        name: Column name.
        type: SQL type (e.g., "TEXT", "UUID", "VECTOR(1536)").
        nullable: Whether column allows NULL values.
        default: DB-level default expression (e.g., "gen_random_uuid()").
        is_primary_key: Whether this is the primary key.
        is_unique: Whether column has unique constraint.
    """

    name: str
    type: str
    nullable: bool = True
    default: str | None = None
    is_primary_key: bool = False
    is_unique: bool = False

    def to_ddl(self) -> str:
        """Generate column DDL fragment."""
        validate_identifier(self.name, "column name")
        parts = [f'"{self.name}"', self.type]

        if self.is_primary_key:
            parts.append("PRIMARY KEY")
        elif not self.nullable:
            parts.append("NOT NULL")

        if self.default is not None:
            parts.append(f"DEFAULT {self.default}")

        if self.is_unique and not self.is_primary_key:
            parts.append("UNIQUE")

        return " ".join(parts)
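
# Usage sketch (illustrative, not part of the packaged module): to_ddl output
# for two representative columns.
#
#   ColumnSpec("title", "TEXT", nullable=False).to_ddl()
#   # '"title" TEXT NOT NULL'
#   ColumnSpec("id", "UUID", is_primary_key=True, default="gen_random_uuid()").to_ddl()
#   # '"id" UUID PRIMARY KEY DEFAULT gen_random_uuid()'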


@dataclass(frozen=True, slots=True)
class ForeignKeySpec:
    """Specification for a foreign key constraint.

    Supports deferrable constraints for complex transactions.

    Attributes:
        name: Constraint name.
        columns: Local column(s) forming the FK.
        ref_table: Referenced table name.
        ref_columns: Referenced column(s).
        on_delete: ON DELETE action.
        on_update: ON UPDATE action.
        deferrable: Whether constraint is deferrable.
        initially_deferred: Whether constraint is initially deferred.
    """

    name: str
    columns: tuple[str, ...]
    ref_table: str
    ref_columns: tuple[str, ...] = ("id",)
    on_delete: OnAction = OnAction.CASCADE
    on_update: OnAction = OnAction.CASCADE
    deferrable: bool = False
    initially_deferred: bool = False

    def to_ddl(self, table_name: str) -> str:
        """Generate ALTER TABLE ADD CONSTRAINT DDL."""
        validate_identifier(table_name, "table name")
        validate_identifier(self.name, "constraint name")
        validate_identifier(self.ref_table, "referenced table name")
        for col in self.columns:
            validate_identifier(col, "column name")
        for col in self.ref_columns:
            validate_identifier(col, "referenced column name")

        cols = ", ".join(f'"{c}"' for c in self.columns)
        refs = ", ".join(f'"{c}"' for c in self.ref_columns)

        ddl = (
            f'ALTER TABLE "{table_name}" ADD CONSTRAINT "{self.name}" '
            f'FOREIGN KEY ({cols}) REFERENCES "{self.ref_table}" ({refs}) '
            f"ON DELETE {self.on_delete.value} ON UPDATE {self.on_update.value}"
        )

        if self.deferrable:
            ddl += " DEFERRABLE"
            if self.initially_deferred:
                ddl += " INITIALLY DEFERRED"

        return ddl
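
# Usage sketch (illustrative, not part of the packaged module): a single-column
# FK with the default actions. Note that to_ddl returns no trailing semicolon
# here; TableSpec.to_full_ddl appends it.
#
#   ForeignKeySpec("fk_posts_author_id", ("author_id",), "users").to_ddl("posts")
#   # 'ALTER TABLE "posts" ADD CONSTRAINT "fk_posts_author_id" '
#   # 'FOREIGN KEY ("author_id") REFERENCES "users" ("id") '
#   # 'ON DELETE CASCADE ON UPDATE CASCADE'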


@dataclass(frozen=True, slots=True)
class IndexSpec:
    """Specification for a database index.

    Supports partial indexes, covering indexes, and vector index methods.

    Attributes:
        name: Index name.
        columns: Column(s) in the index.
        unique: Whether index enforces uniqueness.
        method: Index access method (btree, gin, hnsw, etc.).
        where: Partial index condition.
        concurrently: Whether to create index concurrently.
        include: Columns to include in covering index.
    """

    name: str
    columns: tuple[str, ...]
    unique: bool = False
    method: IndexMethod = IndexMethod.BTREE
    where: str | None = None
    concurrently: bool = False
    include: tuple[str, ...] = ()

    def to_ddl(self, table_name: str, schema: str = "public") -> str:
        """Generate CREATE INDEX DDL."""
        validate_identifier(table_name, "table name")
        validate_identifier(schema, "schema name")
        validate_identifier(self.name, "index name")
        for col in self.columns:
            validate_identifier(col, "column name")
        for col in self.include:
            validate_identifier(col, "included column name")

        parts = ["CREATE"]

        if self.unique:
            parts.append("UNIQUE")

        parts.append("INDEX")

        if self.concurrently:
            parts.append("CONCURRENTLY")

        parts.append(f'IF NOT EXISTS "{self.name}"')
        parts.append(f'ON "{schema}"."{table_name}"')

        if self.method != IndexMethod.BTREE:
            parts.append(f"USING {self.method.value}")

        cols = ", ".join(f'"{c}"' for c in self.columns)
        parts.append(f"({cols})")

        if self.include:
            include_cols = ", ".join(f'"{c}"' for c in self.include)
            parts.append(f"INCLUDE ({include_cols})")

        if self.where:
            parts.append(f"WHERE {self.where}")

        return " ".join(parts) + ";"
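
# Usage sketch (illustrative, not part of the packaged module): a partial
# unique index; BTREE is the default method, so no USING clause is emitted.
#
#   IndexSpec("idx_posts_slug", ("slug",), unique=True, where="deleted_at IS NULL").to_ddl("posts")
#   # 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_posts_slug" ON "public"."posts" ("slug") WHERE deleted_at IS NULL;'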


@dataclass(frozen=True, slots=True)
class TriggerSpec:
    """Specification for a database trigger.

    Enables database-level business rules (immutability, audit, computed columns).

    Attributes:
        name: Trigger name.
        timing: BEFORE, AFTER, or INSTEAD OF.
        events: Events that fire the trigger (INSERT, UPDATE, DELETE).
        function: Function to call (with schema, e.g., "public.audit_log").
        for_each: ROW or STATEMENT.
        when: Optional WHEN condition.
    """

    name: str
    timing: str  # BEFORE, AFTER, INSTEAD OF
    events: tuple[str, ...]  # INSERT, UPDATE, DELETE
    function: str  # Function name with schema
    for_each: str = "ROW"
    when: str | None = None

    def to_ddl(self, table_name: str, schema: str = "public") -> str:
        """Generate CREATE TRIGGER DDL."""
        validate_identifier(table_name, "table name")
        validate_identifier(schema, "schema name")
        validate_identifier(self.name, "trigger name")

        events_str = " OR ".join(self.events)

        ddl = (
            f'CREATE TRIGGER "{self.name}" '
            f"{self.timing} {events_str} "
            f'ON "{schema}"."{table_name}" '
            f"FOR EACH {self.for_each} "
        )

        if self.when:
            ddl += f"WHEN ({self.when}) "

        ddl += f"EXECUTE FUNCTION {self.function}();"

        return ddl
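
# Usage sketch (illustrative, not part of the packaged module): a BEFORE UPDATE
# row trigger; public.set_updated_at is a hypothetical function name.
#
#   TriggerSpec("trg_posts_updated_at", "BEFORE", ("UPDATE",), "public.set_updated_at").to_ddl("posts")
#   # 'CREATE TRIGGER "trg_posts_updated_at" BEFORE UPDATE ON "public"."posts" '
#   # 'FOR EACH ROW EXECUTE FUNCTION public.set_updated_at();'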


@dataclass(frozen=True, slots=True)
class CheckConstraintSpec:
    """Specification for a CHECK constraint.

    Enables database-level validation complementing application rules.

    Attributes:
        name: Constraint name.
        expression: CHECK expression (SQL boolean expression).

    Warning:
        The `expression` field accepts raw SQL and is NOT validated.
        Only use expressions from trusted sources. Never pass user input
        directly as this creates SQL injection vulnerabilities.
    """

    name: str
    expression: str

    def to_ddl(self, table_name: str, schema: str = "public") -> str:
        """Generate ALTER TABLE ADD CONSTRAINT DDL."""
        validate_identifier(table_name, "table name")
        validate_identifier(schema, "schema name")
        validate_identifier(self.name, "constraint name")
        return (
            f'ALTER TABLE "{schema}"."{table_name}" '
            f'ADD CONSTRAINT "{self.name}" CHECK ({self.expression});'
        )


@dataclass(frozen=True, slots=True)
class UniqueConstraintSpec:
    """Specification for a UNIQUE constraint.

    Attributes:
        name: Constraint name.
        columns: Column(s) in the constraint.
    """

    name: str
    columns: tuple[str, ...]

    def to_ddl(self, table_name: str, schema: str = "public") -> str:
        """Generate ALTER TABLE ADD CONSTRAINT DDL."""
        validate_identifier(table_name, "table name")
        validate_identifier(schema, "schema name")
        validate_identifier(self.name, "constraint name")
        for col in self.columns:
            validate_identifier(col, "column name")
        cols = ", ".join(f'"{c}"' for c in self.columns)
        return (
            f'ALTER TABLE "{schema}"."{table_name}" ADD CONSTRAINT "{self.name}" UNIQUE ({cols});'
        )
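
# Usage sketch (illustrative, not part of the packaged module):
#
#   CheckConstraintSpec("ck_posts_views", "views >= 0").to_ddl("posts")
#   # 'ALTER TABLE "public"."posts" ADD CONSTRAINT "ck_posts_views" CHECK (views >= 0);'
#
#   UniqueConstraintSpec("uq_posts_slug_lang", ("slug", "lang")).to_ddl("posts")
#   # 'ALTER TABLE "public"."posts" ADD CONSTRAINT "uq_posts_slug_lang" UNIQUE ("slug", "lang");'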


@dataclass(frozen=True, slots=True)
class TableSpec:
    """Complete specification for a database table.

    Hashable, diffable representation enabling state-based migrations.

    Attributes:
        name: Table name.
        schema: Schema name (default: public).
        columns: Tuple of column specifications.
        primary_key: Primary key column(s).
        foreign_keys: Tuple of FK specifications.
        indexes: Tuple of index specifications.
        triggers: Tuple of trigger specifications.
        check_constraints: Tuple of CHECK constraint specifications.
        unique_constraints: Tuple of UNIQUE constraint specifications.
    """

    name: str
    schema: str = "public"
    columns: tuple[ColumnSpec, ...] = ()
    primary_key: tuple[str, ...] = ("id",)
    foreign_keys: tuple[ForeignKeySpec, ...] = ()
    indexes: tuple[IndexSpec, ...] = ()
    triggers: tuple[TriggerSpec, ...] = ()
    check_constraints: tuple[CheckConstraintSpec, ...] = ()
    unique_constraints: tuple[UniqueConstraintSpec, ...] = ()

    @property
    def qualified_name(self) -> str:
        """Get fully qualified table name."""
        validate_identifier(self.schema, "schema name")
        validate_identifier(self.name, "table name")
        return f'"{self.schema}"."{self.name}"'

    def get_column(self, name: str) -> ColumnSpec | None:
        """Get column spec by name."""
        for col in self.columns:
            if col.name == name:
                return col
        return None

    def to_create_table_ddl(self, if_not_exists: bool = True) -> str:
        """Generate CREATE TABLE DDL (without FKs - added separately)."""
        col_defs = [col.to_ddl() for col in self.columns]
        col_separator = ",\n  "
        col_lines = col_separator.join(col_defs)

        exists_clause = "IF NOT EXISTS " if if_not_exists else ""
        return f"CREATE TABLE {exists_clause}{self.qualified_name} (\n  {col_lines}\n);"

    def to_full_ddl(self) -> list[str]:
        """Generate all DDL statements for this table.

        Returns statements in execution order:
        1. CREATE TABLE
        2. UNIQUE constraints
        3. CHECK constraints
        4. Foreign keys
        5. Indexes
        6. Triggers
        """
        statements = [self.to_create_table_ddl()]

        for uc in self.unique_constraints:
            statements.append(uc.to_ddl(self.name, self.schema))

        for cc in self.check_constraints:
            statements.append(cc.to_ddl(self.name, self.schema))

        for fk in self.foreign_keys:
            statements.append(fk.to_ddl(self.name) + ";")

        for idx in self.indexes:
            statements.append(idx.to_ddl(self.name, self.schema))

        for trigger in self.triggers:
            statements.append(trigger.to_ddl(self.name, self.schema))

        return statements
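
    # Usage sketch (illustrative, not part of the packaged module): applying
    # the ordered statements with a DB-API cursor; table_spec and cursor are
    # assumed to exist.
    #
    #   for stmt in table_spec.to_full_ddl():
    #       cursor.execute(stmt)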

    @classmethod
    def from_operable(
        cls,
        op: Operable,
        name: str,
        *,
        schema: str = "public",
        primary_key: str = "id",
        indexes: list[dict[str, Any]] | None = None,
        triggers: list[dict[str, Any]] | None = None,
        check_constraints: list[dict[str, Any]] | None = None,
        unique_constraints: list[dict[str, Any]] | None = None,
    ) -> TableSpec:
        """Create TableSpec from an Operable.

        Args:
            op: Operable with Specs defining the table structure.
            name: Table name.
            schema: Database schema (default "public").
            primary_key: Primary key column name (default "id").
            indexes: List of index definitions.
            triggers: List of trigger definitions.
            check_constraints: List of CHECK constraint definitions.
            unique_constraints: List of UNIQUE constraint definitions.

        Returns:
            TableSpec with columns and constraints derived from Operable.
        """
        columns: list[ColumnSpec] = []
        foreign_keys: list[ForeignKeySpec] = []

        for spec in op.get_specs():
            if not spec.name:
                continue

            sql_type, type_nullable, fk, _ = python_type_to_sql(spec.annotation)
            nullable = type_nullable or spec.is_nullable

            # Check for default value
            default_value = None
            if not is_sentinel(spec.metadata):
                for meta in spec.metadata:
                    if meta.key == "default":
                        val = meta.value
                        if isinstance(val, str):
                            default_value = f"'{val}'"
                        elif isinstance(val, bool):
                            default_value = str(val).upper()
                        elif isinstance(val, (int, float)):
                            default_value = str(val)
                        break

            is_pk = spec.name == primary_key
            col_spec = ColumnSpec(
                name=spec.name,
                type=sql_type,
                nullable=nullable and not is_pk,
                default=default_value,
                is_primary_key=is_pk,
            )
            columns.append(col_spec)

            # Create FK constraint
            if fk is not None:
                fk_spec = ForeignKeySpec(
                    name=f"fk_{name}_{spec.name}",
                    columns=(spec.name,),
                    ref_table=fk.table_name,
                    ref_columns=(fk.column,),
                    on_delete=OnAction(fk.on_delete),
                    on_update=OnAction(fk.on_update),
                    deferrable=fk.deferrable,
                    initially_deferred=fk.initially_deferred,
                )
                foreign_keys.append(fk_spec)

        # Build index specs
        index_specs: list[IndexSpec] = []
        for idx_def in indexes or []:
            idx_cols = tuple(idx_def.get("columns", []))
            idx_name = idx_def.get("name") or f"idx_{name}_{'_'.join(idx_cols)}"
            idx_spec = IndexSpec(
                name=idx_name,
                columns=idx_cols,
                unique=idx_def.get("unique", False),
                method=IndexMethod(idx_def.get("method", "btree")),
                where=idx_def.get("where"),
                include=tuple(idx_def.get("include", [])),
            )
            index_specs.append(idx_spec)

        # Build trigger specs
        trigger_specs: list[TriggerSpec] = []
        for trg_def in triggers or []:
            trg_spec = TriggerSpec(
                name=trg_def["name"],
                timing=trg_def["timing"],
                events=tuple(trg_def["events"]),
                function=trg_def["function"],
                for_each=trg_def.get("for_each", "ROW"),
                when=trg_def.get("when"),
            )
            trigger_specs.append(trg_spec)

        # Build CHECK constraint specs
        check_specs: list[CheckConstraintSpec] = []
        for chk_def in check_constraints or []:
            chk_spec = CheckConstraintSpec(
                name=chk_def["name"],
                expression=chk_def["expression"],
            )
            check_specs.append(chk_spec)

        # Build UNIQUE constraint specs
        unique_specs: list[UniqueConstraintSpec] = []
        for uq_def in unique_constraints or []:
            uq_spec = UniqueConstraintSpec(
                name=uq_def["name"],
                columns=tuple(uq_def["columns"]),
            )
            unique_specs.append(uq_spec)

        return cls(
            name=name,
            schema=schema,
            columns=tuple(columns),
            primary_key=(primary_key,),
            foreign_keys=tuple(foreign_keys),
            indexes=tuple(index_specs),
            triggers=tuple(trigger_specs),
            check_constraints=tuple(check_specs),
            unique_constraints=tuple(unique_specs),
        )
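
# Usage sketch (illustrative, not part of the packaged module): building a
# TableSpec from an Operable. The Operable construction follows the Usage note
# on SQLSpecAdapter below; the index/constraint dicts use the keys read by
# from_operable above.
#
#   table = TableSpec.from_operable(
#       op,
#       "articles",
#       indexes=[{"columns": ["title"], "unique": True}],
#       unique_constraints=[{"name": "uq_articles_slug", "columns": ["slug"]}],
#   )
#   statements = table.to_full_ddl()  # CREATE TABLE first, then constraints, indexes, triggers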


@dataclass(frozen=True, slots=True)
class SchemaSpec:
    """Complete database schema specification.

    Represents entire database schema for diffing and migration planning.

    Attributes:
        tables: Tuple of table specifications.
        version: Schema version hash (computed from table specs).
    """

    tables: tuple[TableSpec, ...] = ()
    version: str | None = None

    def get_table(self, name: str) -> TableSpec | None:
        """Get table spec by name."""
        for table in self.tables:
            if table.name == name:
                return table
        return None

    @classmethod
    def from_operables(
        cls,
        operables: dict[str, Operable],
        *,
        schema: str = "public",
    ) -> SchemaSpec:
        """Create SchemaSpec from a mapping of table names to Operables.

        Args:
            operables: Mapping of table name -> Operable.
            schema: Default schema for all tables.

        Returns:
            SchemaSpec with version hash computed from table definitions.
        """
        from kronos.utils import compute_hash

        tables = [
            TableSpec.from_operable(op, name, schema=schema)
            for name, op in sorted(operables.items())
        ]

        # Compute version hash
        table_data = [
            {
                "name": t.name,
                "schema": t.schema,
                "columns": [
                    {
                        "name": c.name,
                        "type": c.type,
                        "nullable": c.nullable,
                        "default": c.default,
                    }
                    for c in t.columns
                ],
                "foreign_keys": [
                    {"name": fk.name, "columns": fk.columns, "ref_table": fk.ref_table}
                    for fk in t.foreign_keys
                ],
                "indexes": [
                    {"name": idx.name, "columns": idx.columns, "unique": idx.unique}
                    for idx in t.indexes
                ],
            }
            for t in tables
        ]
        version = compute_hash(table_data)

        return cls(tables=tuple(tables), version=version)
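
# Usage sketch (illustrative, not part of the packaged module): because tables
# are built in sorted name order and hashed, two SchemaSpecs over the same
# Operables get the same version, so comparing .version is a cheap drift check.
#
#   current = SchemaSpec.from_operables({"articles": article_op, "users": user_op})
#   if current.version != deployed_version:  # deployed_version: a stored hash, hypothetical
#       ...  # plan a migration from the table-level diff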


# =============================================================================
# SQLSpecAdapter
# =============================================================================


class SQLSpecAdapter(SpecAdapter[str]):
    """SQL DDL adapter: Spec -> column definition, Operable -> CREATE TABLE.

    One-way adapter for DDL generation. Does not support instance operations.

    Usage:
        op = Operable([Spec(str, name="title"), Spec(int, name="views", default=0)])
        ddl = SQLSpecAdapter.compose_structure(op, "articles", schema="public")
    """

    @classmethod
    def create_field(cls, spec: Spec) -> str:
        """Convert Spec to SQL column definition, e.g., '"name" TEXT NOT NULL'."""
        annotation = spec.annotation
        sql_type, type_nullable, _, _ = python_type_to_sql(annotation)
        nullable = type_nullable or spec.is_nullable

        has_default = False
        default_value = None
        if not is_sentinel(spec.metadata):
            for meta in spec.metadata:
                if meta.key == "default":
                    has_default = True
                    default_value = meta.value
                    break

        # Validate identifier before use
        validate_identifier(spec.name, "column")

        parts = [f'"{spec.name}"', sql_type]

        if not nullable and not has_default:
            parts.append("NOT NULL")

        if has_default and default_value is not None:
            if isinstance(default_value, str):
                parts.append(f"DEFAULT '{default_value}'")
            elif isinstance(default_value, bool):
                parts.append(f"DEFAULT {str(default_value).upper()}")
            elif isinstance(default_value, (int, float)):
                parts.append(f"DEFAULT {default_value}")

        return " ".join(parts)
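
    # Usage sketch (illustrative, not part of the packaged module): assuming
    # both Specs resolve as non-nullable, create_field yields:
    #
    #   SQLSpecAdapter.create_field(Spec(str, name="title"))
    #   # '"title" TEXT NOT NULL'
    #   SQLSpecAdapter.create_field(Spec(int, name="views", default=0))
    #   # '"views" INTEGER DEFAULT 0'   (NOT NULL is skipped when a default exists)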

    @classmethod
    def compose_structure(
        cls,
        op: Operable,
        name: str,
        /,
        *,
        include: set[str] | UnsetType = Unset,
        exclude: set[str] | UnsetType = Unset,
        **kwargs: Any,
    ) -> str:
        """Generate CREATE TABLE DDL from Operable.

        Args:
            op: Operable with Specs
            name: Table name
            include/exclude: Field name filters
            **kwargs: schema (default "public"), if_not_exists (default True),
                primary_key (column name), base_columns (prepend definitions)

        Returns:
            CREATE TABLE DDL statement with FK constraints
        """
        schema = kwargs.get("schema", "public")
        if_not_exists = kwargs.get("if_not_exists", True)
        primary_key = kwargs.get("primary_key")
        base_columns: list[str] = kwargs.get("base_columns", [])

        # Validate table and schema names
        validate_identifier(name, "table")
        validate_identifier(schema, "schema")

        specs = op.get_specs(include=include, exclude=exclude)

        columns: list[str] = list(base_columns)
        foreign_keys: list[str] = []

        for spec in specs:
            if not spec.name:
                continue

            col_def = cls.create_field(spec)

            if primary_key and spec.name == primary_key:
                col_def = col_def.replace(" NOT NULL", "") + " PRIMARY KEY"

            columns.append(col_def)

            fk = extract_kron_db_meta(spec.annotation, metas="FK")
            if isinstance(fk, FKMeta):
                # Validate FK-related identifiers
                validate_identifier(spec.name, "column")
                validate_identifier(fk.table_name, "referenced table")
                validate_identifier(fk.column, "referenced column")

                fk_constraint = (
                    f'CONSTRAINT "fk_{name}_{spec.name}" '
                    f'FOREIGN KEY ("{spec.name}") '
                    f'REFERENCES "{fk.table_name}"("{fk.column}") '
                    f"ON DELETE {OnAction(fk.on_delete)} ON UPDATE {OnAction(fk.on_update)}"
                )

                if fk.deferrable:
                    fk_constraint += " DEFERRABLE"
                    if fk.initially_deferred:
                        fk_constraint += " INITIALLY DEFERRED"

                foreign_keys.append(fk_constraint)

        all_defs = columns + foreign_keys
        exists_clause = "IF NOT EXISTS " if if_not_exists else ""
        qualified_name = f'"{schema}"."{name}"'

        ddl = f"CREATE TABLE {exists_clause}{qualified_name} (\n"
        ddl += ",\n".join(f"  {col}" for col in all_defs)
        ddl += "\n);"

        return ddl
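
    # Usage sketch (illustrative, not part of the packaged module): for the
    # Usage example in the class docstring, compose_structure would emit
    # roughly:
    #
    #   CREATE TABLE IF NOT EXISTS "public"."articles" (
    #     "title" TEXT NOT NULL,
    #     "views" INTEGER DEFAULT 0
    #   );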

    @classmethod
    def extract_specs(cls, structure: Any) -> tuple[Spec, ...]:
        """Extract Specs from Pydantic model. Delegates to PydanticSpecAdapter."""
        from .pydantic_adapter import PydanticSpecAdapter

        return PydanticSpecAdapter.extract_specs(structure)

    @classmethod
    def create_index(
        cls,
        table_name: str,
        column: str,
        *,
        index_name: str | None = None,
        unique: bool = False,
        method: str | IndexMethod = IndexMethod.BTREE,
        schema: str = "public",
    ) -> str:
        """Generate CREATE INDEX statement with configurable method."""
        validate_identifier(table_name, "table")
        validate_identifier(column, "column")
        validate_identifier(schema, "schema")

        idx_name = index_name or f"idx_{table_name}_{column}"
        validate_identifier(idx_name, "index")

        method_val = method.value if isinstance(method, IndexMethod) else method
        unique_clause = "UNIQUE " if unique else ""
        qualified_table = f'"{schema}"."{table_name}"'

        return (
            f"CREATE {unique_clause}INDEX IF NOT EXISTS {idx_name} "
            f'ON {qualified_table} USING {method_val} ("{column}");'
        )
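
    # Usage sketch (illustrative, not part of the packaged module): a GIN index
    # for a JSONB column.
    #
    #   SQLSpecAdapter.create_index("articles", "tags", method=IndexMethod.GIN)
    #   # 'CREATE INDEX IF NOT EXISTS idx_articles_tags ON "public"."articles" USING gin ("tags");'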

    @classmethod
    def create_vector_index(
        cls,
        table_name: str,
        column: str = "embedding",
        *,
        index_name: str | None = None,
        method: str | IndexMethod = IndexMethod.IVFFLAT,
        lists: int = 100,
        schema: str = "public",
    ) -> str:
        """Generate pgvector index (ivfflat or hnsw with vector_cosine_ops).

        Raises:
            ValueError: If method is not 'ivfflat' or 'hnsw'
        """
        validate_identifier(table_name, "table")
        validate_identifier(column, "column")
        validate_identifier(schema, "schema")

        idx_name = index_name or f"idx_{table_name}_{column}_vec"
        validate_identifier(idx_name, "index")

        qualified_table = f'"{schema}"."{table_name}"'
        method_val = method.value if isinstance(method, IndexMethod) else method

        if method_val == "ivfflat":
            return (
                f"CREATE INDEX IF NOT EXISTS {idx_name} "
                f'ON {qualified_table} USING ivfflat ("{column}" vector_cosine_ops) '
                f"WITH (lists = {lists});"
            )
        elif method_val == "hnsw":
            return (
                f"CREATE INDEX IF NOT EXISTS {idx_name} "
                f'ON {qualified_table} USING hnsw ("{column}" vector_cosine_ops);'
            )
        else:
            raise ValueError(f"Unsupported vector index method: {method_val}")
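
    # Usage sketch (illustrative, not part of the packaged module):
    #
    #   SQLSpecAdapter.create_vector_index("documents", "embedding", method="hnsw")
    #   # 'CREATE INDEX IF NOT EXISTS idx_documents_embedding_vec '
    #   # 'ON "public"."documents" USING hnsw ("embedding" vector_cosine_ops);'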

    @classmethod
    def create_table_spec(
        cls,
        op: Operable,
        name: str,
        *,
        schema: str = "public",
        primary_key: str = "id",
        **kwargs: Any,
    ) -> TableSpec:
        """Create TableSpec from Operable for advanced schema operations.

        Args:
            op: Operable with Specs
            name: Table name
            schema: Database schema
            primary_key: Primary key column
            **kwargs: indexes, triggers, check_constraints, unique_constraints

        Returns:
            TableSpec for diffing, introspection, or full DDL generation.
        """
        return TableSpec.from_operable(
            op,
            name,
            schema=schema,
            primary_key=primary_key,
            indexes=kwargs.get("indexes"),
            triggers=kwargs.get("triggers"),
            check_constraints=kwargs.get("check_constraints"),
            unique_constraints=kwargs.get("unique_constraints"),
        )