datablade 0.0.0__py3-none-any.whl → 0.0.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datablade/__init__.py +49 -1
- datablade/blade.py +322 -0
- datablade/core/__init__.py +28 -7
- datablade/core/frames.py +23 -236
- datablade/core/json.py +5 -10
- datablade/core/lists.py +5 -10
- datablade/core/messages.py +23 -11
- datablade/core/strings.py +5 -43
- datablade/core/zip.py +5 -24
- datablade/dataframes/__init__.py +51 -0
- datablade/dataframes/frames.py +585 -0
- datablade/dataframes/readers.py +1367 -0
- datablade/docs/ARCHITECTURE.md +102 -0
- datablade/docs/OBJECT_REGISTRY.md +194 -0
- datablade/docs/README.md +57 -0
- datablade/docs/TESTING.md +37 -0
- datablade/docs/USAGE.md +409 -0
- datablade/docs/__init__.py +87 -0
- datablade/docs/__main__.py +6 -0
- datablade/io/__init__.py +15 -0
- datablade/io/json.py +70 -0
- datablade/io/zip.py +111 -0
- datablade/registry.py +581 -0
- datablade/sql/__init__.py +56 -0
- datablade/sql/bulk_load.py +665 -0
- datablade/sql/ddl.py +402 -0
- datablade/sql/ddl_pyarrow.py +411 -0
- datablade/sql/dialects.py +12 -0
- datablade/sql/quoting.py +44 -0
- datablade/sql/schema_spec.py +65 -0
- datablade/sql/sqlserver.py +390 -0
- datablade/utils/__init__.py +38 -0
- datablade/utils/lists.py +32 -0
- datablade/utils/logging.py +204 -0
- datablade/utils/messages.py +29 -0
- datablade/utils/strings.py +249 -0
- datablade-0.0.6.dist-info/METADATA +406 -0
- datablade-0.0.6.dist-info/RECORD +41 -0
- {datablade-0.0.0.dist-info → datablade-0.0.6.dist-info}/WHEEL +1 -1
- {datablade-0.0.0.dist-info → datablade-0.0.6.dist-info/licenses}/LICENSE +20 -20
- datablade-0.0.0.dist-info/METADATA +0 -13
- datablade-0.0.0.dist-info/RECORD +0 -13
- {datablade-0.0.0.dist-info → datablade-0.0.6.dist-info}/top_level.txt +0 -0
datablade/registry.py
ADDED
|
@@ -0,0 +1,581 @@
|
|
|
1
|
+
"""In-memory object registry for SQL-like dot notation."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import re
|
|
6
|
+
from dataclasses import dataclass, field, replace
|
|
7
|
+
from typing import Any, Callable, Iterable, Mapping, Optional, Union
|
|
8
|
+
|
|
9
|
+
from .sql.dialects import Dialect
|
|
10
|
+
from .sql.quoting import quote_identifier
|
|
11
|
+
|
|
12
|
+
# Children of an ObjectNode are either nested namespace nodes or leaf refs.
ObjectChild = Union["ObjectNode", "ObjectRef"]

# Canonical qualifier segments a DialectSpec may reference.
_ALLOWED_SEGMENTS = {"host", "catalog", "schema", "object"}
# Accepted alternate spellings mapped onto canonical segment names.
_SEGMENT_ALIASES = {"collection": "object"}
# Key-normalization policies accepted in the config's defaults.name_policy.
_NAME_POLICIES = {"preserve", "lower", "upper", "normalize"}
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass(frozen=True, slots=True)
class DialectSpec:
    """Defines how to qualify names for a dialect."""

    # Ordered qualifier segments, e.g. ("catalog", "schema", "object").
    qualifier: tuple[str, ...]
    # Quoting style name understood by _quote_segment; None means unquoted.
    quote_style: Optional[str] = None
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@dataclass(frozen=True, slots=True)
|
|
28
|
+
class ObjectRef:
|
|
29
|
+
"""Immutable object metadata reference."""
|
|
30
|
+
|
|
31
|
+
path: str
|
|
32
|
+
name: str
|
|
33
|
+
object_type: str
|
|
34
|
+
host: Optional[str] = None
|
|
35
|
+
catalog: Optional[str] = None
|
|
36
|
+
schema: Optional[str] = None
|
|
37
|
+
content: Any = None
|
|
38
|
+
aliases: tuple[str, ...] = ()
|
|
39
|
+
tags: Mapping[str, str] = field(default_factory=dict)
|
|
40
|
+
_dialects: Mapping[str, DialectSpec] = field(
|
|
41
|
+
default_factory=dict, repr=False, compare=False
|
|
42
|
+
)
|
|
43
|
+
|
|
44
|
+
def qualified(self, dialect: str = "sqlserver") -> str:
|
|
45
|
+
"""Return a dialect-qualified name using available segments."""
|
|
46
|
+
spec = self._dialects.get(dialect)
|
|
47
|
+
if spec is None:
|
|
48
|
+
raise KeyError(f"Dialect not configured: {dialect}")
|
|
49
|
+
segments: list[str] = []
|
|
50
|
+
for segment in spec.qualifier:
|
|
51
|
+
value = _segment_value(self, segment)
|
|
52
|
+
if value:
|
|
53
|
+
segments.append(_quote_segment(value, spec.quote_style))
|
|
54
|
+
return ".".join(segments)
|
|
55
|
+
|
|
56
|
+
def with_content(self, content: Any) -> "ObjectRef":
|
|
57
|
+
"""Return a new ObjectRef with updated content."""
|
|
58
|
+
return replace(self, content=content)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class ObjectNode:
    """Namespace container that supports dot and key access."""

    __slots__ = ("name", "path", "_normalizer", "_children", "_raw_map", "_alias_map")

    def __init__(self, name: str, path: str, normalizer: Callable[[str], str]) -> None:
        self.name = name
        self.path = path
        self._normalizer = normalizer
        # canonical (normalized) key -> child node/ref
        self._children: dict[str, ObjectChild] = {}
        # raw (as-authored) key -> canonical key
        self._raw_map: dict[str, str] = {}
        # normalized alias -> canonical key
        self._alias_map: dict[str, str] = {}

    def add_child(
        self,
        key: str,
        child: ObjectChild,
        *,
        aliases: Iterable[str] = (),
        strict: bool = True,
    ) -> None:
        """Register *child* under *key*; optionally register aliases.

        In strict mode an alias that clashes with a sibling raises; otherwise
        the clashing alias is silently skipped.
        """
        canonical = self._normalizer(key)
        if canonical in self._children:
            raise ValueError(
                f"Key collision at {self.path or '<root>'}: '{key}' conflicts after normalization."
            )
        self._children[canonical] = child
        self._raw_map[key] = canonical
        for alias in aliases:
            alias_key = self._normalizer(alias)
            taken = alias_key in self._children or alias_key in self._alias_map
            if not taken:
                self._alias_map[alias_key] = canonical
            elif strict:
                raise ValueError(
                    f"Alias collision at {self.path or '<root>'}: '{alias}' conflicts with a sibling."
                )

    def _resolve(self, key: str) -> tuple[str, ObjectChild]:
        """Map *key* (raw, alias, or normalized) to (canonical key, child)."""
        canonical = self._raw_map.get(key)
        if canonical is None:
            normalized = self._normalizer(key)
            canonical = self._alias_map.get(normalized, normalized)
        if canonical in self._children:
            return canonical, self._children[canonical]
        raise KeyError(key)

    def _replace_child(self, key: str, child: ObjectChild) -> None:
        """Swap the child stored at canonical *key* in place."""
        if key in self._children:
            self._children[key] = child
        else:
            raise KeyError(key)

    def __getattr__(self, key: str) -> ObjectChild:
        # Underscore names are never namespace children.
        if key.startswith("_"):
            raise AttributeError(key)
        try:
            return self._resolve(key)[1]
        except KeyError as exc:
            raise AttributeError(key) from exc

    def __getitem__(self, key: str) -> ObjectChild:
        return self._resolve(key)[1]

    def get(self, key: str, default: Any = None) -> Any:
        """Safe lookup by key or alias; returns *default* when absent."""
        try:
            return self[key]
        except KeyError:
            return default

    def iter_objects(self) -> Iterable[ObjectRef]:
        """Yield every ObjectRef in this subtree, depth-first."""
        for child in self._children.values():
            if isinstance(child, ObjectNode):
                yield from child.iter_objects()
            elif isinstance(child, ObjectRef):
                yield child
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
class ObjectRegistry:
    """Root registry for object metadata and namespace lookup.

    Wraps a tree of ObjectNode/ObjectRef values and exposes attribute, key,
    and dotted-path access plus dialect-aware name qualification.
    """

    __slots__ = ("root", "_dialects", "_name_policy")

    def __init__(
        self,
        root: ObjectNode,
        *,
        dialects: Mapping[str, DialectSpec],
        name_policy: str,
    ) -> None:
        self.root = root
        self._dialects = dialects
        self._name_policy = name_policy

    def __getattr__(self, key: str) -> ObjectChild:
        # Delegate attribute access to the root namespace node.
        return getattr(self.root, key)

    def __getitem__(self, key: str) -> ObjectChild:
        return self.root[key]

    @classmethod
    def from_mapping(
        cls,
        mapping: Mapping[str, Any],
        *,
        dialects: Optional[Mapping[str, Any]] = None,
        strict: bool = True,
    ) -> "ObjectRegistry":
        """Build a registry from a parsed configuration mapping.

        Parameters
        ----------
        mapping:
            Configuration with an integer ``version``, optional
            ``defaults``/``dialects`` mappings, and at least one of
            ``catalogs`` or ``hosts``.
        dialects:
            Optional dialect-spec overrides applied on top of the config.
        strict:
            When True, alias collisions and unknown qualifier segments raise.

        Raises
        ------
        ValueError
            On any malformed configuration value.
        """
        if not isinstance(mapping, Mapping):
            raise ValueError("mapping must be a dict-like object")

        version = mapping.get("version")
        if not isinstance(version, int):
            raise ValueError("version must be an integer")

        # Treat an absent/null "defaults" as empty; any other non-mapping is
        # rejected here. (Previously a falsy non-mapping such as [] or ""
        # slipped past validation and crashed later with AttributeError.)
        defaults = mapping.get("defaults")
        if defaults is None:
            defaults = {}
        elif not isinstance(defaults, Mapping):
            raise ValueError("defaults must be a mapping when provided")

        name_policy = defaults.get("name_policy", "preserve")
        if name_policy not in _NAME_POLICIES:
            raise ValueError(f"Unsupported name_policy: {name_policy}")

        normalizer = _make_normalizer(name_policy)

        # Same None-vs-non-mapping treatment as "defaults" above.
        config_dialects = mapping.get("dialects")
        if config_dialects is None:
            config_dialects = {}
        elif not isinstance(config_dialects, Mapping):
            raise ValueError("dialects must be a mapping when provided")

        dialect_map = _build_dialects(config_dialects, override=dialects, strict=strict)

        catalogs = mapping.get("catalogs")
        hosts = mapping.get("hosts")
        if not catalogs and not hosts:
            raise ValueError("Either catalogs or hosts must be provided")

        root = ObjectNode(name="root", path="", normalizer=normalizer)
        if catalogs:
            catalogs_node = ObjectNode(
                name="catalogs", path="catalogs", normalizer=normalizer
            )
            root.add_child("catalogs", catalogs_node, strict=strict)
            _build_catalogs(
                catalogs_node,
                catalogs,
                defaults=defaults,
                dialects=dialect_map,
                normalizer=normalizer,
                strict=strict,
                host_context=defaults.get("host"),
            )

        if hosts:
            hosts_node = ObjectNode(name="hosts", path="hosts", normalizer=normalizer)
            root.add_child("hosts", hosts_node, strict=strict)
            _build_hosts(
                hosts_node,
                hosts,
                defaults=defaults,
                dialects=dialect_map,
                normalizer=normalizer,
                strict=strict,
            )

        return cls(root, dialects=dialect_map, name_policy=name_policy)

    @classmethod
    def from_yaml(cls, path: str, **kwargs: Any) -> "ObjectRegistry":
        """Load a YAML file with ``yaml.safe_load`` and build a registry."""
        try:
            import yaml
        except ImportError as exc:
            raise ImportError(
                "PyYAML is required for ObjectRegistry.from_yaml; install pyyaml."
            ) from exc

        with open(path, "r", encoding="utf-8") as handle:
            data = yaml.safe_load(handle)
        return cls.from_mapping(data, **kwargs)

    def get(self, path: str) -> ObjectChild:
        """Resolve a dotted path of keys/aliases to a node or object ref.

        Raises ValueError on an empty/non-string path and KeyError when any
        segment fails to resolve or a leaf is hit before the path ends.
        """
        if not path or not isinstance(path, str):
            raise ValueError("path must be a non-empty string")
        current: ObjectChild = self.root
        for part in path.split("."):
            if not isinstance(current, ObjectNode):
                # Reached a leaf while path segments remain.
                raise KeyError(path)
            _, current = current._resolve(part)
        return current

    def iter_objects(
        self,
        *,
        object_type: Optional[str] = None,
        tags: Optional[Mapping[str, str]] = None,
    ) -> Iterable[ObjectRef]:
        """Yield all ObjectRefs, optionally filtered by type and/or tags.

        When *tags* is given, every key/value pair must match exactly.
        """
        for obj in self.root.iter_objects():
            if object_type and obj.object_type != object_type:
                continue
            if tags and any(obj.tags.get(k) != v for k, v in tags.items()):
                continue
            yield obj

    def bind_content(self, path: str, content: Any) -> None:
        """Attach runtime *content* to the object at *path*.

        ObjectRef is frozen, so the parent's child entry is replaced with a
        copy carrying the new content.
        """
        parent, key, child = self._resolve_parent(path)
        if not isinstance(child, ObjectRef):
            raise ValueError(f"Path does not resolve to an object: {path}")
        parent._replace_child(key, child.with_content(content))

    def qualify(self, path: str, dialect: str = "sqlserver") -> str:
        """Return the dialect-qualified SQL name for the object at *path*."""
        obj = self.get(path)
        if not isinstance(obj, ObjectRef):
            raise ValueError(f"Path does not resolve to an object: {path}")
        return obj.qualified(dialect=dialect)

    def _resolve_parent(self, path: str) -> tuple[ObjectNode, str, ObjectChild]:
        """Walk *path* and return (parent node, canonical key, child)."""
        if not path or not isinstance(path, str):
            raise ValueError("path must be a non-empty string")
        parts = path.split(".")
        parent: ObjectNode = self.root
        for part in parts[:-1]:
            _, child = parent._resolve(part)
            if not isinstance(child, ObjectNode):
                raise KeyError(path)
            parent = child
        key, child = parent._resolve(parts[-1])
        return parent, key, child
|
|
296
|
+
|
|
297
|
+
|
|
298
|
+
def _build_dialects(
    dialects: Mapping[str, Any],
    *,
    override: Optional[Mapping[str, Any]] = None,
    strict: bool = True,
) -> dict[str, DialectSpec]:
    """Merge configured and override dialect specs on top of the defaults.

    Later sources win; every spec is validated before being accepted.
    """

    def parse(name: str, spec: Any) -> DialectSpec:
        # One config entry -> a validated DialectSpec.
        if not isinstance(spec, Mapping):
            raise ValueError(f"dialects.{name} must be a mapping")
        qualifier = spec.get("qualifier")
        if not qualifier or not isinstance(qualifier, str):
            raise ValueError(f"dialects.{name}.qualifier must be a string")
        segments: list[str] = []
        for raw in qualifier.split("."):
            token = _SEGMENT_ALIASES.get(raw.strip(), raw.strip())
            if not token:
                continue
            if token in _ALLOWED_SEGMENTS:
                segments.append(token)
            elif strict:
                raise ValueError(
                    f"dialects.{name}.qualifier uses unknown segment '{token}'"
                )
            # non-strict: unknown segments are dropped
        if not segments:
            raise ValueError(f"dialects.{name}.qualifier has no valid segments")
        quote_style = spec.get("quote_style")
        if quote_style is not None and not isinstance(quote_style, str):
            raise ValueError(f"dialects.{name}.quote_style must be a string")
        return DialectSpec(tuple(segments), quote_style)

    merged = dict(_default_dialects())
    for source in (dialects, override or {}):
        for name, spec in source.items():
            merged[name] = parse(name, spec)
    return merged
|
|
332
|
+
|
|
333
|
+
|
|
334
|
+
def _default_dialects() -> dict[str, DialectSpec]:
    """Built-in dialect specs available without any configuration."""
    schema_object = ("schema", "object")
    return {
        "sqlserver": DialectSpec(("catalog",) + schema_object),
        "postgres": DialectSpec(schema_object),
        "mysql": DialectSpec(schema_object),
        "duckdb": DialectSpec(schema_object),
        "nosql": DialectSpec(("object",)),
    }
|
|
342
|
+
|
|
343
|
+
|
|
344
|
+
def _make_normalizer(policy: str) -> Callable[[str], str]:
|
|
345
|
+
if policy == "preserve":
|
|
346
|
+
return lambda value: value
|
|
347
|
+
if policy == "lower":
|
|
348
|
+
return lambda value: value.lower()
|
|
349
|
+
if policy == "upper":
|
|
350
|
+
return lambda value: value.upper()
|
|
351
|
+
if policy == "normalize":
|
|
352
|
+
return _normalize_identifier
|
|
353
|
+
raise ValueError(f"Unsupported name_policy: {policy}")
|
|
354
|
+
|
|
355
|
+
|
|
356
|
+
def _normalize_identifier(value: str) -> str:
|
|
357
|
+
cleaned = re.sub(r"[^a-zA-Z0-9_]", "_", value.lower())
|
|
358
|
+
cleaned = re.sub(r"_+", "_", cleaned)
|
|
359
|
+
if not cleaned:
|
|
360
|
+
cleaned = "_"
|
|
361
|
+
if cleaned[0].isdigit():
|
|
362
|
+
cleaned = "_" + cleaned
|
|
363
|
+
return cleaned
|
|
364
|
+
|
|
365
|
+
|
|
366
|
+
def _build_hosts(
    hosts_node: ObjectNode,
    hosts: Mapping[str, Any],
    *,
    defaults: Mapping[str, Any],
    dialects: Mapping[str, DialectSpec],
    normalizer: Callable[[str], str],
    strict: bool,
) -> None:
    """Attach one node per host entry and populate its catalogs."""
    if not isinstance(hosts, Mapping):
        raise ValueError("hosts must be a mapping")
    for key, entry in hosts.items():
        if not isinstance(key, str) or not key.strip():
            raise ValueError("hosts keys must be non-empty strings")
        if not isinstance(entry, Mapping):
            raise ValueError(f"hosts.{key} must be a mapping")
        catalogs = entry.get("catalogs")
        if not catalogs:
            raise ValueError(f"hosts.{key}.catalogs must be provided")

        node = ObjectNode(
            name=key,
            path=_join_path(hosts_node.path, key, normalizer),
            normalizer=normalizer,
        )
        hosts_node.add_child(key, node, strict=strict)
        _build_catalogs(
            node,
            catalogs,
            defaults=defaults,
            dialects=dialects,
            normalizer=normalizer,
            strict=strict,
            # Entry-level host wins over the configured default.
            host_context=entry.get("host") or defaults.get("host"),
        )
|
|
399
|
+
|
|
400
|
+
|
|
401
|
+
def _build_catalogs(
    parent_node: ObjectNode,
    catalogs: Mapping[str, Any],
    *,
    defaults: Mapping[str, Any],
    dialects: Mapping[str, DialectSpec],
    normalizer: Callable[[str], str],
    strict: bool,
    host_context: Optional[str],
) -> None:
    """Attach catalog nodes under *parent_node* with their objects/schemas."""
    if not isinstance(catalogs, Mapping):
        raise ValueError(f"{parent_node.path}.catalogs must be a mapping")
    for key, entry in catalogs.items():
        if not isinstance(key, str) or not key.strip():
            raise ValueError("catalog keys must be non-empty strings")
        if not isinstance(entry, Mapping):
            raise ValueError(f"{parent_node.path}.{key} must be a mapping")

        catalog_name = entry.get("catalog") or key
        node = ObjectNode(
            name=key,
            path=_join_path(parent_node.path, key, normalizer),
            normalizer=normalizer,
        )
        parent_node.add_child(key, node, strict=strict)

        # Catalog-level objects fall back to the configured (or "dbo") schema.
        _build_objects(
            node,
            entry.get("objects", {}),
            defaults=defaults,
            dialects=dialects,
            normalizer=normalizer,
            strict=strict,
            host=host_context,
            catalog=catalog_name,
            schema=defaults.get("schema", "dbo"),
        )
        _build_schemas(
            node,
            entry.get("schemas", {}),
            defaults=defaults,
            dialects=dialects,
            normalizer=normalizer,
            strict=strict,
            host=host_context,
            catalog=catalog_name,
        )
|
|
448
|
+
|
|
449
|
+
|
|
450
|
+
def _build_schemas(
    catalog_node: ObjectNode,
    schemas: Mapping[str, Any],
    *,
    defaults: Mapping[str, Any],
    dialects: Mapping[str, DialectSpec],
    normalizer: Callable[[str], str],
    strict: bool,
    host: Optional[str],
    catalog: Optional[str],
) -> None:
    """Attach schema nodes under *catalog_node* and populate their objects."""
    if not isinstance(schemas, Mapping):
        raise ValueError(f"{catalog_node.path}.schemas must be a mapping")
    for key, entry in schemas.items():
        if not isinstance(key, str) or not key.strip():
            raise ValueError("schema keys must be non-empty strings")
        if not isinstance(entry, Mapping):
            raise ValueError(f"{catalog_node.path}.{key} must be a mapping")

        node = ObjectNode(
            name=key,
            path=_join_path(catalog_node.path, key, normalizer),
            normalizer=normalizer,
        )
        catalog_node.add_child(key, node, strict=strict)
        _build_objects(
            node,
            entry.get("objects", {}),
            defaults=defaults,
            dialects=dialects,
            normalizer=normalizer,
            strict=strict,
            host=host,
            catalog=catalog,
            # Explicit "schema" in the entry wins over the node key.
            schema=entry.get("schema") or key,
        )
|
|
486
|
+
|
|
487
|
+
|
|
488
|
+
def _build_objects(
    parent_node: ObjectNode,
    objects: Mapping[str, Any],
    *,
    defaults: Mapping[str, Any],
    dialects: Mapping[str, DialectSpec],
    normalizer: Callable[[str], str],
    strict: bool,
    host: Optional[str],
    catalog: Optional[str],
    schema: Optional[str],
) -> None:
    """Create an ObjectRef per entry and register it (plus aliases)."""
    if not isinstance(objects, Mapping):
        raise ValueError(f"{parent_node.path}.objects must be a mapping")
    for key, entry in objects.items():
        if not isinstance(key, str) or not key.strip():
            raise ValueError("object keys must be non-empty strings")
        if not isinstance(entry, Mapping):
            raise ValueError(f"{parent_node.path}.{key} must be a mapping")

        object_type = entry.get("object_type") or defaults.get(
            "object_type", "table"
        )
        if not isinstance(object_type, str) or not object_type.strip():
            raise ValueError(
                f"{parent_node.path}.{key}.object_type must be a string"
            )

        # None is normalized to the empty default; other non-lists are errors.
        aliases = entry.get("aliases", [])
        if aliases is None:
            aliases = []
        elif not isinstance(aliases, list):
            raise ValueError(f"{parent_node.path}.{key}.aliases must be a list")

        tags = entry.get("tags", {})
        if tags is None:
            tags = {}
        elif not isinstance(tags, Mapping):
            raise ValueError(f"{parent_node.path}.{key}.tags must be a mapping")

        ref = ObjectRef(
            path=_join_path(parent_node.path, key, normalizer),
            name=entry.get("name") or key,
            object_type=object_type,
            host=host,
            catalog=catalog,
            schema=schema,
            content=entry.get("content"),
            aliases=tuple(aliases),
            tags=dict(tags),
            _dialects=dialects,
        )
        parent_node.add_child(key, ref, aliases=aliases, strict=strict)
|
|
541
|
+
|
|
542
|
+
|
|
543
|
+
def _join_path(parent_path: str, key: str, normalizer: Callable[[str], str]) -> str:
|
|
544
|
+
normalized = normalizer(key)
|
|
545
|
+
return f"{parent_path}.{normalized}" if parent_path else normalized
|
|
546
|
+
|
|
547
|
+
|
|
548
|
+
def _segment_value(obj: ObjectRef, segment: str) -> Optional[str]:
|
|
549
|
+
if segment == "host":
|
|
550
|
+
return obj.host
|
|
551
|
+
if segment == "catalog":
|
|
552
|
+
return obj.catalog
|
|
553
|
+
if segment == "schema":
|
|
554
|
+
return obj.schema
|
|
555
|
+
if segment == "object":
|
|
556
|
+
return obj.name
|
|
557
|
+
return None
|
|
558
|
+
|
|
559
|
+
|
|
560
|
+
def _quote_segment(value: str, quote_style: Optional[str]) -> str:
|
|
561
|
+
if not quote_style or quote_style == "none":
|
|
562
|
+
return value
|
|
563
|
+
style = quote_style.lower()
|
|
564
|
+
mapping = {
|
|
565
|
+
"sqlserver": Dialect.SQLSERVER,
|
|
566
|
+
"postgres": Dialect.POSTGRES,
|
|
567
|
+
"mysql": Dialect.MYSQL,
|
|
568
|
+
"duckdb": Dialect.DUCKDB,
|
|
569
|
+
}
|
|
570
|
+
dialect = mapping.get(style)
|
|
571
|
+
if dialect is None:
|
|
572
|
+
return value
|
|
573
|
+
return quote_identifier(value, dialect)
|
|
574
|
+
|
|
575
|
+
|
|
576
|
+
# Public API of datablade.registry.
__all__ = [
    "DialectSpec",
    "ObjectRef",
    "ObjectNode",
    "ObjectRegistry",
]
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
"""
|
|
2
|
+
SQL utilities for datablade.
|
|
3
|
+
|
|
4
|
+
Provides dialect-aware quoting, DDL generation, and bulk loading.
|
|
5
|
+
Supports SQL Server, PostgreSQL, MySQL, and DuckDB.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from .bulk_load import (
|
|
9
|
+
bulk_load,
|
|
10
|
+
bulk_load_duckdb,
|
|
11
|
+
bulk_load_mysql,
|
|
12
|
+
bulk_load_postgres,
|
|
13
|
+
bulk_load_sqlserver,
|
|
14
|
+
bulk_load_sqlserver_command,
|
|
15
|
+
bulk_load_sqlserver_commands,
|
|
16
|
+
bulk_load_sqlserver_many,
|
|
17
|
+
write_dataframe_and_load,
|
|
18
|
+
)
|
|
19
|
+
from .ddl import generate_create_table
|
|
20
|
+
from .ddl_pyarrow import (
|
|
21
|
+
DroppedColumn,
|
|
22
|
+
FallbackColumn,
|
|
23
|
+
ParquetDDLMetadata,
|
|
24
|
+
generate_create_table_from_parquet,
|
|
25
|
+
)
|
|
26
|
+
from .dialects import Dialect
|
|
27
|
+
from .quoting import quote_identifier
|
|
28
|
+
from .sqlserver import (
|
|
29
|
+
sqlserver_bulk_insert_statements,
|
|
30
|
+
sqlserver_create_and_insert_from_parquet,
|
|
31
|
+
sqlserver_create_and_stage_from_parquets,
|
|
32
|
+
sqlserver_openrowset_parquet,
|
|
33
|
+
)
|
|
34
|
+
|
|
35
|
+
# Public API re-exported at the datablade.sql package level.
__all__ = [
    "Dialect",
    "quote_identifier",
    "generate_create_table",
    "generate_create_table_from_parquet",
    "DroppedColumn",
    "FallbackColumn",
    "ParquetDDLMetadata",
    "bulk_load",
    "bulk_load_sqlserver",
    "bulk_load_sqlserver_command",
    "bulk_load_sqlserver_commands",
    "bulk_load_sqlserver_many",
    "bulk_load_postgres",
    "bulk_load_mysql",
    "bulk_load_duckdb",
    "write_dataframe_and_load",
    "sqlserver_openrowset_parquet",
    "sqlserver_bulk_insert_statements",
    "sqlserver_create_and_insert_from_parquet",
    "sqlserver_create_and_stage_from_parquets",
]
|