trellis_datamodel-0.3.3-py3-none-any.whl
This diff represents the contents of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects the package exactly as it appears in its public registry.
- trellis_datamodel/__init__.py +8 -0
- trellis_datamodel/adapters/__init__.py +41 -0
- trellis_datamodel/adapters/base.py +147 -0
- trellis_datamodel/adapters/dbt_core.py +975 -0
- trellis_datamodel/cli.py +292 -0
- trellis_datamodel/config.py +239 -0
- trellis_datamodel/models/__init__.py +13 -0
- trellis_datamodel/models/schemas.py +28 -0
- trellis_datamodel/routes/__init__.py +11 -0
- trellis_datamodel/routes/data_model.py +221 -0
- trellis_datamodel/routes/manifest.py +110 -0
- trellis_datamodel/routes/schema.py +183 -0
- trellis_datamodel/server.py +101 -0
- trellis_datamodel/static/_app/env.js +1 -0
- trellis_datamodel/static/_app/immutable/assets/0.ByDwyx3a.css +1 -0
- trellis_datamodel/static/_app/immutable/assets/2.DLAp_5AW.css +1 -0
- trellis_datamodel/static/_app/immutable/assets/trellis_squared.CTOnsdDx.svg +127 -0
- trellis_datamodel/static/_app/immutable/chunks/8ZaN1sxc.js +1 -0
- trellis_datamodel/static/_app/immutable/chunks/BfBfOTnK.js +1 -0
- trellis_datamodel/static/_app/immutable/chunks/C3yhlRfZ.js +2 -0
- trellis_datamodel/static/_app/immutable/chunks/CK3bXPEX.js +1 -0
- trellis_datamodel/static/_app/immutable/chunks/CXDUumOQ.js +1 -0
- trellis_datamodel/static/_app/immutable/chunks/DDNfEvut.js +1 -0
- trellis_datamodel/static/_app/immutable/chunks/DUdVct7e.js +1 -0
- trellis_datamodel/static/_app/immutable/chunks/QRltG_J6.js +2 -0
- trellis_datamodel/static/_app/immutable/chunks/zXDdy2c_.js +1 -0
- trellis_datamodel/static/_app/immutable/entry/app.abCkWeAJ.js +2 -0
- trellis_datamodel/static/_app/immutable/entry/start.B7CjH6Z7.js +1 -0
- trellis_datamodel/static/_app/immutable/nodes/0.bFI_DI3G.js +1 -0
- trellis_datamodel/static/_app/immutable/nodes/1.J_r941Qf.js +1 -0
- trellis_datamodel/static/_app/immutable/nodes/2.WqbMkq6o.js +27 -0
- trellis_datamodel/static/_app/version.json +1 -0
- trellis_datamodel/static/index.html +40 -0
- trellis_datamodel/static/robots.txt +3 -0
- trellis_datamodel/static/trellis_squared.svg +127 -0
- trellis_datamodel/tests/__init__.py +2 -0
- trellis_datamodel/tests/conftest.py +132 -0
- trellis_datamodel/tests/test_cli.py +526 -0
- trellis_datamodel/tests/test_data_model.py +151 -0
- trellis_datamodel/tests/test_dbt_schema.py +892 -0
- trellis_datamodel/tests/test_manifest.py +72 -0
- trellis_datamodel/tests/test_server_static.py +44 -0
- trellis_datamodel/tests/test_yaml_handler.py +228 -0
- trellis_datamodel/utils/__init__.py +2 -0
- trellis_datamodel/utils/yaml_handler.py +365 -0
- trellis_datamodel-0.3.3.dist-info/METADATA +333 -0
- trellis_datamodel-0.3.3.dist-info/RECORD +52 -0
- trellis_datamodel-0.3.3.dist-info/WHEEL +5 -0
- trellis_datamodel-0.3.3.dist-info/entry_points.txt +2 -0
- trellis_datamodel-0.3.3.dist-info/licenses/LICENSE +661 -0
- trellis_datamodel-0.3.3.dist-info/licenses/NOTICE +6 -0
- trellis_datamodel-0.3.3.dist-info/top_level.txt +1 -0
trellis_datamodel/routes/data_model.py
@@ -0,0 +1,221 @@
+"""Routes for data model CRUD operations."""
+
+from fastapi import APIRouter, HTTPException
+import yaml
+import os
+from typing import Dict, Any, List, Tuple
+
+from trellis_datamodel import config as cfg
+from trellis_datamodel.models.schemas import DataModelUpdate
+
+router = APIRouter(prefix="/api", tags=["data-model"])
+
+
+def _load_canvas_layout() -> Dict[str, Any]:
+    """Load canvas layout file if it exists."""
+    if not os.path.exists(cfg.CANVAS_LAYOUT_PATH):
+        return {"version": 0.1, "entities": {}, "relationships": {}}
+
+    try:
+        with open(cfg.CANVAS_LAYOUT_PATH, "r") as f:
+            layout = yaml.safe_load(f) or {}
+        return {
+            "version": layout.get("version", 0.1),
+            "entities": layout.get("entities", {}),
+            "relationships": layout.get("relationships", {}),
+        }
+    except Exception as e:
+        print(f"Warning: Could not load canvas layout: {e}")
+        return {"version": 0.1, "entities": {}, "relationships": {}}
+
+
+def _merge_layout_into_model(
+    model_data: Dict[str, Any], layout_data: Dict[str, Any]
+) -> Dict[str, Any]:
+    """Merge canvas layout data into model data."""
+    entities_layout = layout_data.get("entities", {})
+    relationships_layout = layout_data.get("relationships", {})
+
+    # Merge entity visual properties
+    entities = model_data.get("entities", [])
+    for entity in entities:
+        entity_id = entity.get("id")
+        if entity_id and entity_id in entities_layout:
+            layout = entities_layout[entity_id]
+            if "position" in layout:
+                entity["position"] = layout["position"]
+            if "width" in layout:
+                entity["width"] = layout["width"]
+            if "panel_height" in layout:
+                entity["panel_height"] = layout["panel_height"]
+            if "collapsed" in layout:
+                entity["collapsed"] = layout["collapsed"]
+
+    # Merge relationship visual properties
+    relationships = model_data.get("relationships", [])
+    for idx, relationship in enumerate(relationships):
+        source = relationship.get("source")
+        target = relationship.get("target")
+        if source and target:
+            # Create key: source-target-index
+            rel_key = f"{source}-{target}-{idx}"
+            if rel_key in relationships_layout:
+                layout = relationships_layout[rel_key]
+                if "label_dx" in layout:
+                    relationship["label_dx"] = layout["label_dx"]
+                if "label_dy" in layout:
+                    relationship["label_dy"] = layout["label_dy"]
+
+    return model_data
+
+
+@router.get("/data-model")
+async def get_data_model():
+    """Return the current data model with layout merged in."""
+    if not os.path.exists(cfg.DATA_MODEL_PATH):
+        return {"version": 0.1, "entities": [], "relationships": []}
+
+    try:
+        # Load model data
+        with open(cfg.DATA_MODEL_PATH, "r") as f:
+            model_data = yaml.safe_load(f) or {}
+
+        if not model_data.get("entities"):
+            model_data["entities"] = []
+        if not model_data.get("relationships"):
+            model_data["relationships"] = []
+
+        # Load and merge layout data
+        layout_data = _load_canvas_layout()
+        merged_data = _merge_layout_into_model(model_data, layout_data)
+
+        return merged_data
+    except Exception as e:
+        raise HTTPException(
+            status_code=500, detail=f"Error reading data model: {str(e)}"
+        )
+
+
+def _split_model_and_layout(
+    content: Dict[str, Any],
+) -> Tuple[Dict[str, Any], Dict[str, Any]]:
+    """Split incoming data into model-only and layout-only dictionaries."""
+    model_data = {
+        "version": content.get("version", 0.1),
+        "entities": [],
+        "relationships": [],
+    }
+
+    layout_data = {"version": 0.1, "entities": {}, "relationships": {}}
+
+    # Split entities
+    entities = content.get("entities", [])
+    for entity in entities:
+        entity_id = entity.get("id")
+        if not entity_id:
+            continue
+
+        # Model-only properties
+        model_entity = {
+            "id": entity_id,
+            "label": entity.get("label", ""),
+        }
+        if "description" in entity:
+            model_entity["description"] = entity["description"]
+        if "dbt_model" in entity:
+            model_entity["dbt_model"] = entity["dbt_model"]
+        if "additional_models" in entity:
+            model_entity["additional_models"] = entity["additional_models"]
+        if "drafted_fields" in entity:
+            model_entity["drafted_fields"] = entity["drafted_fields"]
+        if "tags" in entity:
+            model_entity["tags"] = entity["tags"]
+
+        model_data["entities"].append(model_entity)
+
+        # Layout-only properties
+        layout_entity = {}
+        if "position" in entity:
+            layout_entity["position"] = entity["position"]
+        if "width" in entity:
+            layout_entity["width"] = entity["width"]
+        if "panel_height" in entity:
+            layout_entity["panel_height"] = entity["panel_height"]
+        if "collapsed" in entity:
+            layout_entity["collapsed"] = entity["collapsed"]
+
+        if layout_entity:
+            layout_data["entities"][entity_id] = layout_entity
+
+    # Split relationships
+    relationships = content.get("relationships", [])
+    for idx, relationship in enumerate(relationships):
+        source = relationship.get("source")
+        target = relationship.get("target")
+        if not source or not target:
+            continue
+
+        # Model-only properties
+        model_rel = {
+            "source": source,
+            "target": target,
+        }
+        if "label" in relationship:
+            model_rel["label"] = relationship["label"]
+        if "type" in relationship:
+            model_rel["type"] = relationship["type"]
+        if "source_field" in relationship:
+            model_rel["source_field"] = relationship["source_field"]
+        if "target_field" in relationship:
+            model_rel["target_field"] = relationship["target_field"]
+
+        model_data["relationships"].append(model_rel)
+
+        # Layout-only properties
+        layout_rel = {}
+        if "label_dx" in relationship:
+            layout_rel["label_dx"] = relationship["label_dx"]
+        if "label_dy" in relationship:
+            layout_rel["label_dy"] = relationship["label_dy"]
+
+        if layout_rel:
+            # Use source-target-index as key
+            rel_key = f"{source}-{target}-{idx}"
+            layout_data["relationships"][rel_key] = layout_rel
+
+    return model_data, layout_data
+
+
+@router.post("/data-model")
+async def save_data_model(data: DataModelUpdate):
+    """Save the data model, splitting model and layout into separate files."""
+    try:
+        content = data.dict()  # Pydantic v1 (required by dbt-core==1.10)
+
+        # Split into model and layout
+        model_data, layout_data = _split_model_and_layout(content)
+
+        # Save model file
+        print(f"Saving data model to: {cfg.DATA_MODEL_PATH}")
+        os.makedirs(os.path.dirname(cfg.DATA_MODEL_PATH), exist_ok=True)
+        with open(cfg.DATA_MODEL_PATH, "w") as f:
+            yaml.dump(model_data, f, default_flow_style=False, sort_keys=False)
+            f.flush()
+            os.fsync(f.fileno())
+
+        # Save layout file
+        print(f"Saving canvas layout to: {cfg.CANVAS_LAYOUT_PATH}")
+        os.makedirs(os.path.dirname(cfg.CANVAS_LAYOUT_PATH), exist_ok=True)
+        with open(cfg.CANVAS_LAYOUT_PATH, "w") as f:
+            yaml.dump(layout_data, f, default_flow_style=False, sort_keys=False)
+            f.flush()
+            os.fsync(f.fileno())
+
+        return {"status": "success"}
+    except Exception as e:
+        import traceback
+
+        traceback.print_exc()
+        raise HTTPException(
+            status_code=500, detail=f"Error saving data model: {str(e)}"
+        )
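A minimal sketch (not part of the package) of the split/merge round trip implemented by the helpers above: visual properties (position, width, panel_height, collapsed, label_dx/label_dy) live in a layout dictionary keyed by entity id or source-target-index, while the model dictionary keeps only semantic fields. The entity and relationship values are made up for illustration.

from trellis_datamodel.routes.data_model import (
    _merge_layout_into_model,
    _split_model_and_layout,
)

content = {
    "version": 0.1,
    "entities": [
        {
            "id": "orders",
            "label": "Orders",
            "dbt_model": "fct_orders",        # hypothetical dbt model name
            "position": {"x": 120, "y": 40},  # layout-only property
            "collapsed": True,                # layout-only property
        }
    ],
    "relationships": [
        {"source": "orders", "target": "customers", "label": "placed by", "label_dx": 8}
    ],
}

# Split: semantic fields stay in model_data, visual fields move to layout_data.
model_data, layout_data = _split_model_and_layout(content)
assert layout_data["entities"]["orders"] == {"position": {"x": 120, "y": 40}, "collapsed": True}
assert layout_data["relationships"]["orders-customers-0"] == {"label_dx": 8}

# Merge: reproduces the combined view returned by GET /api/data-model.
merged = _merge_layout_into_model(model_data, layout_data)
assert merged["entities"][0]["position"] == {"x": 120, "y": 40}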
trellis_datamodel/routes/manifest.py
@@ -0,0 +1,110 @@
+"""Routes for manifest and catalog operations."""
+
+from fastapi import APIRouter, HTTPException
+import os
+
+from trellis_datamodel import config as cfg
+from trellis_datamodel.config import (
+    find_config_file,
+)
+from trellis_datamodel.adapters import get_adapter
+
+router = APIRouter(prefix="/api", tags=["manifest"])
+
+
+def _resolve_config_path() -> str | None:
+    """Resolve config file path, preferring CONFIG_PATH from startup, falling back to search."""
+    if cfg.CONFIG_PATH and os.path.exists(cfg.CONFIG_PATH):
+        return cfg.CONFIG_PATH
+    return find_config_file()
+
+
+@router.get("/config-status")
+async def get_config_status():
+    """Return configuration status for the frontend."""
+    found_config = _resolve_config_path()
+    config_present = found_config is not None
+
+    # Determine expected config filename for display
+    if config_present:
+        config_filename = os.path.basename(found_config)
+    else:
+        # Default to trellis.yml (primary config file name)
+        config_filename = "trellis.yml"
+
+    manifest_exists = os.path.exists(cfg.MANIFEST_PATH) if cfg.MANIFEST_PATH else False
+    catalog_exists = os.path.exists(cfg.CATALOG_PATH) if cfg.CATALOG_PATH else False
+    data_model_exists = (
+        os.path.exists(cfg.DATA_MODEL_PATH) if cfg.DATA_MODEL_PATH else False
+    )
+
+    error = None
+    if not config_present:
+        error = "Config file not found."
+    elif not cfg.DBT_PROJECT_PATH:
+        error = "dbt_project_path not set in config."
+    elif not manifest_exists:
+        error = f"Manifest not found at {cfg.MANIFEST_PATH}"
+
+    return {
+        "config_present": config_present,
+        "config_filename": config_filename,
+        "framework": cfg.FRAMEWORK,
+        "dbt_project_path": cfg.DBT_PROJECT_PATH,
+        "manifest_path": cfg.MANIFEST_PATH,
+        "catalog_path": cfg.CATALOG_PATH,
+        "manifest_exists": manifest_exists,
+        "catalog_exists": catalog_exists,
+        "data_model_exists": data_model_exists,
+        "error": error,
+    }
+
+
+@router.get("/config-info")
+async def get_config_info():
+    """
+    Return resolved config paths and their existence for transparency/debugging.
+    """
+    config_path = _resolve_config_path()
+
+    adapter = get_adapter()
+    try:
+        model_dirs = adapter.get_model_dirs()  # type: ignore[attr-defined]
+    except Exception:
+        model_dirs = []
+
+    return {
+        "config_path": config_path,
+        "framework": cfg.FRAMEWORK,
+        "dbt_project_path": cfg.DBT_PROJECT_PATH,
+        "manifest_path": cfg.MANIFEST_PATH,
+        "manifest_exists": bool(
+            cfg.MANIFEST_PATH and os.path.exists(cfg.MANIFEST_PATH)
+        ),
+        "catalog_path": cfg.CATALOG_PATH,
+        "catalog_exists": bool(cfg.CATALOG_PATH and os.path.exists(cfg.CATALOG_PATH)),
+        "data_model_path": cfg.DATA_MODEL_PATH,
+        "data_model_exists": bool(
+            cfg.DATA_MODEL_PATH and os.path.exists(cfg.DATA_MODEL_PATH)
+        ),
+        "canvas_layout_path": cfg.CANVAS_LAYOUT_PATH,
+        "canvas_layout_exists": bool(
+            cfg.CANVAS_LAYOUT_PATH and os.path.exists(cfg.CANVAS_LAYOUT_PATH)
+        ),
+        "frontend_build_dir": cfg.FRONTEND_BUILD_DIR,
+        "model_paths_configured": cfg.DBT_MODEL_PATHS,
+        "model_paths_resolved": model_dirs,
+    }
+
+
+@router.get("/manifest")
+async def get_manifest():
+    """Return parsed models from the transformation framework."""
+    try:
+        adapter = get_adapter()
+        models = adapter.get_models()
+        return {"models": models}
+    except FileNotFoundError as e:
+        raise HTTPException(status_code=404, detail=str(e))
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"Error reading manifest: {str(e)}")
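A minimal sketch (not part of the package) of calling the config-status route above with FastAPI's TestClient; it assumes the package is installed and importable, and relies on server.py (later in this diff) exposing app = create_app() at module level.

from fastapi.testclient import TestClient

from trellis_datamodel.server import app

client = TestClient(app)
status = client.get("/api/config-status").json()

# When no config file is resolved, the endpoint still returns 200 with a diagnostic
# payload rather than an HTTP error, e.g. config_present=False,
# config_filename="trellis.yml", error="Config file not found."
print(status["config_present"], status["config_filename"], status["error"])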
trellis_datamodel/routes/schema.py
@@ -0,0 +1,183 @@
+"""Routes for schema sync operations."""
+
+from fastapi import APIRouter, HTTPException
+import yaml
+import os
+
+from trellis_datamodel import config as cfg
+from trellis_datamodel.models.schemas import DbtSchemaRequest, ModelSchemaRequest
+from trellis_datamodel.adapters import get_adapter
+
+router = APIRouter(prefix="/api", tags=["schema"])
+
+
+@router.post("/dbt-schema")
+async def save_dbt_schema(request: DbtSchemaRequest):
+    """Generate and save a schema YAML file for the drafted fields."""
+    try:
+        if not cfg.DBT_PROJECT_PATH:
+            raise HTTPException(
+                status_code=400,
+                detail="dbt_project_path is not configured. Please set it in config.yml",
+            )
+
+        adapter = get_adapter()
+        output_path = adapter.save_dbt_schema(
+            entity_id=request.entity_id,
+            model_name=request.model_name,
+            fields=request.fields,
+            description=request.description,
+            tags=request.tags,
+        )
+
+        return {
+            "status": "success",
+            "file_path": str(output_path),
+            "message": f"Schema saved to {output_path}",
+        }
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"Error saving schema: {str(e)}")
+
+
+@router.post("/sync-dbt-tests")
+async def sync_dbt_tests():
+    """Sync relationship tests from data model to schema files."""
+    try:
+        if not cfg.DBT_PROJECT_PATH:
+            raise HTTPException(
+                status_code=400,
+                detail="dbt_project_path is not configured. Please set it in config.yml",
+            )
+
+        if not cfg.DATA_MODEL_PATH or not os.path.exists(cfg.DATA_MODEL_PATH):
+            raise HTTPException(status_code=404, detail="Data model file not found")
+
+        with open(cfg.DATA_MODEL_PATH, "r") as f:
+            data_model = yaml.safe_load(f) or {}
+
+        entities = data_model.get("entities", [])
+        relationships = data_model.get("relationships", [])
+
+        adapter = get_adapter()
+        updated_files = adapter.sync_relationships(entities, relationships)
+
+        return {
+            "status": "success",
+            "message": f"Updated {len(updated_files)} file(s)",
+            "files": [str(f) for f in updated_files],
+        }
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        import traceback

+        traceback.print_exc()
+        raise HTTPException(status_code=500, detail=f"Error syncing tests: {str(e)}")
+
+
+@router.get("/models/{model_name}/schema")
+async def get_model_schema(model_name: str, version: int | None = None):
+    """Get the schema for a specific model from its YAML file."""
+    try:
+        if not cfg.DBT_PROJECT_PATH:
+            raise HTTPException(
+                status_code=400,
+                detail="dbt_project_path is not configured. Please set it in config.yml",
+            )
+
+        adapter = get_adapter()
+        schema = adapter.get_model_schema(model_name, version=version)
+
+        return {
+            "model_name": schema.get("model_name", model_name),
+            "description": schema.get("description", ""),
+            "columns": schema.get("columns", []),
+            "tags": schema.get("tags", []),
+            "file_path": schema.get("file_path", ""),
+        }
+
+    except FileNotFoundError as e:
+        raise HTTPException(status_code=404, detail=str(e))
+    except ValueError as e:
+        raise HTTPException(status_code=404, detail=str(e))
+    except HTTPException:
+        raise
+    except Exception as e:
+        import traceback
+
+        traceback.print_exc()
+        raise HTTPException(
+            status_code=500, detail=f"Error reading model schema: {str(e)}"
+        )
+
+
+@router.post("/models/{model_name}/schema")
+async def update_model_schema(model_name: str, request: ModelSchemaRequest):
+    """Update the schema for a specific model in its YAML file."""
+    try:
+        if not cfg.DBT_PROJECT_PATH:
+            raise HTTPException(
+                status_code=400,
+                detail="dbt_project_path is not configured. Please set it in config.yml",
+            )
+
+        adapter = get_adapter()
+        output_path = adapter.save_model_schema(
+            model_name=model_name,
+            columns=request.columns,
+            description=request.description,
+            tags=request.tags,
+            version=request.version,
+        )
+
+        return {
+            "status": "success",
+            "message": f"Schema updated for model '{model_name}'",
+            "file_path": str(output_path),
+        }
+
+    except FileNotFoundError as e:
+        raise HTTPException(status_code=404, detail=str(e))
+    except ValueError as e:
+        raise HTTPException(status_code=404, detail=str(e))
+    except HTTPException:
+        raise
+    except Exception as e:
+        import traceback
+
+        traceback.print_exc()
+        raise HTTPException(
+            status_code=500, detail=f"Error updating model schema: {str(e)}"
+        )
+
+
+@router.get("/infer-relationships")
+async def infer_relationships(include_unbound: bool = False):
+    """Scan schema files and infer entity relationships from relationship tests."""
+    try:
+        if not cfg.DBT_PROJECT_PATH:
+            raise HTTPException(
+                status_code=400,
+                detail="dbt_project_path is not configured. Please set it in config.yml",
+            )
+
+        adapter = get_adapter()
+        relationships = adapter.infer_relationships(include_unbound=include_unbound)
+
+        return {"relationships": relationships}
+
+    except FileNotFoundError as e:
+        raise HTTPException(status_code=400, detail=str(e))
+    except HTTPException:
+        raise
+    except Exception as e:
+        import traceback
+
+        traceback.print_exc()
+        raise HTTPException(
+            status_code=500, detail=f"Error inferring relationships: {str(e)}"
+        )
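A minimal sketch (not part of the package) of the guard clause shared by the schema routes above: when dbt_project_path is unset they return 400 before ever touching the adapter. The model name is made up, and resetting cfg.DBT_PROJECT_PATH here only simulates an unconfigured project.

from fastapi.testclient import TestClient

from trellis_datamodel import config as cfg
from trellis_datamodel.server import app

cfg.DBT_PROJECT_PATH = None  # simulate a missing dbt_project_path setting
client = TestClient(app)

resp = client.get("/api/models/dim_customers/schema")  # "dim_customers" is hypothetical
print(resp.status_code)       # 400
print(resp.json()["detail"])  # "dbt_project_path is not configured. Please set it in config.yml"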
trellis_datamodel/server.py
@@ -0,0 +1,101 @@
+"""
+Trellis Data - FastAPI Server
+
+This is the FastAPI application that serves the API and frontend.
+"""
+
+from fastapi import FastAPI, Request, HTTPException
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.staticfiles import StaticFiles
+from fastapi.responses import FileResponse
+from starlette.exceptions import HTTPException as StarletteHTTPException
+import os
+from importlib.resources import files
+
+from trellis_datamodel import config as cfg
+from trellis_datamodel.config import print_config
+from trellis_datamodel.routes import manifest_router, data_model_router, schema_router
+
+
+def create_app() -> FastAPI:
+    """Create and configure the FastAPI application."""
+    app = FastAPI(title="Trellis Data", version="0.1.0")
+
+    # CORS for development
+    app.add_middleware(
+        CORSMiddleware,
+        allow_origins=["*"],  # Allow all for local dev
+        allow_credentials=True,
+        allow_methods=["*"],
+        allow_headers=["*"],
+    )
+
+    # Health check endpoint
+    @app.get("/health")
+    def health_check():
+        return {"status": "ok"}
+
+    # Find static files directory
+    # Priority: 1) configured FRONTEND_BUILD_DIR, 2) package static dir (if it has index.html)
+    static_dir_path = None
+
+    # Check configured build dir first
+    if cfg.FRONTEND_BUILD_DIR and os.path.exists(
+        os.path.join(cfg.FRONTEND_BUILD_DIR, "index.html")
+    ):
+        static_dir_path = cfg.FRONTEND_BUILD_DIR
+    else:
+        # Fall back to package static dir if it has index.html
+        try:
+            static_dir = files("trellis_datamodel") / "static"
+            if static_dir.is_dir():
+                pkg_index = str(static_dir / "index.html")
+                if os.path.exists(pkg_index):
+                    static_dir_path = str(static_dir)
+        except Exception:
+            pass
+
+    print(f"Serving frontend from: {static_dir_path}")
+
+    # Include API routers - these MUST be registered before mounting static files
+    app.include_router(manifest_router)
+    app.include_router(data_model_router)
+    app.include_router(schema_router)
+
+    # Mount static files AFTER API routes
+    # Important: app.mount() creates a sub-application, so we mount AFTER registering API routes
+    # However, mounted apps at "/" will intercept everything, so we need a different approach
+    if static_dir_path:
+        # Serve static assets at /assets
+        assets_path = os.path.join(static_dir_path, "assets")
+        if os.path.exists(assets_path):
+            app.mount("/assets", StaticFiles(directory=assets_path), name="assets")
+
+        # Serve other static files (like favicon, etc.) at /_app
+        # SvelteKit builds put immutable assets in /_app
+        app_path = os.path.join(static_dir_path, "_app")
+        if os.path.exists(app_path):
+            app.mount("/_app", StaticFiles(directory=app_path), name="app")
+
+        # Catch-all route for SPA - must be defined LAST
+        # FastAPI matches more specific routes first, so /api/* routes will match before this
+        @app.get("/{full_path:path}", include_in_schema=False)
+        async def serve_spa(request: Request, full_path: str):
+            """Serve SPA index.html for non-API routes."""
+            # Serve index.html for all routes (API routes are already matched above)
+            index_file = os.path.join(static_dir_path, "index.html")
+            if os.path.exists(index_file):
+                return FileResponse(index_file)
+            raise HTTPException(status_code=404, detail="Not found")
+
+    else:
+        print(
+            f"Warning: Frontend build not found. "
+            f"Bundled static files missing and {cfg.FRONTEND_BUILD_DIR} does not exist. "
+            f"Run 'npm run build' in frontend/ or install the package properly."
+        )
+
+    return app
+
+
+app = create_app()
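A minimal sketch (not part of the package) of serving the application assembled above with uvicorn; the packaged CLI (trellis_datamodel/cli.py, not expanded in this diff) presumably wraps something similar, and the host/port values here are illustrative.

import uvicorn

if __name__ == "__main__":
    # create_app() has already run at import time; "trellis_datamodel.server:app"
    # points at the module-level app defined above.
    uvicorn.run("trellis_datamodel.server:app", host="127.0.0.1", port=8000)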
trellis_datamodel/static/_app/env.js
@@ -0,0 +1 @@
+export const env={}