trellis-datamodel 0.3.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52) hide show
  1. trellis_datamodel/__init__.py +8 -0
  2. trellis_datamodel/adapters/__init__.py +41 -0
  3. trellis_datamodel/adapters/base.py +147 -0
  4. trellis_datamodel/adapters/dbt_core.py +975 -0
  5. trellis_datamodel/cli.py +292 -0
  6. trellis_datamodel/config.py +239 -0
  7. trellis_datamodel/models/__init__.py +13 -0
  8. trellis_datamodel/models/schemas.py +28 -0
  9. trellis_datamodel/routes/__init__.py +11 -0
  10. trellis_datamodel/routes/data_model.py +221 -0
  11. trellis_datamodel/routes/manifest.py +110 -0
  12. trellis_datamodel/routes/schema.py +183 -0
  13. trellis_datamodel/server.py +101 -0
  14. trellis_datamodel/static/_app/env.js +1 -0
  15. trellis_datamodel/static/_app/immutable/assets/0.ByDwyx3a.css +1 -0
  16. trellis_datamodel/static/_app/immutable/assets/2.DLAp_5AW.css +1 -0
  17. trellis_datamodel/static/_app/immutable/assets/trellis_squared.CTOnsdDx.svg +127 -0
  18. trellis_datamodel/static/_app/immutable/chunks/8ZaN1sxc.js +1 -0
  19. trellis_datamodel/static/_app/immutable/chunks/BfBfOTnK.js +1 -0
  20. trellis_datamodel/static/_app/immutable/chunks/C3yhlRfZ.js +2 -0
  21. trellis_datamodel/static/_app/immutable/chunks/CK3bXPEX.js +1 -0
  22. trellis_datamodel/static/_app/immutable/chunks/CXDUumOQ.js +1 -0
  23. trellis_datamodel/static/_app/immutable/chunks/DDNfEvut.js +1 -0
  24. trellis_datamodel/static/_app/immutable/chunks/DUdVct7e.js +1 -0
  25. trellis_datamodel/static/_app/immutable/chunks/QRltG_J6.js +2 -0
  26. trellis_datamodel/static/_app/immutable/chunks/zXDdy2c_.js +1 -0
  27. trellis_datamodel/static/_app/immutable/entry/app.abCkWeAJ.js +2 -0
  28. trellis_datamodel/static/_app/immutable/entry/start.B7CjH6Z7.js +1 -0
  29. trellis_datamodel/static/_app/immutable/nodes/0.bFI_DI3G.js +1 -0
  30. trellis_datamodel/static/_app/immutable/nodes/1.J_r941Qf.js +1 -0
  31. trellis_datamodel/static/_app/immutable/nodes/2.WqbMkq6o.js +27 -0
  32. trellis_datamodel/static/_app/version.json +1 -0
  33. trellis_datamodel/static/index.html +40 -0
  34. trellis_datamodel/static/robots.txt +3 -0
  35. trellis_datamodel/static/trellis_squared.svg +127 -0
  36. trellis_datamodel/tests/__init__.py +2 -0
  37. trellis_datamodel/tests/conftest.py +132 -0
  38. trellis_datamodel/tests/test_cli.py +526 -0
  39. trellis_datamodel/tests/test_data_model.py +151 -0
  40. trellis_datamodel/tests/test_dbt_schema.py +892 -0
  41. trellis_datamodel/tests/test_manifest.py +72 -0
  42. trellis_datamodel/tests/test_server_static.py +44 -0
  43. trellis_datamodel/tests/test_yaml_handler.py +228 -0
  44. trellis_datamodel/utils/__init__.py +2 -0
  45. trellis_datamodel/utils/yaml_handler.py +365 -0
  46. trellis_datamodel-0.3.3.dist-info/METADATA +333 -0
  47. trellis_datamodel-0.3.3.dist-info/RECORD +52 -0
  48. trellis_datamodel-0.3.3.dist-info/WHEEL +5 -0
  49. trellis_datamodel-0.3.3.dist-info/entry_points.txt +2 -0
  50. trellis_datamodel-0.3.3.dist-info/licenses/LICENSE +661 -0
  51. trellis_datamodel-0.3.3.dist-info/licenses/NOTICE +6 -0
  52. trellis_datamodel-0.3.3.dist-info/top_level.txt +1 -0
@@ -0,0 +1,365 @@
1
+ """
2
+ YAML Handler for round-trip editing of DBT schema files.
3
+ Preserves comments, formatting, and structure while allowing updates.
4
+ """
5
+
6
+ import os
7
+ from typing import Dict, List, Optional, Any
8
+ from ruamel.yaml import YAML
9
+ from ruamel.yaml.comments import CommentedMap, CommentedSeq
10
+
11
+
12
class YamlHandler:
    """Handler for reading and writing DBT schema YAML files with round-trip capabilities.

    Uses ruamel.yaml's round-trip mode so comments, quoting, ordering, and
    formatting in existing schema files are preserved across load/edit/save.
    """

    def __init__(self):
        self.yaml = YAML()
        self.yaml.preserve_quotes = True
        self.yaml.default_flow_style = False
        self.yaml.width = 4096  # Prevent unwanted line wrapping
        # Match the two-space sequence indentation seen in existing dbt yml files
        self.yaml.indent(mapping=2, sequence=2, offset=0)
        # Disallow indentless sequences so list items are indented under their parent keys
        self.yaml.indentless_sequences = False

    def load_file(self, file_path: str) -> Optional[Dict]:
        """
        Load a YAML file with round-trip capabilities.

        Args:
            file_path: Path to the YAML file

        Returns:
            Parsed YAML content, or None if the file doesn't exist or fails to parse
        """
        if not os.path.exists(file_path):
            return None

        try:
            with open(file_path, "r") as f:
                return self.yaml.load(f)
        except Exception as e:
            # Best-effort load: report and return None rather than crash callers.
            print(f"Error loading YAML file {file_path}: {e}")
            return None

    def save_file(self, file_path: str, data: Dict) -> None:
        """
        Save YAML data to a file with round-trip preservation.

        The write is atomic: data is dumped to a sibling temp file which is
        then moved into place with os.replace, so readers never observe a
        partially written file.

        Args:
            file_path: Path to the YAML file
            data: YAML data to save
        """
        # Ensure directory exists. Guard against a bare filename, where
        # os.path.dirname() returns "" and os.makedirs("") would raise.
        parent_dir = os.path.dirname(file_path)
        if parent_dir:
            os.makedirs(parent_dir, exist_ok=True)

        # Write atomically using a temp file
        temp_path = f"{file_path}.tmp"
        try:
            with open(temp_path, "w") as f:
                self.yaml.dump(data, f)
            os.replace(temp_path, file_path)
        except Exception:
            # Clean up the orphaned temp file, then re-raise with the
            # original traceback intact.
            if os.path.exists(temp_path):
                os.remove(temp_path)
            raise

    def find_model(self, data: Dict, model_name: str) -> Optional[CommentedMap]:
        """
        Find a specific model entry in the YAML data.

        Args:
            data: Parsed YAML data
            model_name: Name of the model to find

        Returns:
            Model entry or None if not found
        """
        if not data or "models" not in data:
            return None

        models = data.get("models", [])
        for model in models:
            if model.get("name") == model_name:
                return model

        return None

    def ensure_model(self, data: Dict, model_name: str) -> CommentedMap:
        """
        Ensure a model entry exists in the YAML data, creating it if necessary.
        Preserve existing tags if present.

        Also ensures the document has `version: 2` and a `models` sequence,
        as expected by dbt schema files.

        Args:
            data: Parsed YAML data
            model_name: Name of the model

        Returns:
            Model entry (existing or newly created)
        """
        if "version" not in data:
            data["version"] = 2

        if "models" not in data:
            data["models"] = CommentedSeq()

        model = self.find_model(data, model_name)
        if not model:
            model = CommentedMap()
            model["name"] = model_name
            data["models"].append(model)
            # Don't auto-create tags - let update_model_tags handle it when needed
        return model

    def set_latest_version(self, model: CommentedMap, version: int) -> None:
        """Set or bump latest_version for a versioned model.

        Only writes when no latest_version exists yet, or when the given
        version is strictly greater than the recorded one.
        """
        existing = model.get("latest_version")
        if existing is None or (isinstance(existing, int) and version > existing):
            model["latest_version"] = version

    def ensure_model_version(
        self, model: CommentedMap, version: int
    ) -> CommentedMap:
        """
        Ensure a version entry exists within a versioned model.

        Args:
            model: Model entry
            version: Version number (e.g., 2)

        Returns:
            Version entry (existing or newly created)
        """
        if "versions" not in model or model.get("versions") is None:
            model["versions"] = CommentedSeq()

        for ver in model["versions"]:
            if ver.get("v") == version:
                return ver

        ver_entry = CommentedMap()
        ver_entry["v"] = version
        model["versions"].append(ver_entry)
        return ver_entry

    def update_model_description(
        self, model: CommentedMap, description: Optional[str]
    ) -> None:
        """
        Update the description of a model.

        Args:
            model: Model entry
            description: New description (None or empty string skips the update)
        """
        if description:
            model["description"] = description

    def get_model_tags(self, model: CommentedMap) -> List[str]:
        """Return the list of tags for a model, combining top-level and config tags.

        Duplicates are removed while preserving first-seen order, with
        top-level tags taking precedence over config tags.
        """
        top_level_tags = list(model.get("tags", []))
        config = model.get("config", {})
        config_tags = list(config.get("tags", [])) if config else []
        # Combine and deduplicate, preserving order
        seen = set()
        combined = []
        for tag in top_level_tags + config_tags:
            if tag not in seen:
                seen.add(tag)
                combined.append(tag)
        return combined

    def update_version_tags(self, version: CommentedMap, tags: List[str]) -> None:
        """
        Replace the tags list for a model version using config.tags (dbt convention).

        Args:
            version: Version entry (as returned by ensure_model_version)
            tags: New list of tags to set
        """
        config = version.get("config")
        if config is None:
            version["config"] = CommentedMap()
            config = version["config"]
        config["tags"] = tags

    def update_model_tags(self, model: CommentedMap, tags: List[str]) -> None:
        """Replace the tags list for a model, preserving the original location.

        Priority: 1) existing config.tags, 2) existing top-level tags, 3) config.tags (default)
        Ensures tags are only in one location to avoid confusion.
        """
        config = model.get("config")
        has_config_tags = config is not None and "tags" in config
        has_top_level_tags = "tags" in model

        if has_config_tags:
            # Update in config block (original location)
            config["tags"] = tags
            # Remove top-level tags if present to avoid duplication
            if has_top_level_tags:
                del model["tags"]
        elif has_top_level_tags:
            # Update existing top-level tags
            model["tags"] = tags
        else:
            # Default: use config.tags (dbt convention)
            if config is None:
                model["config"] = CommentedMap()
                config = model["config"]
            config["tags"] = tags

    def find_column(
        self, model: CommentedMap, column_name: str
    ) -> Optional[CommentedMap]:
        """
        Find a specific column entry in the model.

        Args:
            model: Model entry
            column_name: Name of the column to find

        Returns:
            Column entry or None if not found
        """
        columns = model.get("columns", [])
        for col in columns:
            if col.get("name") == column_name:
                return col
        return None

    def ensure_column(self, model: CommentedMap, column_name: str) -> CommentedMap:
        """
        Ensure a column entry exists in the model, creating it if necessary.

        Args:
            model: Model entry
            column_name: Name of the column

        Returns:
            Column entry (existing or newly created)
        """
        if "columns" not in model:
            model["columns"] = CommentedSeq()

        col = self.find_column(model, column_name)
        if not col:
            col = CommentedMap()
            col["name"] = column_name
            model["columns"].append(col)

        return col

    def update_column(
        self,
        column: CommentedMap,
        data_type: Optional[str] = None,
        description: Optional[str] = None,
    ) -> None:
        """
        Update column properties.

        Args:
            column: Column entry
            data_type: New data type (None or empty string skips the update)
            description: New description (None or empty string skips the update)
        """
        if data_type:
            column["data_type"] = data_type
        if description:
            column["description"] = description

    def add_relationship_test(
        self,
        column: CommentedMap,
        target_model: str,
        target_field: str,
    ) -> None:
        """
        Add or update a relationship test for a column.

        Any pre-existing relationships tests (under either `tests` or
        `data_tests`) are dropped and replaced with a single new one; all
        other tests are kept. The result is stored under `data_tests` and
        any legacy `tests` key is removed.

        Args:
            column: Column entry
            target_model: Target model name (will be wrapped in ref())
            target_field: Target field name
        """
        existing_tests = CommentedSeq()

        # Collect non-relationship tests from both tests and data_tests keys
        for key in ("data_tests", "tests"):
            if key in column:
                for test in column.get(key, []):
                    if isinstance(test, dict) and "relationships" in test:
                        continue
                    existing_tests.append(test)

        # Build new relationships test using recommended arguments block
        rel_test = CommentedMap()
        rel_test["relationships"] = CommentedMap()
        rel_test["relationships"]["arguments"] = CommentedMap()
        rel_test["relationships"]["arguments"]["to"] = f"ref('{target_model}')"
        rel_test["relationships"]["arguments"]["field"] = target_field

        existing_tests.append(rel_test)

        column["data_tests"] = existing_tests
        # Drop tests key if present to avoid confusion
        if "tests" in column:
            del column["tests"]

    def update_columns_batch(
        self,
        model: CommentedMap,
        columns_data: List[Dict[str, Any]],
    ) -> None:
        """
        Update multiple columns at once.

        Entries without a "name" key are silently skipped.

        Args:
            model: Model entry
            columns_data: List of column dicts with name, data_type, description
        """
        for col_data in columns_data:
            col_name = col_data.get("name")
            if not col_name:
                continue

            col = self.ensure_column(model, col_name)
            self.update_column(
                col,
                data_type=col_data.get("data_type"),
                description=col_data.get("description"),
            )

    def get_columns(self, model: CommentedMap) -> List[Dict[str, Any]]:
        """
        Extract columns from a model as a list of dicts.

        Args:
            model: Model entry

        Returns:
            List of column dicts with name, data_type, description, and
            (when present) a "data_tests" list gathering dict-style tests
            from both the `data_tests` and legacy `tests` keys.
        """
        columns = model.get("columns", [])
        result = []

        for col in columns:
            col_dict = {
                "name": col.get("name"),
                "data_type": col.get("data_type"),
                "description": col.get("description"),
            }

            # Extract tests (supports both dbt's tests and data_tests keys)
            collected_tests: list[dict[str, Any]] = []
            for key in ("data_tests", "tests"):
                if key not in col:
                    continue
                for test in col.get(key, []):
                    if isinstance(test, dict):
                        collected_tests.append(dict(test))

            if collected_tests:
                col_dict["data_tests"] = collected_tests

            result.append(col_dict)

        return result
@@ -0,0 +1,333 @@
1
+ Metadata-Version: 2.4
2
+ Name: trellis-datamodel
3
+ Version: 0.3.3
4
+ Summary: Visual data model editor for dbt projects
5
+ Author: Tim Hiebenthal
6
+ Project-URL: Homepage, https://github.com/timhiebenthal/trellis-datamodel
7
+ Project-URL: Repository, https://github.com/timhiebenthal/trellis-datamodel
8
+ Project-URL: Issues, https://github.com/timhiebenthal/trellis-datamodel/issues
9
+ Keywords: dbt,data-modeling,erd,data-engineering,analytics-engineering,visualization,schema
10
+ Classifier: Development Status :: 4 - Beta
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: Programming Language :: Python :: 3
13
+ Classifier: Programming Language :: Python :: 3.11
14
+ Classifier: Programming Language :: Python :: 3.12
15
+ Classifier: Topic :: Database
16
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
17
+ Requires-Python: >=3.11
18
+ Description-Content-Type: text/markdown
19
+ License-File: LICENSE
20
+ License-File: NOTICE
21
+ Requires-Dist: dbt-core<2.0,>=1.10.5
22
+ Requires-Dist: dbt-duckdb>=1.10.0
23
+ Requires-Dist: fastapi>=0.121.3
24
+ Requires-Dist: python-dotenv>=1.2.1
25
+ Requires-Dist: pyyaml>=6.0.3
26
+ Requires-Dist: ruamel.yaml>=0.18.0
27
+ Requires-Dist: typer>=0.9.0
28
+ Requires-Dist: uvicorn>=0.38.0
29
+ Provides-Extra: dev
30
+ Requires-Dist: pytest>=8.0.0; extra == "dev"
31
+ Requires-Dist: httpx>=0.27.0; extra == "dev"
32
+ Provides-Extra: dbt-example
33
+ Requires-Dist: dbt-duckdb==1.10; extra == "dbt-example"
34
+ Requires-Dist: duckdb>=1.4.2; extra == "dbt-example"
35
+ Requires-Dist: faker>=24.0.0; extra == "dbt-example"
36
+ Requires-Dist: marimo>=0.18.0; extra == "dbt-example"
37
+ Requires-Dist: nba-api>=1.11.3; extra == "dbt-example"
38
+ Requires-Dist: pandas>=2.3.3; extra == "dbt-example"
39
+ Requires-Dist: tqdm>=4.67.1; extra == "dbt-example"
40
+ Dynamic: license-file
41
+
42
+ # Trellis Data
43
+
44
+ ![Trellis Logo](resources/trellis_with_text.png)
45
+
46
+ A lightweight, local-first tool to bridge Conceptual Data Modeling, Logical Data Modeling and the Physical Implementation (currently with dbt-core).
47
+
48
+ ## Motivation
49
+
50
+ **Current workflow pains:**
51
+ - ERD diagrams live in separate tools (Lucidchart, draw.io) and quickly become stale or unreadable for large projects
52
+ - Data transformations are done in isolation from the conceptual data model.
53
+ - No single view connecting business concepts to logical schema
54
+ - Stakeholders can't easily understand model structure without technical context
55
+ - Holistic Data Warehouse Automation Tools exist but do not integrate well with dbt and the Modern Data Stack
56
+
57
+ **How Trellis helps:**
58
+ - Visual data model that stays in sync — reads directly from `manifest.json` / `catalog.json`
59
+ - Sketch entities with their fields and auto-generate schema.yml's for dbt
60
+ - Draw relationships on canvas → auto-generates dbt `relationships` tests
61
+ - Two views: **Conceptual** (entity names, descriptions) and **Logical** (columns, types, materializations) to jump between high-level architect and execution-view.
62
+ - Organize entities based on subdirectories and tags from your physical implementation.
63
+ - Write description or tags back to your dbt-project
64
+
65
+ **Two Ways of getting started**
66
+ - Greenfield: draft entities and fields before writing SQL, then sync to dbt YAML
67
+ - Brownfield: document your existing data model by loading existing dbt models and utilize relationship tests to infer links
68
+
69
+ ## Tutorial
70
+
71
+ Check out our [Full Tutorial](https://app.capacities.io/home/667ad256-ca68-4dfd-8231-e77d83127dcf) with video clips showing the core features in action.
72
+
73
+ ## Vision
74
+
75
+ trellis is currently designed and tested specifically for **dbt-core**, but the vision is to be tool-agnostic. As the saying goes: *"tools evolve, concepts don't"* — data modeling concepts persist regardless of the transformation framework you use.
76
+
77
+ If this project gains traction, we might explore support for:
78
+ - **dbt-fusion** through adapter support
79
+ - **Pydantic models** as a simple output format
80
+ - Other frameworks like [SQLMesh](https://github.com/TobikoData/sqlmesh) or [Bruin](https://github.com/bruin-data/bruin) through adapter patterns, where compatibility allows
81
+
82
+ This remains a vision for now — the current focus is on making Trellis work well with dbt-core.
83
+
84
+ ## Prerequisites
85
+ - **Node.js 22+ (or 20.19+) & npm**
86
+ - Recommended: Use [nvm](https://github.com/nvm-sh/nvm) to install a compatible version (e.g., `nvm install 22`).
87
+ - Note: System packages (`apt-get`) may be too old for the frontend dependencies.
88
+ - A `.nvmrc` file is included; run `nvm use` to switch to the correct version automatically.
89
+ - **Python 3.11+ & [uv](https://github.com/astral-sh/uv)**
90
+ - Install uv via `curl -LsSf https://astral.sh/uv/install.sh | sh` and ensure it's on your `$PATH`.
91
+ - **Make** (optional) for convenience targets defined in the `Makefile`.
92
+
93
+ ## Installation
94
+
95
+ ### Install from PyPI
96
+
97
+ ```bash
98
+ pip install trellis-datamodel
99
+ # or with uv
100
+ uv pip install trellis-datamodel
101
+ ```
102
+
103
+ ### Install from Source (Development)
104
+
105
+ ```bash
106
+ # Clone the repository
107
+ git clone https://github.com/timhiebenthal/trellis-datamodel.git
108
+ cd trellis-datamodel
109
+
110
+ # Install in editable mode
111
+ pip install -e .
112
+ # or with uv
113
+ uv pip install -e .
114
+ ```
115
+
116
+ ## Quick Start
117
+
118
+ 1. **Navigate to your dbt project directory**
119
+ ```bash
120
+ cd /path/to/your/dbt-project
121
+ ```
122
+
123
+ 2. **Initialize configuration**
124
+ ```bash
125
+ trellis init
126
+ ```
127
+ This creates a `trellis.yml` file. Edit it to point to your dbt manifest and catalog locations.
128
+
129
+ 3. **Start the server**
130
+ ```bash
131
+ trellis run
132
+ ```
133
+
134
+ The server will start on **http://localhost:8089** and automatically open your browser.
135
+
136
+ ## Development Setup
137
+
138
+ For local development with hot reload:
139
+
140
+ ### Install Dependencies
141
+ Run these once per machine (or when dependencies change).
142
+
143
+ 1. **Backend**
144
+ ```bash
145
+ uv sync
146
+ ```
147
+ 2. **Frontend**
148
+ ```bash
149
+ cd frontend
150
+ npm install
151
+ ```
152
+
153
+ **Terminal 1 – Backend**
154
+ ```bash
155
+ make backend
156
+ # or
157
+ uv run trellis run
158
+ ```
159
+ Backend serves the API at http://localhost:8089.
160
+
161
+ **Terminal 2 – Frontend**
162
+ ```bash
163
+ make frontend
164
+ # or
165
+ cd frontend && npm run dev
166
+ ```
167
+ Frontend runs at http://localhost:5173 (for development with hot reload).
168
+
169
+ ## Building for Distribution
170
+
171
+ To build the package with bundled frontend:
172
+
173
+ ```bash
174
+ make build-package
175
+ ```
176
+
177
+ This will:
178
+ 1. Build the frontend (`npm run build`)
179
+ 2. Copy static files to `trellis_datamodel/static/`
180
+ 3. Build the Python wheel (`uv build`)
181
+
182
+ The wheel will be in `dist/` and can be installed with `pip install dist/trellis_datamodel-*.whl`.
183
+
184
+ ## CLI Options
185
+
186
+ ```bash
187
+ trellis run [OPTIONS]
188
+
189
+ Options:
190
+ --port, -p INTEGER Port to run the server on [default: 8089]
191
+ --config, -c TEXT Path to config file (trellis.yml or config.yml)
192
+ --no-browser Don't open browser automatically
193
+ --help Show help message
194
+ ```
195
+
196
+ ## dbt Metadata
197
+ - Generate `manifest.json` and `catalog.json` by running `dbt docs generate` in your dbt project.
198
+ - The UI reads these artifacts to power the ERD modeller.
199
+ - Without these artifacts, the UI loads but shows no dbt models.
200
+
201
+ ## Configuration
202
+
203
+ Run `trellis init` to create a starter `trellis.yml` file in your project.
204
+
205
+ Options:
206
+
207
+ - `framework`: Transformation framework to use. Currently supported: `dbt-core`. Future: `dbt-fusion`, `sqlmesh`, `bruin`, `pydantic`. Defaults to `dbt-core`.
208
+ - `dbt_project_path`: Path to your dbt project directory (relative to `config.yml` or absolute). **Required**.
209
+ - `dbt_manifest_path`: Path to `manifest.json` (relative to `dbt_project_path` or absolute). Defaults to `target/manifest.json`.
210
+ - `dbt_catalog_path`: Path to `catalog.json` (relative to `dbt_project_path` or absolute). Defaults to `target/catalog.json`.
211
+ - `data_model_file`: Path where the data model YAML will be saved (relative to `dbt_project_path` or absolute). Defaults to `data_model.yml`.
212
+ - `dbt_model_paths`: List of path patterns to filter which dbt models are shown (e.g., `["3_core"]`). If empty, all models are included.
213
+
214
+ **Example `trellis.yml`:**
215
+ ```yaml
216
+ framework: dbt-core
217
+ dbt_project_path: "./dbt_built"
218
+ dbt_manifest_path: "target/manifest.json"
219
+ dbt_catalog_path: "target/catalog.json"
220
+ data_model_file: "data_model.yml"
221
+ dbt_model_paths:
222
+ - "3_core"
223
+ ```
224
+
225
+
226
+ ## Testing
227
+
228
+ ### Frontend
229
+ **Testing Libraries:**
230
+ The following testing libraries are defined in `package.json` under `devDependencies` and are automatically installed when you run `npm install`:
231
+ - [Vitest](https://vitest.dev/) (Unit testing)
232
+ - [Playwright](https://playwright.dev/) (End-to-End testing)
233
+ - [Testing Library](https://testing-library.com/) (DOM & Svelte testing utilities)
234
+ - [jsdom](https://github.com/jsdom/jsdom) (DOM environment)
235
+
236
+ > **Playwright system dependencies (Ubuntu/WSL2)**
237
+ >
238
+ > The browsers downloaded by Playwright need a handful of native libraries. Install them before running `npm run test:e2e`:
239
+ >
240
+ > ```bash
241
+ > sudo apt-get update && sudo apt-get install -y \
242
+ > libxcursor1 libxdamage1 libgtk-3-0 libpangocairo-1.0-0 libpango-1.0-0 \
243
+ > libatk1.0-0 libcairo-gobject2 libcairo2 libgdk-pixbuf-2.0-0 libasound2 \
244
+ > libnspr4 libnss3 libgbm1 libgles2-mesa libgtk-4-1 libgraphene-1.0-0 \
245
+ > libxslt1.1 libwoff2dec0 libvpx7 libevent-2.1-7 libopus0 \
246
+ > libgstallocators-1.0-0 libgstapp-1.0-0 libgstpbutils-1.0-0 libgstaudio-1.0-0 \
247
+ > libgsttag-1.0-0 libgstvideo-1.0-0 libgstgl-1.0-0 libgstcodecparsers-1.0-0 \
248
+ > libgstfft-1.0-0 libflite1 libflite1-plugins libwebpdemux2 libavif13 \
249
+ > libharfbuzz-icu0 libwebpmux3 libenchant-2-2 libsecret-1-0 libhyphen0 \
250
+ > libwayland-server0 libmanette-0.2-0 libx264-163
251
+ > ```
252
+
253
+ **Running Tests:**
254
+
255
+ The test suite has multiple levels to catch different types of issues:
256
+
257
+ ```bash
258
+ cd frontend
259
+
260
+ # Quick smoke test (catches 500 errors, runtime crashes, ESM issues)
261
+ # Fastest way to verify the app loads without errors
262
+ npm run test:smoke
263
+
264
+ # TypeScript/compilation check
265
+ npm run check
266
+
267
+ # Unit tests
268
+ npm run test:unit
269
+
270
+ # E2E tests (includes smoke test + full test suite)
271
+ # Note: Requires backend running with test data (see Test Data Isolation below)
272
+ npm run test:e2e
273
+
274
+ # Run all tests (check + smoke + unit + e2e)
275
+ npm run test
276
+ ```
277
+
278
+ **Test Levels:**
279
+ 1. **`npm run check`** - TypeScript compilation errors
280
+ 2. **`npm run test:smoke`** - Runtime errors (500s, console errors, ESM issues) - **catches app crashes**
281
+ 3. **`npm run test:unit`** - Unit tests with Vitest
282
+ 4. **`npm run test:e2e`** - Full E2E tests with Playwright
283
+
284
+ **Using Makefile:**
285
+ ```bash
286
+ # From project root
287
+ make test-smoke # Quick smoke test
288
+ make test-check # TypeScript check
289
+ make test-unit # Unit tests
290
+ make test-e2e # E2E tests (auto-starts backend with test data)
291
+ make test-all # All tests
292
+ ```
293
+
294
+ **Test Data Isolation:**
295
+ E2E tests use a separate test data file (`frontend/tests/test_data_model.yml`) to avoid polluting your production data model. **Playwright automatically starts the backend** with the correct environment variable, so you don't need to manage it manually.
296
+
297
+ ```bash
298
+ # Just run E2E tests - backend starts automatically with test data
299
+ make test-e2e
300
+ # OR:
301
+ # cd frontend && npm run test:e2e
302
+ ```
303
+
304
+ The test data file is automatically cleaned before and after test runs via Playwright's `globalSetup` and `globalTeardown`. Your production `data_model.yml` remains untouched.
305
+
306
+ ### Backend
307
+ **Testing Libraries:**
308
+ The following testing libraries are defined in `pyproject.toml` under `[project.optional-dependencies]` in the `dev` group:
309
+ - [pytest](https://docs.pytest.org/) (Testing framework)
310
+ - [httpx](https://www.python-httpx.org/) (Async HTTP client for API testing)
311
+
312
+ **Installation:**
313
+ Unlike `npm`, `uv sync` does not install optional dependencies by default. To include the testing libraries, run:
314
+ ```bash
315
+ uv sync --extra dev
316
+ ```
317
+
318
+ **Running Tests:**
319
+ ```bash
320
+ uv run pytest
321
+ ```
322
+
323
+ ## Collaboration
324
+
325
+ If you want to collaborate, reach out!
326
+
327
+ ## Contributing and CLA
328
+ - Contributions are welcome! Please read [`CONTRIBUTING.md`](CONTRIBUTING.md) for workflow, testing, and PR guidelines.
329
+ - All contributors must sign the CLA once per GitHub account. The CLA bot on pull requests will guide you; see [`CLA.md`](CLA.md) for details.
330
+
331
+ ## License
332
+ - Trellis Datamodel is licensed under the [GNU Affero General Public License v3.0](LICENSE).
333
+ - See [`NOTICE`](NOTICE) for a summary of copyright and licensing information.