laketower 0.6.2__tar.gz → 0.6.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {laketower-0.6.2 → laketower-0.6.4}/.github/workflows/ci-cd.yml +5 -5
- {laketower-0.6.2 → laketower-0.6.4}/.gitignore +4 -1
- {laketower-0.6.2 → laketower-0.6.4}/CHANGELOG.md +18 -1
- {laketower-0.6.2 → laketower-0.6.4}/PKG-INFO +4 -4
- {laketower-0.6.2 → laketower-0.6.4}/README.md +2 -2
- laketower-0.6.4/docs/static/queries_view.png +0 -0
- laketower-0.6.4/docs/static/tables_history.png +0 -0
- laketower-0.6.4/docs/static/tables_import.png +0 -0
- laketower-0.6.4/docs/static/tables_overview.png +0 -0
- laketower-0.6.4/docs/static/tables_query.png +0 -0
- laketower-0.6.4/docs/static/tables_statistics.png +0 -0
- laketower-0.6.4/docs/static/tables_view.png +0 -0
- laketower-0.6.4/laketower/__about__.py +1 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/cli.py +17 -18
- {laketower-0.6.2 → laketower-0.6.4}/laketower/tables.py +10 -6
- {laketower-0.6.2 → laketower-0.6.4}/laketower/templates/_base.html +72 -8
- {laketower-0.6.2 → laketower-0.6.4}/laketower/templates/queries/view.html +4 -4
- {laketower-0.6.2 → laketower-0.6.4}/laketower/templates/tables/query.html +4 -4
- {laketower-0.6.2 → laketower-0.6.4}/laketower/templates/tables/statistics.html +4 -4
- {laketower-0.6.2 → laketower-0.6.4}/laketower/templates/tables/view.html +4 -4
- {laketower-0.6.2 → laketower-0.6.4}/laketower/web.py +8 -3
- {laketower-0.6.2 → laketower-0.6.4}/pyproject.toml +5 -5
- {laketower-0.6.2 → laketower-0.6.4}/tasks.py +5 -1
- {laketower-0.6.2 → laketower-0.6.4}/tests/test_cli.py +8 -3
- {laketower-0.6.2 → laketower-0.6.4}/tests/test_web.py +59 -43
- {laketower-0.6.2 → laketower-0.6.4}/uv.lock +115 -119
- laketower-0.6.2/docs/static/queries_view.png +0 -0
- laketower-0.6.2/docs/static/tables_history.png +0 -0
- laketower-0.6.2/docs/static/tables_import.png +0 -0
- laketower-0.6.2/docs/static/tables_overview.png +0 -0
- laketower-0.6.2/docs/static/tables_query.png +0 -0
- laketower-0.6.2/docs/static/tables_statistics.png +0 -0
- laketower-0.6.2/docs/static/tables_view.png +0 -0
- laketower-0.6.2/laketower/__about__.py +0 -1
- {laketower-0.6.2 → laketower-0.6.4}/.python-version +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/LICENSE +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/generate.py +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/laketower.yml +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/sample_table/_delta_log/00000000000000000000.json +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/sample_table/_delta_log/00000000000000000001.json +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/sample_table/_delta_log/00000000000000000002.json +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/sample_table/_delta_log/00000000000000000003.json +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/sample_table/part-00001-1a31a393-6db6-4d1a-bf4e-81ea061ff8cd-c000.snappy.parquet +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/sample_table/part-00001-5af77102-9207-4c89-aaf6-37e1f815ec26-c000.snappy.parquet +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/sample_table/part-00001-b11bab55-43d0-4d05-ae88-5b9481ae57db-c000.snappy.parquet +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/weather/_delta_log/00000000000000000000.json +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/weather/_delta_log/00000000000000000001.json +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/weather/_delta_log/00000000000000000002.json +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/weather/part-00001-2323b963-be56-44e0-8c10-e237e7e6d4b9-c000.snappy.parquet +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/demo/weather/part-00001-6360cbf8-f8a9-475f-8729-6f20b4ca64a9-c000.snappy.parquet +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/__init__.py +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/__main__.py +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/config.py +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/static/.gitkeep +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/static/editor.bundle.js +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/static/editor.js +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/static/vendor/bootstrap/bootstrap.bundle.min.js +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/static/vendor/bootstrap-icons/bootstrap-icons.min.css +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/static/vendor/bootstrap-icons/fonts/bootstrap-icons.woff +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/static/vendor/bootstrap-icons/fonts/bootstrap-icons.woff2 +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/static/vendor/halfmoon/halfmoon.min.css +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/static/vendor/halfmoon/halfmoon.modern.css +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/templates/index.html +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/templates/tables/_macros.html +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/templates/tables/history.html +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/templates/tables/import.html +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/laketower/templates/tables/index.html +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/package-lock.json +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/package.json +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/renovate.json +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/tests/__init__.py +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/tests/conftest.py +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/tests/test_config.py +0 -0
- {laketower-0.6.2 → laketower-0.6.4}/tests/test_tables.py +0 -0
.github/workflows/ci-cd.yml

@@ -18,7 +18,7 @@ jobs:
     steps:
       - uses: actions/checkout@v5
       - name: Install uv
-        uses: astral-sh/setup-uv@
+        uses: astral-sh/setup-uv@v7
         with:
           python-version: ${{ matrix.python-version }}
           enable-cache: true

@@ -52,7 +52,7 @@ jobs:
           ref: main
       - uses: actions/checkout@v5
       - name: Install uv
-        uses: astral-sh/setup-uv@
+        uses: astral-sh/setup-uv@v7
         with:
           python-version: "3.13"
           enable-cache: true

@@ -126,7 +126,7 @@ jobs:
     steps:
       - uses: actions/checkout@v5
       - name: Install uv
-        uses: astral-sh/setup-uv@
+        uses: astral-sh/setup-uv@v7
         with:
           python-version: '3.13'
           enable-cache: true

@@ -187,7 +187,7 @@ jobs:
       - name: Publish package distributions to PyPI
         uses: pypa/gh-action-pypi-publish@release/v1
      - name: Install uv
-        uses: astral-sh/setup-uv@
+        uses: astral-sh/setup-uv@v7
      - name: Validate package is available with uvx
        run: uvx laketower --version

@@ -209,7 +209,7 @@ jobs:
          name: build
          path: dist/
      - name: Install uv
-        uses: astral-sh/setup-uv@
+        uses: astral-sh/setup-uv@v7
        with:
          python-version: '3.13'
          enable-cache: true
CHANGELOG.md

@@ -7,6 +7,21 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+## [0.6.4] - 2025-10-20
+### Fixed
+- add missing `tzdata` dependency from previous `pandas` dependency removal
+
+## [0.6.3] - 2025-10-19
+Patch release removing unnecessary `pandas` dependency and updating the displayed
+application name in the web application.
+
+### Fixed
+- web: update application name
+- web: move application details to about modal window
+
+### Misc
+- replace usage of `pandas.DataFrame` with `pyarrow.Table`
+
 ## [0.6.2] - 2025-09-28
 
 Patch release fixing a bug when registering Arrow Datasets as tables instead of
 views with DuckDB query engine, leading to performance degradation on larger tables.

@@ -137,7 +152,9 @@ Initial release of `laketower`.
 - View a given table with simple query builder
 - Query all registered tables with DuckDB SQL dialect
 
-[Unreleased]: https://github.com/datalpia/laketower/compare/0.6.
+[Unreleased]: https://github.com/datalpia/laketower/compare/0.6.4...HEAD
+[0.6.4]: https://github.com/datalpia/laketower/compare/0.6.3...0.6.4
+[0.6.3]: https://github.com/datalpia/laketower/compare/0.6.2...0.6.3
 [0.6.2]: https://github.com/datalpia/laketower/compare/0.6.1...0.6.2
 [0.6.1]: https://github.com/datalpia/laketower/compare/0.6.0...0.6.1
 [0.6.0]: https://github.com/datalpia/laketower/compare/0.5.1...0.6.0
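The `tzdata` fix in 0.6.4 restores the IANA time zone database that `pandas` used to pull in transitively. A minimal sketch of the kind of lookup that depends on it — Python's `zoneinfo` falls back to the `tzdata` package when no system zone database is available; the exact call site inside laketower is not part of this diff:

```python
from datetime import datetime
from zoneinfo import ZoneInfo  # uses the OS tz database, else the tzdata package

# On slim images or Windows without /usr/share/zoneinfo, this raises
# ZoneInfoNotFoundError unless the tzdata distribution is installed.
paris = ZoneInfo("Europe/Paris")
print(datetime(2025, 10, 20, 12, 0, tzinfo=paris).isoformat())  # 2025-10-20T12:00:00+02:00
```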
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: laketower
-Version: 0.6.
+Version: 0.6.4
 Summary: Oversee your lakehouse
 Project-URL: Repository, https://github.com/datalpia/laketower
 Project-URL: Issues, https://github.com/datalpia/laketower/issues

@@ -28,7 +28,6 @@ Requires-Dist: duckdb
 Requires-Dist: fastapi
 Requires-Dist: jinja2!=3.1.5,>=3
 Requires-Dist: markdown
-Requires-Dist: pandas
 Requires-Dist: pyarrow!=19.0.0
 Requires-Dist: pydantic-settings>=2
 Requires-Dist: pydantic>=2

@@ -36,10 +35,11 @@ Requires-Dist: python-multipart
 Requires-Dist: pyyaml
 Requires-Dist: rich
 Requires-Dist: sqlglot
+Requires-Dist: tzdata
 Requires-Dist: uvicorn
 Description-Content-Type: text/markdown
 
-# Laketower
+# 🗼 Laketower
 
 > Oversee your lakehouse
 

@@ -614,4 +614,4 @@ $ laketower -c demo/laketower.yml queries view daily_avg_temperature_params -p s
 
 Licensed under [Apache License 2.0](LICENSE)
 
-Copyright (c) 2025 - present Romain Clement
+Copyright (c) 2025 - present Romain Clement / Datalpia
README.md

@@ -1,4 +1,4 @@
-# Laketower
+# 🗼 Laketower
 
 > Oversee your lakehouse
 

@@ -573,4 +573,4 @@ $ laketower -c demo/laketower.yml queries view daily_avg_temperature_params -p s
 
 Licensed under [Apache License 2.0](LICENSE)
 
-Copyright (c) 2025 - present Romain Clement
+Copyright (c) 2025 - present Romain Clement / Datalpia
docs/static/queries_view.png, tables_history.png, tables_import.png, tables_overview.png, tables_query.png, tables_statistics.png, tables_view.png: binary files, no textual diff.
laketower/__about__.py

@@ -0,0 +1 @@
+__version__ = "0.6.4"
laketower/cli.py

@@ -8,6 +8,7 @@ import rich.table
 import rich.text
 import rich.tree
 import uvicorn
+import pyarrow.csv as pacsv
 
 from laketower.__about__ import __version__
 from laketower.config import load_yaml_config

@@ -135,11 +136,10 @@ def table_statistics(
         results = execute_query({table_name: table_dataset}, sql_query)
 
         out = rich.table.Table()
-        for column in results.
+        for column in results.column_names:
             out.add_column(column)
-        for
-
-            out.add_row(*row)
+        for row_dict in results.to_pylist():
+            out.add_row(*[str(row_dict[col]) for col in results.column_names])
     except Exception as e:
         out = rich.panel.Panel.fit(f"[red]{e}")
 

@@ -168,11 +168,10 @@ def view_table(
         results = execute_query({table_name: table_dataset}, sql_query)
 
         out = rich.table.Table()
-        for column in results.
+        for column in results.column_names:
             out.add_column(column)
-        for
-
-            out.add_row(*row)
+        for row_dict in results.to_pylist():
+            out.add_row(*[str(row_dict[col]) for col in results.column_names])
     except Exception as e:
         out = rich.panel.Panel.fit(f"[red]{e}")
 

@@ -198,15 +197,16 @@ def query_table(
         results = execute_query(tables_dataset, sql_query, sql_params=query_params)
 
         out = rich.table.Table()
-        for column in results.
+        for column in results.column_names:
             out.add_column(column)
-        for
-
-            out.add_row(*row)
+        for row_dict in results.to_pylist():
+            out.add_row(*[str(row_dict[col]) for col in results.column_names])
 
         if output_path is not None:
-
-
+            pacsv.write_csv(
+                results,
+                output_path,
+                pacsv.WriteOptions(include_header=True, delimiter=","),
             )
             out = rich.text.Text(f"Query results written to: {output_path}")
     except ValueError as e:
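Each of these CLI hunks switches to the same rendering pattern: column names come from the Arrow schema and rows from `Table.to_pylist()`. A minimal, self-contained sketch of that pattern, with an in-memory table standing in for the `execute_query()` result:

```python
import pyarrow as pa
import rich.console
import rich.table

# Stand-in for the pyarrow.Table returned by execute_query().
results = pa.table({"city": ["Paris", "Lyon"], "temp_c": [18.5, 21.0]})

out = rich.table.Table()
for column in results.column_names:      # column names come from the Arrow schema
    out.add_column(column)
for row_dict in results.to_pylist():     # one dict per row, keyed by column name
    out.add_row(*[str(row_dict[col]) for col in results.column_names])

rich.console.Console().print(out)
```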
@@ -271,11 +271,10 @@ def view_query(
         results = execute_query(tables_dataset, sql_query, sql_params=sql_params)
 
         out = rich.table.Table()
-        for column in results.
+        for column in results.column_names:
             out.add_column(column)
-        for
-
-            out.add_row(*row)
+        for row_dict in results.to_pylist():
+            out.add_row(*[str(row_dict[col]) for col in results.column_names])
     except ValueError as e:
         out = rich.panel.Panel.fit(f"[red]{e}")
 
laketower/tables.py

@@ -4,8 +4,8 @@ from typing import Any, BinaryIO, Protocol, TextIO
 
 import deltalake
 import duckdb
-import pandas as pd
 import pyarrow as pa
+import pyarrow.csv as csv
 import pyarrow.dataset as padataset
 import pydantic
 import sqlglot

@@ -61,7 +61,7 @@ class TableProtocol(Protocol):  # pragma: no cover
     def history(self) -> TableHistory: ...
     def dataset(self, version: int | str | None = None) -> padataset.Dataset: ...
     def import_data(
-        self, data:
+        self, data: pa.Table, mode: ImportModeEnum = ImportModeEnum.append
     ) -> None: ...
 
 

@@ -202,7 +202,7 @@ class DeltaTable:
         return self._impl.to_pyarrow_dataset()
 
     def import_data(
-        self, data:
+        self, data: pa.Table, mode: ImportModeEnum = ImportModeEnum.append
     ) -> None:
         deltalake.write_deltalake(
             self.table_config.uri, data, mode=mode.value, schema_mode="merge"
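`import_data` now takes a `pyarrow.Table` directly. A minimal sketch of the underlying write shown above, using an in-memory table and a local path chosen purely for illustration:

```python
import deltalake
import pyarrow as pa

# Stand-in for the data a user imports into a table.
data = pa.table({"city": ["Paris", "Lyon"], "temp_c": [18.5, 21.0]})

# Appends to (or creates) the Delta table at the given URI; schema_mode="merge"
# lets extra columns in `data` extend the existing table schema.
deltalake.write_deltalake("demo_import_table", data, mode="append", schema_mode="merge")
```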
@@ -274,7 +274,7 @@ def execute_query(
     tables_datasets: dict[str, padataset.Dataset],
     sql_query: str,
     sql_params: dict[str, str] = {},
-) ->
+) -> pa.Table:
     if not sql_query:
         raise ValueError("Error: Cannot execute empty SQL query")
 

@@ -289,7 +289,7 @@ def execute_query(
             view_name = f"{table_name}_view"
             conn.register(view_name, table_dataset)
             conn.execute(f'create view "{table_name}" as select * from "{view_name}"')  # nosec B608
-        return conn.execute(sql_query, parameters=sql_params).
+        return conn.execute(sql_query, parameters=sql_params).fetch_arrow_table()
     except duckdb.Error as e:
         raise ValueError(str(e)) from e
 
@@ -303,7 +303,11 @@ def import_file_to_table(
     encoding: str = "utf-8",
 ) -> int:
     file_format_handler = {
-        ImportFileFormatEnum.csv: lambda f, d, e:
+        ImportFileFormatEnum.csv: lambda f, d, e: csv.read_csv(
+            f,
+            read_options=csv.ReadOptions(encoding=e),
+            parse_options=csv.ParseOptions(delimiter=d),
+        )
     }
     table = load_table(table_config)
     df = file_format_handler[file_format](file_path, delimiter, encoding)
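Putting the two pyarrow-facing changes above together — CSV ingestion via `pyarrow.csv` and query results returned as a `pyarrow.Table` through DuckDB's `fetch_arrow_table()` — a rough standalone sketch (the table name and payload are illustrative):

```python
import io

import duckdb
import pyarrow.csv as csv
import pyarrow.dataset as padataset

# Read a CSV payload straight into a pyarrow.Table (stand-in for an uploaded file).
payload = io.BytesIO(b"city;temp_c\nParis;18.5\nLyon;21.0")
table = csv.read_csv(
    payload,
    read_options=csv.ReadOptions(encoding="utf-8"),
    parse_options=csv.ParseOptions(delimiter=";"),
)

# Register it with DuckDB the same way execute_query() registers datasets,
# and fetch the result back as an Arrow table instead of a DataFrame.
conn = duckdb.connect()
conn.register("weather_view", padataset.dataset(table))
conn.execute('create view "weather" as select * from "weather_view"')
results = conn.execute("select city, temp_c from weather where temp_c > 20").fetch_arrow_table()
print(results.column_names, results.num_rows)  # ['city', 'temp_c'] 1
```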
laketower/templates/_base.html

@@ -68,14 +68,12 @@
         </ul>
 
         <div class="mt-auto pt-3 border-top flex-shrink-0">
-          <
-          <
-
-
-
-
-          </a>
-          </small>
+          <div class="px-3 d-block">
+            <button type="button" class="btn btn-link btn-sm text-muted text-decoration-none p-0" data-bs-toggle="modal" data-bs-target="#creditsModal">
+              <i class="bi-info-circle me-1"></i>
+              About
+            </button>
+          </div>
         </div>
       </div>
     </nav>

@@ -99,6 +97,72 @@
       </div>
     </main>
 
+    <div class="modal fade" id="creditsModal" tabindex="-1" aria-labelledby="creditsModalLabel" aria-hidden="true">
+      <div class="modal-dialog modal-dialog-centered">
+        <div class="modal-content">
+          <div class="modal-header">
+            <h5 class="modal-title" id="creditsModalLabel">About</h5>
+            <button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
+          </div>
+          <div class="modal-body">
+            <div class="text-center mb-3">
+              <h4>{{ app_metadata.app_name }}</h4>
+              <p class="text-muted mb-0">Oversee your lakehouse</p>
+            </div>
+
+            <dl class="row mb-3">
+              <dt class="col-sm-4">Version</dt>
+              <dd class="col-sm-8">
+                <a href="https://github.com/datalpia/laketower/releases/tag/{{ app_metadata.app_version }}"
+                   target="_blank"
+                   class="text-decoration-none">
+                  {{ app_metadata.app_version }}
+                  <i class="bi-box-arrow-up-right ms-1" style="font-size: 0.8em;"></i>
+                </a>
+              </dd>
+
+              <dt class="col-sm-4">Repository</dt>
+              <dd class="col-sm-8">
+                <a href="https://github.com/datalpia/laketower"
+                   target="_blank"
+                   class="text-decoration-none">
+                  github.com/datalpia/laketower
+                  <i class="bi-box-arrow-up-right ms-1" style="font-size: 0.8em;"></i>
+                </a>
+              </dd>
+
+              <dt class="col-sm-4">Issue Tracker</dt>
+              <dd class="col-sm-8">
+                <a href="https://github.com/datalpia/laketower/issues"
+                   target="_blank"
+                   class="text-decoration-none">
+                  Report an issue
+                  <i class="bi-box-arrow-up-right ms-1" style="font-size: 0.8em;"></i>
+                </a>
+              </dd>
+
+              <dt class="col-sm-4">License</dt>
+              <dd class="col-sm-8">
+                <a href="https://github.com/datalpia/laketower/blob/main/LICENSE"
+                   target="_blank"
+                   class="text-decoration-none">
+                  Apache License 2.0
+                  <i class="bi-box-arrow-up-right ms-1" style="font-size: 0.8em;"></i>
+                </a>
+              </dd>
+            </dl>
+
+            <div class="text-center text-muted">
+              <small>Copyright © 2025 Romain Clement / Datalpia</small>
+            </div>
+          </div>
+          <div class="modal-footer">
+            <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
+          </div>
+        </div>
+      </div>
+    </div>
+
     <script src="{{ url_for('static', path='/vendor/bootstrap/bootstrap.bundle.min.js') }}"></script>
     {% block extra_scripts %}{% endblock %}
   </body>
laketower/templates/queries/view.html

@@ -58,16 +58,16 @@
           <table class="table table-sm table-bordered table-striped table-hover">
             <thead>
               <tr>
-                {% for column in query_results.
+                {% for column in query_results.column_names %}
                 <th>{{ column }}</th>
                 {% endfor %}
               </tr>
             </thead>
             <tbody class="table-group-divider">
-              {% for row in query_results.
+              {% for row in query_results.to_pylist() %}
               <tr>
-                {% for
-                <td>{{
+                {% for column in query_results.column_names %}
+                <td>{{ row[column] }}</td>
                 {% endfor %}
               </tr>
               {% endfor %}
laketower/templates/tables/query.html

@@ -47,16 +47,16 @@
           <table class="table table-sm table-bordered table-striped table-hover">
             <thead>
               <tr>
-                {% for column in table_results.
+                {% for column in table_results.column_names %}
                 <th>{{ column }}</th>
                 {% endfor %}
               </tr>
             </thead>
             <tbody class="table-group-divider">
-              {% for row in table_results.
+              {% for row in table_results.to_pylist() %}
               <tr>
-                {% for
-                <td>{{
+                {% for column in table_results.column_names %}
+                <td>{{ row[column] }}</td>
                 {% endfor %}
               </tr>
               {% endfor %}
laketower/templates/tables/statistics.html

@@ -15,16 +15,16 @@
           <table class="table table-sm table-bordered table-striped table-hover">
             <thead>
               <tr>
-                {% for column in table_results.
+                {% for column in table_results.column_names %}
                 <th>{{ column }}</th>
                 {% endfor %}
               </tr>
             </thead>
             <tbody class="table-group-divider">
-              {% for row in table_results.
+              {% for row in table_results.to_pylist() %}
               <tr>
-                {% for
-                <td>{{
+                {% for column in table_results.column_names %}
+                <td>{{ row[column] }}</td>
                 {% endfor %}
               </tr>
               {% endfor %}
laketower/templates/tables/view.html

@@ -15,7 +15,7 @@
           <table class="table table-sm table-bordered table-striped table-hover">
             <thead>
               <tr>
-                {% for column in table_results.
+                {% for column in table_results.column_names %}
                 <th>
                   {{ column }}
                   {% if column == request.query_params.sort_asc %}

@@ -44,10 +44,10 @@
               </tr>
             </thead>
             <tbody class="table-group-divider">
-              {% for row in table_results.
+              {% for row in table_results.to_pylist() %}
               <tr>
-                {% for
-                <td>{{
+                {% for column in table_results.column_names %}
+                <td>{{ row[column] }}</td>
                 {% endfor %}
               </tr>
               {% endfor %}
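All four templates above rely on the same idea: the view hands the `pyarrow.Table` result to Jinja, and the template reads `column_names` and calls `to_pylist()` directly. A standalone sketch of that pattern (the template string below is illustrative, not the actual laketower markup):

```python
import jinja2
import pyarrow as pa

# Stand-in for the pyarrow.Table passed into the template context by the view.
table_results = pa.table({"city": ["Paris", "Lyon"], "temp_c": [18.5, 21.0]})

template = jinja2.Template(
    "{% for column in table_results.column_names %}{{ column }} {% endfor %}\n"
    "{% for row in table_results.to_pylist() %}"
    "{% for column in table_results.column_names %}{{ row[column] }} {% endfor %}\n"
    "{% endfor %}"
)
print(template.render(table_results=table_results))
```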
laketower/web.py

@@ -1,3 +1,4 @@
+import io
 import urllib.parse
 from dataclasses import dataclass
 from pathlib import Path

@@ -5,6 +6,7 @@ from typing import Annotated
 
 import bleach
 import markdown
+import pyarrow.csv as pacsv
 import pydantic_settings
 from fastapi import APIRouter, FastAPI, File, Form, Query, Request, UploadFile
 from fastapi.responses import HTMLResponse, RedirectResponse, Response

@@ -119,10 +121,13 @@ def export_tables_query_csv(request: Request, sql: str) -> Response:
     tables_dataset = load_datasets(config.tables)
 
     results = execute_query(tables_dataset, sql)
-    csv_content =
+    csv_content = io.BytesIO()
+    pacsv.write_csv(
+        results, csv_content, pacsv.WriteOptions(include_header=True, delimiter=",")
+    )
 
     return Response(
-        content=csv_content,
+        content=csv_content.getvalue(),
         media_type="text/csv",
         headers={"Content-Disposition": "attachment; filename=query_results.csv"},
     )
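A condensed sketch of the export path above: the Arrow result is serialized to CSV in memory and handed to the HTTP response as bytes. The in-memory table stands in for `execute_query()`, and the FastAPI wiring is reduced to the bare `Response` call:

```python
import io

import pyarrow as pa
import pyarrow.csv as pacsv
from fastapi.responses import Response

results = pa.table({"city": ["Paris", "Lyon"], "temp_c": [18.5, 21.0]})

csv_content = io.BytesIO()
pacsv.write_csv(
    results, csv_content, pacsv.WriteOptions(include_header=True, delimiter=",")
)

response = Response(
    content=csv_content.getvalue(),
    media_type="text/csv",
    headers={"Content-Disposition": "attachment; filename=query_results.csv"},
)
print(len(response.body), "bytes of CSV")
```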
@@ -415,7 +420,7 @@ def create_app() -> FastAPI:
     )
     app.include_router(router)
     app.state.app_metadata = AppMetadata(
-        app_name="Laketower", app_version=__about__.__version__
+        app_name="🗼 Laketower", app_version=__about__.__version__
     )
     app.state.config = config
 
pyproject.toml

@@ -14,7 +14,6 @@ dependencies = [
     "fastapi",
     "jinja2>=3,!=3.1.5",
     "markdown",
-    "pandas",
     "pyarrow!=19.0.0",
     "pydantic>=2",
     "pydantic-settings>=2",

@@ -22,6 +21,7 @@ dependencies = [
     "pyyaml",
     "rich",
     "sqlglot",
+    "tzdata",
     "uvicorn",
 ]
 keywords = ["data", "lakehouse", "sql", "delta-lake"]

@@ -56,17 +56,17 @@ build-backend = "hatchling.build"
 [dependency-groups]
 dev = [
     "bandit==1.8.6",
-    "beautifulsoup4==4.
-    "duckdb>1,!=1.4.0",
+    "beautifulsoup4==4.14.2",
     "httpx==0.28.1",
-    "invoke==2.2.
+    "invoke==2.2.1",
     "mypy==1.18.2",
+    "pandas==2.3.3",
     "pandas-stubs==2.3.2.250926",
     "pip-audit==2.9.0",
     "pyarrow-stubs==20.0.0.20250928",
     "pytest==8.4.2",
     "pytest-cov==7.0.0",
-    "ruff==0.
+    "ruff==0.14.1",
     "types-bleach==6.2.0.20250809",
     "types-markdown==3.9.0.20250906",
     "types-pyyaml==6.0.12.20250915",
tasks.py

@@ -18,7 +18,11 @@ def format(ctx: Context) -> None:
 
 @task
 def audit(ctx: Context) -> None:
-
+    ignored_vulns = [
+        "GHSA-4xh5-x5gv-qwph",  # pip<=25.2 affected, no resolution yet
+    ]
+    options = [f"--ignore-vuln {vuln}" for vuln in ignored_vulns]
+    ctx.run(f"pip-audit {' '.join(options)}", echo=True, pty=True)
 
 
 @task
@@ -6,6 +6,7 @@ from typing import Any
|
|
|
6
6
|
import deltalake
|
|
7
7
|
import pandas as pd
|
|
8
8
|
import pyarrow as pa
|
|
9
|
+
import pyarrow.csv as pacsv
|
|
9
10
|
import pytest
|
|
10
11
|
import yaml
|
|
11
12
|
|
|
@@ -744,7 +745,7 @@ def test_tables_query_output_csv(
|
|
|
744
745
|
delta_table: deltalake.DeltaTable,
|
|
745
746
|
) -> None:
|
|
746
747
|
selected_column = delta_table.schema().fields[0].name
|
|
747
|
-
selected_limit =
|
|
748
|
+
selected_limit = 3
|
|
748
749
|
|
|
749
750
|
output_csv_path = tmp_path / "output.csv"
|
|
750
751
|
|
|
@@ -774,8 +775,12 @@ def test_tables_query_output_csv(
|
|
|
774
775
|
df = delta_table.to_pandas()
|
|
775
776
|
expected_output = df[[selected_column]][0:selected_limit]
|
|
776
777
|
expected_csv_path = tmp_path / "expected.csv"
|
|
777
|
-
|
|
778
|
-
|
|
778
|
+
|
|
779
|
+
expected_table = pa.Table.from_pandas(expected_output)
|
|
780
|
+
pacsv.write_csv(
|
|
781
|
+
expected_table,
|
|
782
|
+
expected_csv_path,
|
|
783
|
+
pacsv.WriteOptions(include_header=True, delimiter=","),
|
|
779
784
|
)
|
|
780
785
|
assert output_csv_path.read_text() == expected_csv_path.read_text()
|
|
781
786
|
|