zaturn 0.1.7__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- zaturn/mcp/__init__.py +97 -0
- zaturn/studio/__init__.py +5 -0
- zaturn/studio/agent_wrapper.py +131 -0
- zaturn/studio/app.py +288 -0
- zaturn/studio/static/fira_code.ttf +0 -0
- zaturn/studio/static/inter_ital_var.ttf +0 -0
- zaturn/studio/static/inter_var.ttf +0 -0
- zaturn/studio/static/js/htmx-multi-swap.js +44 -0
- zaturn/studio/static/js/htmx.min.js +1 -0
- zaturn/studio/static/logo.png +0 -0
- zaturn/studio/static/logo.svg +10 -0
- zaturn/studio/static/noto_emoji.ttf +0 -0
- zaturn/studio/storage.py +85 -0
- zaturn/studio/templates/_shell.html +38 -0
- zaturn/studio/templates/ai_message.html +4 -0
- zaturn/studio/templates/c_settings_updated.html +1 -0
- zaturn/studio/templates/c_source_card.html +19 -0
- zaturn/studio/templates/chat.html +22 -0
- zaturn/studio/templates/css/style.css +406 -0
- zaturn/studio/templates/function_call.html +7 -0
- zaturn/studio/templates/loader.html +1 -0
- zaturn/studio/templates/manage_sources.html +45 -0
- zaturn/studio/templates/nav.html +5 -0
- zaturn/studio/templates/new_conversation.html +13 -0
- zaturn/studio/templates/settings.html +29 -0
- zaturn/studio/templates/setup_prompt.html +6 -0
- zaturn/studio/templates/user_message.html +4 -0
- zaturn/tools/__init__.py +13 -0
- zaturn/{config.py → tools/config.py} +7 -9
- zaturn/tools/core.py +97 -0
- zaturn/{query_utils.py → tools/query_utils.py} +52 -2
- zaturn/tools/visualizations.py +267 -0
- zaturn-0.2.0.dist-info/METADATA +128 -0
- zaturn-0.2.0.dist-info/RECORD +39 -0
- {zaturn-0.1.7.dist-info → zaturn-0.2.0.dist-info}/WHEEL +1 -1
- zaturn-0.2.0.dist-info/entry_points.txt +3 -0
- zaturn/__init__.py +0 -14
- zaturn/core.py +0 -140
- zaturn/visualizations.py +0 -155
- zaturn-0.1.7.dist-info/METADATA +0 -185
- zaturn-0.1.7.dist-info/RECORD +0 -12
- zaturn-0.1.7.dist-info/entry_points.txt +0 -2
- /zaturn/{example_data → tools/example_data}/all_pokemon_data.csv +0 -0
- {zaturn-0.1.7.dist-info → zaturn-0.2.0.dist-info}/licenses/LICENSE +0 -0
- {zaturn-0.1.7.dist-info → zaturn-0.2.0.dist-info}/top_level.txt +0 -0
zaturn/{query_utils.py → tools/query_utils.py}
@@ -7,14 +7,64 @@ import sqlalchemy
 from sqlalchemy.orm import Session
 import time
 from typing import List
-from zaturn import config
+from zaturn.tools import config
 
 
+def list_tables(source):
+    try:
+        match source['source_type']:
+            case "sqlite":
+                result = execute_query(source,
+                    "SELECT name FROM sqlite_schema WHERE type ='table' AND name NOT LIKE 'sqlite_%';"
+                )
+                return result['name'].to_list()
+
+            case "postgresql":
+                result = execute_query(source,
+                    "SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema';"
+                )
+                return result['tablename'].to_list()
+
+            case "mysql":
+                result = execute_query(source, "SHOW TABLES")
+                for col in list(result):
+                    if col.startswith("Tables_in_"):
+                        return result[col].to_list()
+
+            case "duckdb" | "csv" | "parquet" | "clickhouse":
+                result = execute_query(source, "SHOW TABLES")
+                return result['name'].to_list()
+
+    except Exception as e:
+        return str(e)
+
+
+def describe_table(source, table_name):
+    match source['source_type']:
+        case 'sqlite':
+            return execute_query(source,
+                f'PRAGMA table_info("{table_name}");'
+            )
+
+        case 'postgresql':
+            return execute_query(source,
+                f"SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = '{table_name}';"
+            )
+
+        case "mysql" | "duckdb" | "csv" | "parquet" | "clickhouse":
+            if ' ' in table_name:
+                table_name = f'`{table_name}`'
+
+            return execute_query(source,
+                f'DESCRIBE {table_name};'
+            )
+
+
 def execute_query(source: dict, query: str):
     """Run the query using the appropriate engine and read only config"""
     url = source['url']
 
-    match source['
+    match source['source_type']:
         case "sqlite":
             with sqlalchemy.create_engine(url).connect() as conn:
                 conn.execute(sqlalchemy.text('PRAGMA query_only = ON;'))
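A minimal usage sketch for the helpers added above, assuming a SQLite source: the dict keys `source_type` and `url` mirror what `execute_query()` reads in this diff, while the database URL itself is hypothetical.

```python
# Hedged sketch: calling the new helpers in zaturn/tools/query_utils.py directly.
# The SQLite URL is a made-up example; the dict shape follows execute_query() above.
from zaturn.tools import query_utils

source = {
    'source_type': 'sqlite',
    'url': 'sqlite:///example.db',  # hypothetical database path
}

tables = query_utils.list_tables(source)  # list of table names, or an error string
for table in tables:
    # describe_table returns the query result (column info) for the table
    print(table)
    print(query_utils.describe_table(source, table))
```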
zaturn/tools/visualizations.py
ADDED
@@ -0,0 +1,267 @@
+from base64 import b64encode
+from typing import Any, List, Union, Annotated
+
+from mcp.types import ImageContent
+import plotly.express as px
+from pydantic import Field
+
+from zaturn.tools import query_utils
+
+
+
+
+def _fig_to_image(fig):
+    fig_encoded = b64encode(fig.to_image(format='png')).decode()
+    img_b64 = "data:image/png;base64," + fig_encoded
+
+    return ImageContent(
+        type = 'image',
+        data = fig_encoded,
+        mimeType = 'image/png',
+        annotations = None,
+    )
+
+
+class Visualizations:
+
+    def __init__(self, data_sources):
+        self.data_sources = data_sources
+        self.tools = [
+            self.scatter_plot,
+            self.line_plot,
+            self.histogram,
+            self.strip_plot,
+            self.box_plot,
+            self.bar_plot,
+        ]
+
+
+    def _get_df_from_source(self, source_id, query):
+        source = self.data_sources.get(source_id)
+        if not source:
+            raise Exception(f"Source {source_id} Not Found")
+
+        return query_utils.execute_query(source, query)
+
+
+    def scatter_plot(self,
+        source_id: Annotated[
+            str, Field(description='The data source to run the query on')
+        ],
+        query: Annotated[
+            str, Field(description='SQL query to run on the data source')
+        ],
+        x: Annotated[
+            str, Field(description='Column name from SQL result to use for x-axis')
+        ],
+        y: Annotated[
+            str, Field(description='Column name from SQL result to use for y-axis')
+        ],
+        color: Annotated[
+            str | None, Field(description='Optional; column name from SQL result to use for coloring the points, with color representing another dimension')
+        ] = None,
+    ) -> str:
+        """
+        Run query against specified source and make a scatter plot using result
+        For both csv and parquet sources, use DuckDB SQL syntax
+        Use 'CSV' as the table name in the SQL query for csv sources.
+        Use 'PARQUET' as the table name in the SQL query for parquet sources.
+
+        This will return an image of the plot
+        """
+
+        try:
+            df = self._get_df_from_source(source_id, query)
+            fig = px.scatter(df, x=x, y=y, color=color)
+            fig.update_xaxes(autotickangles=[0, 45, 60, 90])
+
+            return _fig_to_image(fig)
+        except Exception as e:
+            return str(e)
+
+
+    def line_plot(self,
+        source_id: Annotated[
+            str, Field(description='The data source to run the query on')
+        ],
+        query: Annotated[
+            str, Field(description='SQL query to run on the data source')
+        ],
+        x: Annotated[
+            str, Field(description='Column name from SQL result to use for x-axis')
+        ],
+        y: Annotated[
+            str, Field(description='Column name from SQL result to use for y-axis')
+        ],
+        color: Annotated[
+            str | None, Field(description='Optional; column name from SQL result to use for drawing multiple colored lines representing another dimension')
+        ] = None,
+    ) -> str:
+        """
+        Run query against specified source and make a line plot using result
+        For both csv and parquet sources, use DuckDB SQL syntax
+        Use 'CSV' as the table name in the SQL query for csv sources.
+        Use 'PARQUET' as the table name in the SQL query for parquet sources.
+
+        This will return an image of the plot
+        """
+
+        try:
+            df = self._get_df_from_source(source_id, query)
+            fig = px.line(df, x=x, y=y, color=color)
+            fig.update_xaxes(autotickangles=[0, 45, 60, 90])
+
+            return _fig_to_image(fig)
+        except Exception as e:
+            return str(e)
+
+
+    def histogram(self,
+        source_id: Annotated[
+            str, Field(description='The data source to run the query on')
+        ],
+        query: Annotated[
+            str, Field(description='SQL query to run on the data source')
+        ],
+        column: Annotated[
+            str, Field(description='Column name from SQL result to use for the histogram')
+        ],
+        color: Annotated[
+            str | None, Field(description='Optional; column name from SQL result to use for drawing multiple colored histograms representing another dimension')
+        ] = None,
+        nbins: Annotated[
+            int | None, Field(description='Optional; number of bins')
+        ] = None,
+    ) -> str:
+        """
+        Run query against specified source and make a histogram using result
+        For both csv and parquet sources, use DuckDB SQL syntax
+        Use 'CSV' as the table name in the SQL query for csv sources.
+        Use 'PARQUET' as the table name in the SQL query for parquet sources.
+
+        This will return an image of the plot
+        """
+
+        try:
+            df = self._get_df_from_source(source_id, query)
+            fig = px.histogram(df, x=column, color=color, nbins=nbins)
+            fig.update_xaxes(autotickangles=[0, 45, 60, 90])
+
+            return _fig_to_image(fig)
+        except Exception as e:
+            return str(e)
+
+
+    def strip_plot(self,
+        source_id: Annotated[
+            str, Field(description='The data source to run the query on')
+        ],
+        query: Annotated[
+            str, Field(description='SQL query to run on the data source')
+        ],
+        x: Annotated[
+            str, Field(description='Column name from SQL result to use for x-axis')
+        ],
+        y: Annotated[
+            str, Field(description='Column name from SQL result to use for y-axis')
+        ],
+        color: Annotated[
+            str | None, Field(description='Optional column name from SQL result to show multiple colored strips representing another dimension')
+        ] = None,
+    ) -> str:
+        """
+        Run query against specified source and make a strip plot using result
+        For both csv and parquet sources, use DuckDB SQL syntax
+        Use 'CSV' as the table name in the SQL query for csv sources.
+        Use 'PARQUET' as the table name in the SQL query for parquet sources.
+
+        This will return an image of the plot
+        """
+
+        try:
+            df = self._get_df_from_source(source_id, query)
+            fig = px.strip(df, x=x, y=y, color=color)
+            fig.update_xaxes(autotickangles=[0, 45, 60, 90])
+
+            return _fig_to_image(fig)
+        except Exception as e:
+            return str(e)
+
+
+    def box_plot(self,
+        source_id: Annotated[
+            str, Field(description='The data source to run the query on')
+        ],
+        query: Annotated[
+            str, Field(description='SQL query to run on the data source')
+        ],
+        x: Annotated[
+            str, Field(description='Column name from SQL result to use for x-axis')
+        ],
+        y: Annotated[
+            str, Field(description='Column name from SQL result to use for y-axis')
+        ],
+        color: Annotated[
+            str | None, Field(description='Optional column name from SQL result to show multiple colored bars representing another dimension')
+        ] = None,
+    ) -> str:
+        """
+        Run query against specified source and make a box plot using result
+        For both csv and parquet sources, use DuckDB SQL syntax
+        Use 'CSV' as the table name in the SQL query for csv sources.
+        Use 'PARQUET' as the table name in the SQL query for parquet sources.
+
+        This will return an image of the plot
+        """
+
+        try:
+            df = self._get_df_from_source(source_id, query)
+            fig = px.box(df, x=x, y=y, color=color)
+            fig.update_xaxes(autotickangles=[0, 45, 60, 90])
+
+            return _fig_to_image(fig)
+        except Exception as e:
+            return str(e)
+
+
+    def bar_plot(self,
+        source_id: Annotated[
+            str, Field(description='The data source to run the query on')
+        ],
+        query: Annotated[
+            str, Field(description='SQL query to run on the data source')
+        ],
+        x: Annotated[
+            str, Field(description='Column name from SQL result to use for x-axis')
+        ],
+        y: Annotated[
+            str, Field(description='Column name from SQL result to use for y-axis')
+        ],
+        color: Annotated[
+            str | None, Field(description='Optional column name from SQL result to use as a 3rd dimension by splitting each bar into colored sections')
+        ] = None,
+        orientation: Annotated[
+            str, Field(description="Orientation of the box plot, use 'v' for vertical (default) and 'h' for horizontal. Be mindful of choosing the correct X and Y columns as per orientation")
+        ] = 'v',
+    ) -> str:
+        """
+        Run query against specified source and make a bar plot using result
+        For both csv and parquet sources, use DuckDB SQL syntax
+        Use 'CSV' as the table name in the SQL query for csv sources.
+        Use 'PARQUET' as the table name in the SQL query for parquet sources.
+
+        This will return an image of the plot
+        """
+
+        try:
+            df = self._get_df_from_source(source_id, query)
+            fig = px.bar(df, x=x, y=y, color=color, orientation=orientation)
+            fig.update_xaxes(autotickangles=[0, 45, 60, 90])
+
+            return _fig_to_image(fig)
+        except Exception as e:
+            return str(e)
+
+
+if __name__=="__main__":
+    print(ImageContent)
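A hedged usage sketch for the new Visualizations class added above: it is constructed with a mapping of source ids to source dicts, and each tool method returns an ImageContent on success or an error string. The source id, file path, and column names below are illustrative assumptions, not taken from the package.

```python
# Hedged sketch: wiring Visualizations to one CSV source and drawing a scatter plot.
# 'pokemon-csv', the file path, and the column names are assumptions for illustration;
# the source dict shape matches what query_utils.execute_query() expects in this diff.
from zaturn.tools.visualizations import Visualizations

data_sources = {
    'pokemon-csv': {
        'source_type': 'csv',
        'url': 'zaturn/tools/example_data/all_pokemon_data.csv',
    }
}

viz = Visualizations(data_sources)

# Per the tool docstrings, CSV sources are queried with DuckDB SQL syntax
# under the table name 'CSV'.
result = viz.scatter_plot(
    source_id='pokemon-csv',
    query='SELECT "Attack", "Defense" FROM CSV',
    x='Attack',
    y='Defense',
)
print(type(result))  # ImageContent on success, str(error) on failure
```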
zaturn-0.2.0.dist-info/METADATA
ADDED
@@ -0,0 +1,128 @@
+Metadata-Version: 2.4
+Name: zaturn
+Version: 0.2.0
+Summary: AI Data Analysis MCP & Studio
+Author-email: Karthik Devan <krtdvn@gmail.com>
+Maintainer-email: Karthik Devan <krtdvn@gmail.com>
+Project-URL: Homepage, https://github.com/kdqed/zaturn
+Project-URL: Issues, https://github.com/kdqed/zaturn/issues
+Requires-Python: >=3.11
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: clickhouse-connect>=0.8.17
+Requires-Dist: cryptography>=44.0.2
+Requires-Dist: duckdb>=1.2.1
+Requires-Dist: fastmcp>=0.4.1
+Requires-Dist: flask[async]>=3.1.1
+Requires-Dist: function-schema>=0.4.5
+Requires-Dist: kaleido==0.2.1
+Requires-Dist: mistune>=3.1.3
+Requires-Dist: openai>=1.82.1
+Requires-Dist: openai-agents>=0.0.16
+Requires-Dist: pandas>=2.2.3
+Requires-Dist: pillow>=11.2.1
+Requires-Dist: platformdirs>=4.3.7
+Requires-Dist: plotly[express]>=6.0.1
+Requires-Dist: psycopg2-binary>=2.9.10
+Requires-Dist: pyarrow>=19.0.1
+Requires-Dist: pymysql>=1.1.1
+Requires-Dist: python-lsp-server>=1.12.2
+Requires-Dist: python-slugify>=8.0.4
+Requires-Dist: seaborn>=0.13.2
+Requires-Dist: setuptools>=78.1.0
+Requires-Dist: sqlalchemy>=2.0.40
+Requires-Dist: tabulate>=0.9.0
+Requires-Dist: tomli-w>=1.2.0
+Requires-Dist: werkzeug>=3.1.3
+Dynamic: license-file
+
+<h1>
+<img src="https://github.com/kdqed/zaturn/raw/main/zaturn/studio/static/logo.png" width="24" height="24">
+<span>Zaturn: Your Co-Pilot For Data Analytics & Business Insights</span>
+</h1>
+
+<a href="https://discord.gg/K8mECeVzpQ">
+<img src="https://zaturn.pro/assets/discord-full.png" height="20" width="133" alt="Discord Logo">
+</a>
+
+## Just Chat With Your Data! No SQL, No Python.
+
+Zaturn provides tools that enable AI models to run SQL, so you don't have to. It can be used as an MCP or as a web interface similar to Jupyter Notebook.
+
+## Zaturn in Action
+
+https://github.com/user-attachments/assets/d42dc433-e5ec-4b3e-bef0-5cfc097396ab
+
+## Features:
+
+### Multiple Data Sources
+
+Zaturn can currently connect to the following data sources:
+- SQL Databases: PostgreSQL, SQLite, DuckDB, MySQL, ClickHouse
+- Files: CSV, Parquet
+
+Connectors for more data sources are being added.
+
+### Visualizations
+
+In addition to providing tabular and textual summaries, Zaturn can also generate the following image visualizations
+
+- Scatter and Line Plots
+- Histograms
+- Strip and Box Plots
+- Bar Plots
+
+More visualization capabilities are being added.
+
+
+## Installation & Setup
+
+See [https://zaturn.pro/install](https://zaturn.pro/install)
+
+
+## Roadmap
+
+- Support for more data source types
+- More data visualizations
+- Predictive analysis and forecasting, e.g.:
+  ```
+  Based on the revenue of the last 3 months, forecast next month's revenue.
+  ```
+- Generate Presentations & PDFs
+  ```
+  Manager:
+    I need a presentation to show the boss. Can you do it by EOD?
+  Analyst:
+    EOD?! Are you still in the 2010s?
+    I can get it done right now. Actually, you can do it right now.
+    You know what? The boss can do it right now.
+  ```
+- A native notebook interface
+
+## Help And Feedback
+
+[Raise an issue](https://github.com/kdqed/zaturn/issues) or [join the Discord](https://discord.gg/K8mECeVzpQ).
+
+
+## Support The Project
+
+If you find Zaturn useful, please support this project by:
+- Starring the Project
+- Spreading the word
+
+Your support will enable me to dedicate more of my time to Zaturn.
+
+## Example Dataset Credits
+
+The [pokemon dataset compiled by Sarah Taha and PokéAPI](https://www.kaggle.com/datasets/sarahtaha/1025-pokemon) has been included under the [CC BY-NC-SA 4.0](https://creativecommons.org/licenses/by-nc-sa/4.0/) license for demonstration purposes.
+
+## Featured on glama.ai
+
+<a href="https://glama.ai/mcp/servers/@kdqed/zaturn">
+<img width="380" height="200" src="https://glama.ai/mcp/servers/@kdqed/zaturn/badge" alt="Zaturn MCP server" />
+</a>
+
+
+## Star History
+
+[](https://www.star-history.com/#kdqed/zaturn&Date)
zaturn-0.2.0.dist-info/RECORD
ADDED
@@ -0,0 +1,39 @@
+zaturn/mcp/__init__.py,sha256=-fyROX8LHX1UXiLYE3kk17pXIILW-DVrFviechA57Pg,3213
+zaturn/studio/__init__.py,sha256=A1QxWydtsE0VmsJlvF7O9Fx1-WKjV3MBPje7knFSDzw,83
+zaturn/studio/agent_wrapper.py,sha256=4mEMRfE1Tfv63D7OIabqUbEJdIgO2yo8UXO6PKvNrmI,4603
+zaturn/studio/app.py,sha256=LEoSjs5wpZGX1B7pcJ2e_6CaxOFjbO6JitxlkJv__30,9208
+zaturn/studio/storage.py,sha256=4fZ_pwc8goe_tKZcYSxHg0lOy79OFn8TgxTroxBmbLs,2041
+zaturn/studio/static/fira_code.ttf,sha256=mqZDedYDyifatZw-V4E2gnkwNfVs5GgvudgTKEd_0UQ,264848
+zaturn/studio/static/inter_ital_var.ttf,sha256=YTb3M3L-3De4D9Go7D4hc0Bz_xc3bzZycY8YZ3lnLno,904532
+zaturn/studio/static/inter_var.ttf,sha256=C-I5nqkl8fg_-XR2R2HamGDsUHQu0ppdTB_9DFx6w6g,874708
+zaturn/studio/static/logo.png,sha256=QNyNJSy_KFdPmHoocqkynN8yp73vFqqcMthPmNGLPg8,20916
+zaturn/studio/static/logo.svg,sha256=UH5m-H2YZy54eJYKd_zM8xeEWntOaYLfrs4MmGw2MUI,588
+zaturn/studio/static/noto_emoji.ttf,sha256=tX7Yla6dCbp7SxnDQ6dc85qtVxVsNFDjOdnRFVbX7cE,890608
+zaturn/studio/static/js/htmx-multi-swap.js,sha256=It1Qs3c1QcnlBLN0kwbMAE0U_Ww5oxeUdMJFdIqbRwU,1480
+zaturn/studio/static/js/htmx.min.js,sha256=acrv0NqSJpBm5yXX_hdeJrnVDJYuMFZFnAxHcVTNudM,50918
+zaturn/studio/templates/_shell.html,sha256=4WxTSo8OoqEtyoUJoeb0BlP3j4ZSpVxunPNndVa7yYc,1061
+zaturn/studio/templates/ai_message.html,sha256=-lFJmjxs5QHeIQLfx1-he4zamMO5U7enco8DPGoOItE,114
+zaturn/studio/templates/c_settings_updated.html,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+zaturn/studio/templates/c_source_card.html,sha256=TOFssoWHc9pJEWW-GpUGWu4_wZMfJFoFWNtIj3x_LtQ,727
+zaturn/studio/templates/chat.html,sha256=29I8R8bti_qMzU9FjBOGiLVEh-g1Es1VbaN6Dj_OMBA,660
+zaturn/studio/templates/function_call.html,sha256=5NBzHE9KYlOgmNqv_Nz2PwsNVVhpSVM9NebhTMpAttg,316
+zaturn/studio/templates/loader.html,sha256=jkrpmPA3_P0aL0ZrE_wkoGlnK12Go-OI2xtUyCDRikQ,64
+zaturn/studio/templates/manage_sources.html,sha256=mLyNAYNu-u0mjTH8ybYO8HCYhHP6NVao20jNyqevu9g,1350
+zaturn/studio/templates/nav.html,sha256=6wB7zO-DEsJn_-ZUujq8iIoYi5fFeYUJoMPnix320Q8,104
+zaturn/studio/templates/new_conversation.html,sha256=p0e8D7b_riARS6xmqGk-g5tITd_ikgggVWWWlZ8MN5E,403
+zaturn/studio/templates/settings.html,sha256=ZmukMbTsGGsVauEg9EtyyiR2_fOYvO5fAEAiJL0CJnU,760
+zaturn/studio/templates/setup_prompt.html,sha256=K8l3ewlEyrUFiMmw2qPyDFbFNkq1RCBksrw9eL2tkIs,249
+zaturn/studio/templates/user_message.html,sha256=WbvdVr4AVzVx6MG0eQ_ulMeHB9rpScZc-EZlCoYFEgw,90
+zaturn/studio/templates/css/style.css,sha256=cKImLAjLp6XjHdT16v9yJS0DHG2wWx8djR18SHfMqvE,6264
+zaturn/tools/__init__.py,sha256=vOcHneRuZll9k-2e3sra7m8qfdc4IoAFYc2g_xK79HE,262
+zaturn/tools/config.py,sha256=8YCwFp1emhEfbbJd_KTQ_Nrl4yt9tJoDZHgMXatJGjk,3382
+zaturn/tools/core.py,sha256=4fHxSIbO22cTIejJk02GzUBfWimh1Ql9mZ3JgI1qLBY,2893
+zaturn/tools/query_utils.py,sha256=x769ZrpbnpxHwdQf6PbAUx6-qMGWuqfMOTA3kVjRUcE,4808
+zaturn/tools/visualizations.py,sha256=aPXCN1YIjCz6uiwJjduKCiLLtnBFX92hTvjy5EqQYk8,9509
+zaturn/tools/example_data/all_pokemon_data.csv,sha256=SUlGHHWbehuLg-ch1YUrQ6-xBtqHGw6rIkyn70fAgCk,130893
+zaturn-0.2.0.dist-info/licenses/LICENSE,sha256=mZSuFlbEBZGl0-8ULRMLdRDbhau5hrWRNQOjytYeaug,1070
+zaturn-0.2.0.dist-info/METADATA,sha256=9_odOtbZ5NNo5nu5hDOXIEWG1TlIlxr39DdDVakp_pg,3957
+zaturn-0.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+zaturn-0.2.0.dist-info/entry_points.txt,sha256=MWMWX0dE_ZQM4StGKCLxJym_D91F82RcBi2LBj0hEho,82
+zaturn-0.2.0.dist-info/top_level.txt,sha256=KLUnwQwVZkfd5YCnnqR35MOOs8KLhanPGelvmRo2MVA,7
+zaturn-0.2.0.dist-info/RECORD,,
zaturn/__init__.py
DELETED
@@ -1,14 +0,0 @@
-from fastmcp import FastMCP
-from zaturn import core, visualizations
-
-# Mount modules and make MCP
-mcp = FastMCP("Zaturn MCP")
-mcp.mount("core", core.mcp)
-mcp.mount("visualizations", visualizations.mcp)
-
-def main():
-    mcp.run()
-
-
-if __name__=="__main__":
-    main()
zaturn/core.py
DELETED
@@ -1,140 +0,0 @@
-from fastmcp import FastMCP
-import os
-from typing import Any, List, Union
-from zaturn import config, query_utils
-
-mcp = FastMCP("Zaturn Core")
-
-
-@mcp.tool()
-def list_sources() -> str:
-    """
-    List all available data sources.
-    Returns a list of unique source_ids to be used for other queries.
-    Source type is included in the source_id string.
-    While drafting SQL queries use appropriate syntax as per source type.
-    """
-    try:
-        if not config.SOURCES:
-            return "No data sources available. Add sources using the command line parameters."
-
-        result = "Available data sources:\n\n"
-        for source in config.SOURCES:
-            tables = _list_tables(source)
-            if type(tables) is List:
-                tables = ', '.join(tables)
-            result += f"- {source}\nHas tables: {tables}\n"
-
-        return result
-    except Exception as e:
-        return str(e)
-
-
-def _list_tables(source_id: str):
-    """
-    Lists names of all tables/datasets in a given data source.
-    Use run_query with appropriate SQL query to determine table structure
-
-    Args:
-        source_id: The data source to list tables from
-    """
-    try:
-        source = config.SOURCES.get(source_id)
-        if not source:
-            return f"Source {source_id} Not Found"
-
-        match source['type']:
-            case "sqlite":
-                result = query_utils.execute_query(source,
-                    "SELECT name FROM sqlite_schema WHERE type ='table' AND name NOT LIKE 'sqlite_%';"
-                )
-                return result['name'].to_list()
-
-            case "postgresql":
-                result = query_utils.execute_query(source,
-                    "SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema';"
-                )
-                return result['tablename'].to_list()
-
-            case "mysql":
-                result = query_utils.execute_query(source, "SHOW TABLES")
-                for col in list(result):
-                    if col.startswith("Tables_in_"):
-                        return result[col].to_list()
-
-            case "duckdb" | "csv" | "parquet" | "clickhouse":
-                result = query_utils.execute_query(source, "SHOW TABLES")
-                return result['name'].to_list()
-
-    except Exception as e:
-        return str(e)
-
-@mcp.tool()
-def describe_table(source_id: str, table_name: str) -> str:
-    """
-    Lists columns and their types in the specified table of specified data source.
-
-    Args:
-        source_id: The data source
-        table_name: The table in the data source
-    """
-    try:
-        source = config.SOURCES.get(source_id)
-        if not source:
-            return f"Source {source_id} Not Found"
-
-        match source['type']:
-            case 'sqlite':
-                result = query_utils.execute_query(source,
-                    f'PRAGMA table_info({table_name});'
-                )
-                return result.to_markdown(index=False)
-
-            case 'postgresql':
-                result = query_utils.execute_query(source,
-                    f"SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = '{table_name}';"
-                )
-                return result.to_markdown(index=False)
-
-            case "mysql" | "duckdb" | "csv" | "parquet" | "clickhouse":
-                result = query_utils.execute_query(source,
-                    f"DESCRIBE {table_name};"
-                )
-                return result.to_markdown(index=False)
-
-    except Exception as e:
-        return str(e)
-
-
-@mcp.tool()
-def run_query(source_id: str, query: str) -> str:
-    """
-    Run query against specified source
-    For both csv and parquet sources, use DuckDB SQL syntax
-    Use 'CSV' as the table name for csv sources.
-    Use 'PARQUET' as the table name for parquet sources.
-
-    This will return a query_id, which can be referenced while calling other Zaturn tools.
-    Args:
-        source_id: The data source to run the query on
-        query: SQL query to run on the data source
-    """
-    try:
-        source = config.SOURCES.get(source_id)
-        if not source:
-            return f"Source {source_id} Not Found"
-
-        df = query_utils.execute_query(source, query)
-        query_id = query_utils.save_query(df)
-        return query_id
-    except Exception as e:
-        return str(e)
-
-
-@mcp.tool()
-def show_query_result(query_id) -> str:
-    """
-    Show stored result for query_id in markdown table format
-    """
-    df = query_utils.load_query(query_id)
-    return df.to_markdown(index=False)