squirrels 0.4.0__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of squirrels might be problematic. Click here for more details.

Files changed (125):
  1. dateutils/__init__.py +6 -0
  2. dateutils/_enums.py +25 -0
  3. squirrels/dateutils.py → dateutils/_implementation.py +58 -111
  4. dateutils/types.py +6 -0
  5. squirrels/__init__.py +13 -11
  6. squirrels/_api_routes/__init__.py +5 -0
  7. squirrels/_api_routes/auth.py +271 -0
  8. squirrels/_api_routes/base.py +165 -0
  9. squirrels/_api_routes/dashboards.py +150 -0
  10. squirrels/_api_routes/data_management.py +145 -0
  11. squirrels/_api_routes/datasets.py +257 -0
  12. squirrels/_api_routes/oauth2.py +298 -0
  13. squirrels/_api_routes/project.py +252 -0
  14. squirrels/_api_server.py +256 -450
  15. squirrels/_arguments/__init__.py +0 -0
  16. squirrels/_arguments/init_time_args.py +108 -0
  17. squirrels/_arguments/run_time_args.py +147 -0
  18. squirrels/_auth.py +960 -0
  19. squirrels/_command_line.py +126 -45
  20. squirrels/_compile_prompts.py +147 -0
  21. squirrels/_connection_set.py +48 -26
  22. squirrels/_constants.py +68 -38
  23. squirrels/_dashboards.py +160 -0
  24. squirrels/_data_sources.py +570 -0
  25. squirrels/_dataset_types.py +84 -0
  26. squirrels/_exceptions.py +29 -0
  27. squirrels/_initializer.py +177 -80
  28. squirrels/_logging.py +115 -0
  29. squirrels/_manifest.py +208 -79
  30. squirrels/_model_builder.py +69 -0
  31. squirrels/_model_configs.py +74 -0
  32. squirrels/_model_queries.py +52 -0
  33. squirrels/_models.py +926 -367
  34. squirrels/_package_data/base_project/.env +42 -0
  35. squirrels/_package_data/base_project/.env.example +42 -0
  36. squirrels/_package_data/base_project/assets/expenses.db +0 -0
  37. squirrels/_package_data/base_project/connections.yml +16 -0
  38. squirrels/_package_data/base_project/dashboards/dashboard_example.py +34 -0
  39. squirrels/_package_data/base_project/dashboards/dashboard_example.yml +22 -0
  40. squirrels/{package_data → _package_data}/base_project/docker/.dockerignore +5 -2
  41. squirrels/{package_data → _package_data}/base_project/docker/Dockerfile +3 -3
  42. squirrels/{package_data → _package_data}/base_project/docker/compose.yml +1 -1
  43. squirrels/_package_data/base_project/duckdb_init.sql +10 -0
  44. squirrels/{package_data/base_project/.gitignore → _package_data/base_project/gitignore} +3 -2
  45. squirrels/_package_data/base_project/macros/macros_example.sql +17 -0
  46. squirrels/_package_data/base_project/models/builds/build_example.py +26 -0
  47. squirrels/_package_data/base_project/models/builds/build_example.sql +16 -0
  48. squirrels/_package_data/base_project/models/builds/build_example.yml +57 -0
  49. squirrels/_package_data/base_project/models/dbviews/dbview_example.sql +12 -0
  50. squirrels/_package_data/base_project/models/dbviews/dbview_example.yml +26 -0
  51. squirrels/_package_data/base_project/models/federates/federate_example.py +37 -0
  52. squirrels/_package_data/base_project/models/federates/federate_example.sql +19 -0
  53. squirrels/_package_data/base_project/models/federates/federate_example.yml +65 -0
  54. squirrels/_package_data/base_project/models/sources.yml +38 -0
  55. squirrels/{package_data → _package_data}/base_project/parameters.yml +56 -40
  56. squirrels/_package_data/base_project/pyconfigs/connections.py +14 -0
  57. squirrels/{package_data → _package_data}/base_project/pyconfigs/context.py +21 -40
  58. squirrels/_package_data/base_project/pyconfigs/parameters.py +141 -0
  59. squirrels/_package_data/base_project/pyconfigs/user.py +44 -0
  60. squirrels/_package_data/base_project/seeds/seed_categories.yml +15 -0
  61. squirrels/_package_data/base_project/seeds/seed_subcategories.csv +15 -0
  62. squirrels/_package_data/base_project/seeds/seed_subcategories.yml +21 -0
  63. squirrels/_package_data/base_project/squirrels.yml.j2 +61 -0
  64. squirrels/_package_data/templates/dataset_results.html +112 -0
  65. squirrels/_package_data/templates/oauth_login.html +271 -0
  66. squirrels/_package_data/templates/squirrels_studio.html +20 -0
  67. squirrels/_package_loader.py +8 -4
  68. squirrels/_parameter_configs.py +104 -103
  69. squirrels/_parameter_options.py +348 -0
  70. squirrels/_parameter_sets.py +57 -47
  71. squirrels/_parameters.py +1664 -0
  72. squirrels/_project.py +721 -0
  73. squirrels/_py_module.py +7 -5
  74. squirrels/_schemas/__init__.py +0 -0
  75. squirrels/_schemas/auth_models.py +167 -0
  76. squirrels/_schemas/query_param_models.py +75 -0
  77. squirrels/{_api_response_models.py → _schemas/response_models.py} +126 -47
  78. squirrels/_seeds.py +35 -16
  79. squirrels/_sources.py +110 -0
  80. squirrels/_utils.py +248 -73
  81. squirrels/_version.py +1 -1
  82. squirrels/arguments.py +7 -0
  83. squirrels/auth.py +4 -0
  84. squirrels/connections.py +3 -0
  85. squirrels/dashboards.py +2 -81
  86. squirrels/data_sources.py +14 -631
  87. squirrels/parameter_options.py +13 -348
  88. squirrels/parameters.py +14 -1266
  89. squirrels/types.py +16 -0
  90. squirrels-0.5.0.dist-info/METADATA +113 -0
  91. squirrels-0.5.0.dist-info/RECORD +97 -0
  92. {squirrels-0.4.0.dist-info → squirrels-0.5.0.dist-info}/WHEEL +1 -1
  93. squirrels-0.5.0.dist-info/entry_points.txt +3 -0
  94. {squirrels-0.4.0.dist-info → squirrels-0.5.0.dist-info/licenses}/LICENSE +1 -1
  95. squirrels/_authenticator.py +0 -85
  96. squirrels/_dashboards_io.py +0 -61
  97. squirrels/_environcfg.py +0 -84
  98. squirrels/arguments/init_time_args.py +0 -40
  99. squirrels/arguments/run_time_args.py +0 -208
  100. squirrels/package_data/assets/favicon.ico +0 -0
  101. squirrels/package_data/assets/index.css +0 -1
  102. squirrels/package_data/assets/index.js +0 -58
  103. squirrels/package_data/base_project/assets/expenses.db +0 -0
  104. squirrels/package_data/base_project/connections.yml +0 -7
  105. squirrels/package_data/base_project/dashboards/dashboard_example.py +0 -32
  106. squirrels/package_data/base_project/dashboards.yml +0 -10
  107. squirrels/package_data/base_project/env.yml +0 -29
  108. squirrels/package_data/base_project/models/dbviews/dbview_example.py +0 -47
  109. squirrels/package_data/base_project/models/dbviews/dbview_example.sql +0 -22
  110. squirrels/package_data/base_project/models/federates/federate_example.py +0 -21
  111. squirrels/package_data/base_project/models/federates/federate_example.sql +0 -3
  112. squirrels/package_data/base_project/pyconfigs/auth.py +0 -45
  113. squirrels/package_data/base_project/pyconfigs/connections.py +0 -19
  114. squirrels/package_data/base_project/pyconfigs/parameters.py +0 -95
  115. squirrels/package_data/base_project/seeds/seed_subcategories.csv +0 -15
  116. squirrels/package_data/base_project/squirrels.yml.j2 +0 -94
  117. squirrels/package_data/templates/index.html +0 -18
  118. squirrels/project.py +0 -378
  119. squirrels/user_base.py +0 -55
  120. squirrels-0.4.0.dist-info/METADATA +0 -117
  121. squirrels-0.4.0.dist-info/RECORD +0 -60
  122. squirrels-0.4.0.dist-info/entry_points.txt +0 -4
  123. /squirrels/{package_data → _package_data}/base_project/assets/weather.db +0 -0
  124. /squirrels/{package_data → _package_data}/base_project/seeds/seed_categories.csv +0 -0
  125. /squirrels/{package_data → _package_data}/base_project/tmp/.gitignore +0 -0
squirrels/_project.py ADDED
@@ -0,0 +1,721 @@
1
+ from dotenv import dotenv_values, load_dotenv
2
+ from pathlib import Path
3
+ import asyncio, typing as t, functools as ft, shutil, json, os
4
+ import sqlglot, sqlglot.expressions, duckdb, polars as pl
5
+
6
+ from ._auth import Authenticator, AuthProviderArgs, ProviderFunctionType
7
+ from ._schemas.auth_models import CustomUserFields, AbstractUser, GuestUser, RegisteredUser
8
+ from ._schemas import response_models as rm
9
+ from ._model_builder import ModelBuilder
10
+ from ._exceptions import InvalidInputError, ConfigurationError
11
+ from ._py_module import PyModule
12
+ from . import _dashboards as d, _utils as u, _constants as c, _manifest as mf, _connection_set as cs
13
+ from . import _seeds as s, _models as m, _model_configs as mc, _model_queries as mq, _sources as so
14
+ from . import _parameter_sets as ps, _dataset_types as dr, _logging as l
15
+
16
+ T = t.TypeVar("T", bound=d.Dashboard)
17
+ M = t.TypeVar("M", bound=m.DataModel)
18
+
19
+
20
+ class SquirrelsProject:
21
+ """
22
+ Initiate an instance of this class to interact with a Squirrels project through Python code. For example this can be handy to experiment with the datasets produced by Squirrels in a Jupyter notebook.
23
+ """
24
+
25
+ def __init__(
26
+ self, *, filepath: str = ".", load_dotenv_globally: bool = False,
27
+ log_to_file: bool = False, log_level: str | None = None, log_format: str | None = None,
28
+ ) -> None:
29
+ """
30
+ Constructor for SquirrelsProject class. Loads the file contents of the Squirrels project into memory as member fields.
31
+
32
+ Arguments:
33
+ filepath: The path to the Squirrels project file. Defaults to the current working directory.
34
+ log_level: The logging level to use. Options are "DEBUG", "INFO", and "WARNING". Default is from SQRL_LOGGING__LOG_LEVEL environment variable or "INFO".
35
+ log_to_file: Whether to enable logging to file(s) in the "logs/" folder with rotation and retention policies. Default is False.
36
+ log_format: The format of the log records. Options are "text" and "json". Default is from SQRL_LOGGING__LOG_FORMAT environment variable or "text".
37
+ """
38
+ self._filepath = filepath
39
+ self._load_dotenv_globally = load_dotenv_globally
40
+ self._logger = self._get_logger(filepath, log_to_file, log_level, log_format)
41
+ self._ensure_virtual_datalake_exists(filepath)
42
+
43
def _get_logger(self, filepath: str, log_to_file: bool, log_level: str | None, log_format: str | None) -> u.Logger:
    """Create the project logger, filling any unset options from environment variables."""
    env = self._env_vars

    # CLI arguments take precedence over environment variables
    if log_level is None:
        log_level = env.get(c.SQRL_LOGGING_LOG_LEVEL, "INFO")
    if log_format is None:
        log_format = env.get(c.SQRL_LOGGING_LOG_FORMAT, "text")
    if not log_to_file:
        log_to_file = u.to_bool(env.get(c.SQRL_LOGGING_LOG_TO_FILE, "false"))

    size_mb = int(env.get(c.SQRL_LOGGING_LOG_FILE_SIZE_MB, 50))
    backup_count = int(env.get(c.SQRL_LOGGING_LOG_FILE_BACKUP_COUNT, 1))
    return l.get_logger(filepath, log_to_file, log_level, log_format, size_mb, backup_count)
52
+
53
def _ensure_virtual_datalake_exists(self, project_path: str) -> None:
    """
    Create the target folder and attach the Virtual Data Lake (VDL) once to verify it is
    reachable, expiring old DuckLake snapshots and cleaning up old files along the way.

    Arguments:
        project_path: The path to the Squirrels project folder.

    Raises:
        u.ConfigurationError: If the VDL cannot be attached (e.g. the DATA_PATH of an
            existing DuckLake was changed, or the attach failed for any other reason).
    """
    target_path = u.Path(project_path, c.TARGET_FOLDER)
    target_path.mkdir(parents=True, exist_ok=True)

    # BUGFIX: compute this BEFORE the try block. It was previously assigned inside the
    # try, so any exception raised before that assignment completed made the except
    # handler itself crash with UnboundLocalError when it read `is_ducklake`.
    is_ducklake = self._datalake_db_path.startswith("ducklake:")

    # Attempt to set up the virtual data lake with DATA_PATH if possible
    try:
        data_path = self._env_vars.get(c.SQRL_VDL_DATA_PATH, c.DEFAULT_VDL_DATA_PATH)
        data_path = data_path.format(project_path=project_path)

        options = f"(DATA_PATH '{data_path}')" if is_ducklake else ""
        attach_stmt = f"ATTACH '{self._datalake_db_path}' AS vdl {options}"
        with duckdb.connect() as conn:
            conn.execute(attach_stmt)
            # TODO: support incremental loads for build models and avoid cleaning up old files all the time
            conn.execute("CALL ducklake_expire_snapshots('vdl', older_than => now())")
            conn.execute("CALL ducklake_cleanup_old_files('vdl', cleanup_all => true)")

    except Exception as e:
        if "DATA_PATH parameter" in str(e):
            first_line = str(e).split("\n")[0]
            note = "NOTE: Squirrels does not allow changing the data path for an existing Virtual Data Lake (VDL)"
            raise u.ConfigurationError(f"{first_line}\n\n{note}")

        if is_ducklake and not any(x in self._datalake_db_path for x in [":sqlite:", ":postgres:", ":mysql:"]):
            extended_error = "\n Note: if you're using DuckDB for the metadata database, only one process can connect to the VDL at a time."
        else:
            extended_error = ""

        raise u.ConfigurationError(f"Failed to attach Virtual Data Lake (VDL).{extended_error}") from e
84
+
85
@ft.cached_property
def _env_vars(self) -> dict[str, str]:
    """Environment variables for the project: os.environ overlaid with .env then .env.local values."""
    from_dotenv: dict[str, str] = {}
    for filename in (c.DOTENV_FILE, c.DOTENV_LOCAL_FILE):
        dotenv_path = u.Path(self._filepath, filename)
        if self._load_dotenv_globally:
            load_dotenv(dotenv_path)  # also export into os.environ when requested
        for key, value in dotenv_values(dotenv_path).items():
            if value is not None:
                from_dotenv[key] = value
    return {**os.environ, **from_dotenv}
95
+
96
@ft.cached_property
def _elevated_access_level(self) -> u.ACCESS_LEVEL:
    """The access level treated as 'elevated', validated against the known levels."""
    level = self._env_vars.get(c.SQRL_PERMISSIONS_ELEVATED_ACCESS_LEVEL, "admin").lower()
    if level not in ("admin", "member", "guest"):
        raise u.ConfigurationError(f"{c.SQRL_PERMISSIONS_ELEVATED_ACCESS_LEVEL} has been set to an invalid access level: {level}")
    return level
104
+
105
@ft.cached_property
def _datalake_db_path(self) -> str:
    """Path to the VDL catalog database, with '{project_path}' expanded to the project folder."""
    raw_path = self._env_vars.get(c.SQRL_VDL_CATALOG_DB_PATH, c.DEFAULT_VDL_CATALOG_DB_PATH)
    return raw_path.format(project_path=self._filepath)
110
+
111
@ft.cached_property
def _manifest_cfg(self) -> mf.ManifestConfig:
    """The project's manifest configuration, loaded via ManifestIO."""
    cfg = mf.ManifestIO.load_from_file(self._logger, self._filepath, self._env_vars)
    return cfg
114
+
115
@ft.cached_property
def _seeds(self) -> s.Seeds:
    """The project's seed data, loaded via SeedsIO."""
    seeds = s.SeedsIO.load_files(self._logger, self._filepath, self._env_vars)
    return seeds
118
+
119
@ft.cached_property
def _sources(self) -> so.Sources:
    """The project's source definitions, loaded via SourcesIO."""
    sources = so.SourcesIO.load_file(self._logger, self._filepath, self._env_vars)
    return sources
122
+
123
@ft.cached_property
def _build_model_files(self) -> dict[str, mq.QueryFileWithConfig]:
    """Build model query files (with their configs), loaded via ModelsIO."""
    files = m.ModelsIO.load_build_files(self._logger, self._filepath)
    return files
126
+
127
@ft.cached_property
def _dbview_model_files(self) -> dict[str, mq.QueryFileWithConfig]:
    """Dbview model query files (with their configs), loaded via ModelsIO."""
    files = m.ModelsIO.load_dbview_files(self._logger, self._filepath, self._env_vars)
    return files
130
+
131
@ft.cached_property
def _federate_model_files(self) -> dict[str, mq.QueryFileWithConfig]:
    """Federate model query files (with their configs), loaded via ModelsIO."""
    files = m.ModelsIO.load_federate_files(self._logger, self._filepath)
    return files
134
+
135
@ft.cached_property
def _context_func(self) -> m.ContextFunc:
    """The project's context function, loaded via ModelsIO."""
    func = m.ModelsIO.load_context_func(self._logger, self._filepath)
    return func
138
+
139
@ft.cached_property
def _dashboards(self) -> dict[str, d.DashboardDefinition]:
    """Dashboard definitions keyed by name, loaded via DashboardsIO."""
    dashboards = d.DashboardsIO.load_files(self._logger, self._filepath)
    return dashboards
142
+
143
@ft.cached_property
def _conn_args(self) -> cs.ConnectionsArgs:
    """Arguments object for connections.py, loaded via ConnectionSetIO."""
    args = cs.ConnectionSetIO.load_conn_py_args(self._logger, self._filepath, self._env_vars, self._manifest_cfg)
    return args
146
+
147
@ft.cached_property
def _conn_set(self) -> cs.ConnectionSet:
    """The project's database connection set, loaded via ConnectionSetIO."""
    conn_set = cs.ConnectionSetIO.load_from_file(self._logger, self._filepath, self._manifest_cfg, self._conn_args)
    return conn_set
150
+
151
@ft.cached_property
def _custom_user_fields_cls_and_provider_functions(self) -> tuple[type[CustomUserFields], list[ProviderFunctionType]]:
    """
    Load the project's CustomUserFields class from the user module, together with the
    auth provider functions registered while that module was imported.
    """
    user_module_path = u.Path(self._filepath, c.PYCONFIGS_FOLDER, c.USER_FILE)
    user_module = PyModule(user_module_path)

    # Importing the user module appends provider functions to Authenticator.providers
    # as a side effect — snapshot that list and reset it right after loading the class.
    fields_cls = user_module.get_func_or_class("CustomUserFields", default_attr=CustomUserFields)
    registered_providers = Authenticator.providers
    Authenticator.providers = []

    if not issubclass(fields_cls, CustomUserFields):
        raise ConfigurationError(f"CustomUserFields class in '{c.USER_FILE}' must inherit from CustomUserFields")

    return fields_cls, registered_providers
165
+
166
@ft.cached_property
def _auth_args(self) -> AuthProviderArgs:
    """Arguments object passed to auth provider functions (derived from the connection args)."""
    ca = self._conn_args
    return AuthProviderArgs(ca.project_path, ca.proj_vars, ca.env_vars)
170
+
171
@ft.cached_property
def _auth(self) -> Authenticator:
    """The Authenticator for this project, built from the user module and manifest settings."""
    fields_cls, providers = self._custom_user_fields_cls_and_provider_functions
    external_only = self._manifest_cfg.authentication.type == mf.AuthenticationType.EXTERNAL
    return Authenticator(
        self._logger, self._filepath, self._auth_args, providers,
        custom_user_fields_cls=fields_cls, external_only=external_only
    )
176
+
177
@ft.cached_property
def _guest_user(self) -> AbstractUser:
    """A guest user (empty username) with default custom fields."""
    return GuestUser(username="", custom_fields=self._auth.CustomUserFields())
181
+
182
@ft.cached_property
def _admin_user(self) -> AbstractUser:
    """A registered admin-level user (empty username) with default custom fields."""
    return RegisteredUser(username="", access_level="admin", custom_fields=self._auth.CustomUserFields())
186
+
187
@ft.cached_property
def _param_args(self) -> ps.ParametersArgs:
    """Arguments object passed to parameter functions (derived from the connection args)."""
    ca = self._conn_args
    return ps.ParametersArgs(ca.project_path, ca.proj_vars, ca.env_vars)
191
+
192
@ft.cached_property
def _param_cfg_set(self) -> ps.ParameterConfigsSet:
    """All parameter configurations for the project, loaded via ParameterConfigsSetIO."""
    return ps.ParameterConfigsSetIO.load_from_file(
        self._logger, self._filepath, self._manifest_cfg, self._seeds,
        self._conn_set, self._param_args, self._datalake_db_path
    )
197
+
198
@ft.cached_property
def _j2_env(self) -> u.EnvironmentWithMacros:
    """Jinja environment rooted at the project folder, with join/quote filters registered."""
    env = u.EnvironmentWithMacros(self._logger, loader=u.j2.FileSystemLoader(self._filepath))

    def as_str(item: t.Any, attribute: str | None = None) -> str:
        # Render either the item itself or one of its attributes as a string
        return str(item) if attribute is None else str(getattr(item, attribute))

    def join(value: list[t.Any], d: str = ", ", attribute: str | None = None) -> str:
        return d.join(as_str(item, attribute) for item in value)

    def quote(value: t.Any, q: str = "'", attribute: str | None = None) -> str:
        return q + as_str(value, attribute) + q

    def quote_and_join(value: list[t.Any], q: str = "'", d: str = ", ", attribute: str | None = None) -> str:
        return d.join(quote(item, q, attribute) for item in value)

    env.filters["join"] = join
    env.filters["quote"] = quote
    env.filters["quote_and_join"] = quote_and_join
    return env
221
+
222
def close(self) -> None:
    """
    Deliberately close any open resources within the Squirrels project, such as database connections (instead of relying on the garbage collector).
    """
    # Dispose database connections first, then shut down the authenticator
    self._conn_set.dispose()
    self._auth.close()
228
+
229
+ def __exit__(self, exc_type, exc_val, traceback):
230
+ self.close()
231
+
232
+
233
+ def _add_model(self, models_dict: dict[str, M], model: M) -> None:
234
+ if model.name in models_dict:
235
+ raise ConfigurationError(f"Names across all models must be unique. Model '{model.name}' is duplicated")
236
+ models_dict[model.name] = model
237
+
238
+
239
def _get_static_models(self) -> dict[str, m.StaticModel]:
    """Construct all static models (seeds, source models, and build models), keyed by name."""
    models: dict[str, m.StaticModel] = {}
    common = dict(logger=self._logger, env_vars=self._env_vars, conn_set=self._conn_set)

    for key, seed in self._seeds.get_dataframes().items():
        self._add_model(models, m.Seed(key, seed.config, seed.df, **common))

    for source_name, source_config in self._sources.sources.items():
        self._add_model(models, m.SourceModel(source_name, source_config, **common))

    for name, val in self._build_model_files.items():
        build_model = m.BuildModel(name, val.config, val.query_file, **common, j2_env=self._j2_env)
        self._add_model(models, build_model)

    return models
254
+
255
+
256
async def build(self, *, full_refresh: bool = False, select: str | None = None) -> None:
    """
    Build the Virtual Data Lake (VDL) for the Squirrels project

    Arguments:
        full_refresh: Whether to drop all tables and rebuild the VDL from scratch. Default is False.
        select: The name of a specific model to build. If None, all models are built. Default is None.
    """
    static_models = self._get_static_models()
    builder = ModelBuilder(self._datalake_db_path, self._conn_set, static_models, self._conn_args, self._logger)
    await builder.build(full_refresh, select)
267
+
268
def _get_models_dict(self, always_python_df: bool) -> dict[str, m.DataModel]:
    """
    Construct every data model in the project: static models plus dbview and federate models.

    Arguments:
        always_python_df: Value assigned to needs_python_df on each dbview/federate model.
    """
    models: dict[str, m.DataModel] = self._get_static_models()

    # Dbviews first, then federates — same kwargs for both model classes
    runtime_groups = (
        (m.DbviewModel, self._dbview_model_files),
        (m.FederateModel, self._federate_model_files),
    )
    for model_cls, files in runtime_groups:
        for name, val in files.items():
            self._add_model(models, model_cls(
                name, val.config, val.query_file,
                logger=self._logger, env_vars=self._env_vars, conn_set=self._conn_set, j2_env=self._j2_env
            ))
            models[name].needs_python_df = always_python_df

    return models
284
+
285
def _generate_dag(self, dataset: str) -> m.DAG:
    """Create the (not-yet-executed) DAG for the given dataset, with its target model flagged."""
    models = self._get_models_dict(always_python_df=False)

    dataset_config = self._manifest_cfg.datasets[dataset]
    target = models[dataset_config.model]
    target.is_target = True

    return m.DAG(dataset_config, target, models, self._datalake_db_path, self._logger)
294
+
295
def _generate_dag_with_fake_target(self, sql_query: str | None, *, always_python_df: bool = False) -> m.DAG:
    """
    Create a DAG whose target is a synthetic federate model ("__fake_target") wrapping sql_query.
    When sql_query is None, the fake target depends on every model and runs "SELECT 1".
    """
    models = self._get_models_dict(always_python_df=always_python_df)

    if sql_query is None:
        dependencies = set(models.keys())
    else:
        dependencies, parsed = u.parse_dependent_tables(sql_query, models.keys())

        # NOTE(review): the substitution/transform steps below are scoped to the
        # sql_query branch since `parsed` only exists there — confirm against original.
        # Rewrite table references so models resolve to where their data actually lives.
        substitutions: dict[str, str] = {}
        for model_name in dependencies:
            model = models[model_name]
            if isinstance(model, m.SourceModel) and not model.is_queryable:
                raise InvalidInputError(400, "cannot_query_source_model", f"Source model '{model_name}' cannot be queried with DuckDB")
            if isinstance(model, m.BuildModel):
                substitutions[model_name] = f"vdl.{model_name}"
            elif isinstance(model, m.SourceModel):
                if model.model_config.load_to_vdl:
                    substitutions[model_name] = f"vdl.{model_name}"
                else:
                    # DuckDB connection without load_to_vdl - reference via attached database
                    conn_name = model.model_config.get_connection()
                    table_name = model.model_config.get_table()
                    substitutions[model_name] = f"db_{conn_name}.{table_name}"

        def rewrite_table(node):
            if isinstance(node, sqlglot.expressions.Table) and node.name in substitutions:
                return sqlglot.expressions.Table(this=substitutions[node.name], alias=node.alias)
            return node

        sql_query = parsed.transform(rewrite_table).sql()

    model_config = mc.FederateModelConfig(depends_on=dependencies)
    query_file = mq.SqlQueryFile("", sql_query or "SELECT 1")
    fake_target = m.FederateModel(
        "__fake_target", model_config, query_file,
        logger=self._logger, env_vars=self._env_vars, conn_set=self._conn_set, j2_env=self._j2_env
    )
    fake_target.is_target = True
    return m.DAG(None, fake_target, models, self._datalake_db_path, self._logger)
333
+
334
+ async def _get_compiled_dag(
335
+ self, user: AbstractUser, *, sql_query: str | None = None, selections: dict[str, t.Any] = {}, configurables: dict[str, str] = {},
336
+ always_python_df: bool = False
337
+ ) -> m.DAG:
338
+ dag = self._generate_dag_with_fake_target(sql_query, always_python_df=always_python_df)
339
+
340
+ configurables = {**self._manifest_cfg.get_default_configurables(), **configurables}
341
+ await dag.execute(
342
+ self._param_args, self._param_cfg_set, self._context_func, user, selections,
343
+ runquery=False, configurables=configurables
344
+ )
345
+ return dag
346
+
347
def _get_all_connections(self) -> list[rm.ConnectionItemModel]:
    """List connection items (name + display label) for entries that carry ConnectionProperties."""
    items = []
    for conn_name, conn_props in self._conn_set.get_connections_as_dict().items():
        if not isinstance(conn_props, mf.ConnectionProperties):
            continue
        label = conn_name if conn_props.label is None else conn_props.label
        items.append(rm.ConnectionItemModel(name=conn_name, label=label))
    return items
354
+
355
+ def _get_all_data_models(self, compiled_dag: m.DAG) -> list[rm.DataModelItem]:
356
+ return compiled_dag.get_all_data_models()
357
+
358
async def get_all_data_models(self) -> "list[rm.DataModelItem]":
    """
    Get all data models in the project

    Returns:
        A list of DataModelItem objects
    """
    dag = await self._get_compiled_dag(self._admin_user)
    return self._get_all_data_models(dag)
367
+
368
def _get_all_data_lineage(self, compiled_dag: "m.DAG") -> "list[rm.LineageRelation]":
    """Model-level lineage from the DAG, extended with dataset and dashboard relations."""
    lineage = compiled_dag.get_all_model_lineage()

    # Each dataset is fed by exactly one target model
    for dataset in self._manifest_cfg.datasets.values():
        lineage.append(rm.LineageRelation(
            type="runtime",
            source=rm.LineageNode(name=dataset.model, type="model"),
            target=rm.LineageNode(name=dataset.name, type="dataset"),
        ))

    # Each dashboard is fed by one or more datasets
    for dashboard in self._dashboards.values():
        dash_node = rm.LineageNode(name=dashboard.dashboard_name, type="dashboard")
        for ds_name in set(x.dataset for x in dashboard.config.depends_on):
            lineage.append(rm.LineageRelation(
                type="runtime",
                source=rm.LineageNode(name=ds_name, type="dataset"),
                target=dash_node,
            ))

    return lineage
386
+
387
async def get_all_data_lineage(self) -> "list[rm.LineageRelation]":
    """
    Get all data lineage in the project

    Returns:
        A list of LineageRelation objects
    """
    dag = await self._get_compiled_dag(self._admin_user)
    return self._get_all_data_lineage(dag)
396
+
397
+ async def compile(
398
+ self, *, selected_model: str | None = None, test_set: str | None = None, do_all_test_sets: bool = False,
399
+ runquery: bool = False, clear: bool = False, buildtime_only: bool = False, runtime_only: bool = False
400
+ ) -> None:
401
+ """
402
+ Compile models into the "target/compile" folder.
403
+
404
+ Behavior:
405
+ - Buildtime outputs: target/compile/buildtime/*.sql (for SQL build models) and dag.png
406
+ - Runtime outputs: target/compile/runtime/[test_set]/dbviews/*.sql, federates/*.sql, dag.png
407
+ If runquery=True, also write CSVs for runtime models.
408
+ - Options: clear entire compile folder first; compile only buildtime or only runtime.
409
+
410
+ Arguments:
411
+ selected_model: The name of the model to compile. If specified, the compiled SQL query is also printed in the terminal. If None, all models for the selected dataset are compiled. Default is None.
412
+ test_set: The name of the test set to compile with. If None, the default test set is used (which can vary by dataset). Ignored if `do_all_test_sets` argument is True. Default is None.
413
+ do_all_test_sets: Whether to compile all applicable test sets for the selected dataset(s). If True, the `test_set` argument is ignored. Default is False.
414
+ runquery: Whether to run all compiled queries and save each result as a CSV file. If True and `selected_model` is specified, all upstream models of the selected model is compiled as well. Default is False.
415
+ clear: Whether to clear the "target/compile/" folder before compiling. Default is False.
416
+ buildtime_only: Whether to compile only buildtime models. Default is False.
417
+ runtime_only: Whether to compile only runtime models. Default is False.
418
+ """
419
+ border = "=" * 80
420
+ underlines = "-" * len(border)
421
+
422
+ compile_root = Path(self._filepath, c.TARGET_FOLDER, c.COMPILE_FOLDER)
423
+ if clear and compile_root.exists():
424
+ shutil.rmtree(compile_root)
425
+
426
+ models_dict = self._get_models_dict(always_python_df=False)
427
+
428
+ model_to_compile = None
429
+ if selected_model is not None:
430
+ normalized = u.normalize_name(selected_model)
431
+ model_to_compile = models_dict.get(normalized)
432
+ if model_to_compile is None:
433
+ print(f"No such model found: {selected_model}")
434
+ return
435
+ if not isinstance(model_to_compile, m.QueryModel):
436
+ print(f"Model '{selected_model}' is not a query model. Nothing to do.")
437
+ return
438
+
439
+ # Buildtime compilation
440
+ if not runtime_only:
441
+ print(underlines)
442
+ print(f"Compiling buildtime models")
443
+ print(underlines)
444
+
445
+ buildtime_folder = Path(compile_root, c.COMPILE_BUILDTIME_FOLDER)
446
+ buildtime_folder.mkdir(parents=True, exist_ok=True)
447
+
448
+ def write_buildtime_model(model: m.DataModel, static_models: dict[str, m.StaticModel]) -> None:
449
+ if not isinstance(model, m.BuildModel):
450
+ return
451
+
452
+ model.compile_for_build(self._conn_args, static_models)
453
+
454
+ if isinstance(model.compiled_query, mq.SqlModelQuery):
455
+ out_path = Path(buildtime_folder, f"{model.name}.sql")
456
+ with open(out_path, 'w') as f:
457
+ f.write(model.compiled_query.query)
458
+ print(f"Successfully compiled build model: {model.name}")
459
+ elif isinstance(model.compiled_query, mq.PyModelQuery):
460
+ print(f"The build model '{model.name}' is in Python. Compilation for Python is not supported yet.")
461
+
462
+ static_models = self._get_static_models()
463
+ if model_to_compile is not None:
464
+ write_buildtime_model(model_to_compile, static_models)
465
+ else:
466
+ coros = [asyncio.to_thread(write_buildtime_model, m, static_models) for m in static_models.values()]
467
+ await u.asyncio_gather(coros)
468
+
469
+ print(underlines)
470
+ print()
471
+
472
+ # Runtime compilation
473
+ if not buildtime_only:
474
+ if do_all_test_sets:
475
+ test_set_names_set = set(self._manifest_cfg.selection_test_sets.keys())
476
+ test_set_names_set.add(c.DEFAULT_TEST_SET_NAME)
477
+ test_set_names = list(test_set_names_set)
478
+ else:
479
+ test_set_names = [test_set or c.DEFAULT_TEST_SET_NAME]
480
+
481
+ for ts_name in test_set_names:
482
+ print(underlines)
483
+ print(f"Compiling runtime models (test set '{ts_name}')")
484
+ print(underlines)
485
+
486
+ # Build user and selections from test set config if present
487
+ ts_conf = self._manifest_cfg.selection_test_sets.get(ts_name, self._manifest_cfg.get_default_test_set())
488
+ # Separate base fields from custom fields
489
+ access_level = ts_conf.user.access_level
490
+ custom_fields = self._auth.CustomUserFields(**ts_conf.user.custom_fields)
491
+ if access_level == "guest":
492
+ user = GuestUser(username="", custom_fields=custom_fields)
493
+ else:
494
+ user = RegisteredUser(username="", access_level=access_level, custom_fields=custom_fields)
495
+
496
+ # Generate DAG across all models. When runquery=True, force models to produce Python dataframes so CSVs can be written.
497
+ dag = await self._get_compiled_dag(
498
+ user=user, selections=ts_conf.parameters, configurables=ts_conf.configurables, always_python_df=runquery,
499
+ )
500
+ if runquery:
501
+ await dag._run_models()
502
+
503
+ # Prepare output folders
504
+ runtime_folder = Path(compile_root, c.COMPILE_RUNTIME_FOLDER, ts_name)
505
+ dbviews_folder = Path(runtime_folder, c.DBVIEWS_FOLDER)
506
+ federates_folder = Path(runtime_folder, c.FEDERATES_FOLDER)
507
+ dbviews_folder.mkdir(parents=True, exist_ok=True)
508
+ federates_folder.mkdir(parents=True, exist_ok=True)
509
+ with open(Path(runtime_folder, "placeholders.json"), "w") as f:
510
+ json.dump(dag.placeholders, f)
511
+
512
+ # Function to write runtime models
513
+ def write_runtime_model(model: m.DataModel) -> None:
514
+ if not isinstance(model, m.QueryModel):
515
+ return
516
+
517
+ if model.model_type not in (m.ModelType.DBVIEW, m.ModelType.FEDERATE):
518
+ return
519
+
520
+ subfolder = dbviews_folder if model.model_type == m.ModelType.DBVIEW else federates_folder
521
+ model_type = "dbview" if model.model_type == m.ModelType.DBVIEW else "federate"
522
+
523
+ if isinstance(model.compiled_query, mq.SqlModelQuery):
524
+ out_sql = Path(subfolder, f"{model.name}.sql")
525
+ with open(out_sql, 'w') as f:
526
+ f.write(model.compiled_query.query)
527
+ print(f"Successfully compiled {model_type} model: {model.name}")
528
+ elif isinstance(model.compiled_query, mq.PyModelQuery):
529
+ print(f"The {model_type} model '{model.name}' is in Python. Compilation for Python is not supported yet.")
530
+
531
+ if runquery and isinstance(model.result, pl.LazyFrame):
532
+ out_csv = Path(subfolder, f"{model.name}.csv")
533
+ model.result.collect().write_csv(out_csv)
534
+ print(f"Successfully created CSV for {model_type} model: {model.name}")
535
+
536
+ # If selected_model is provided for runtime, only emit that model's outputs
537
+ if model_to_compile is not None:
538
+ write_runtime_model(model_to_compile)
539
+ else:
540
+ models_to_compile = dag.models_dict.values()
541
+ coros = [asyncio.to_thread(write_runtime_model, model) for model in models_to_compile]
542
+ await u.asyncio_gather(coros)
543
+
544
+ print(underlines)
545
+ print()
546
+
547
+ print(f"All compilations complete! See the '{c.TARGET_FOLDER}/{c.COMPILE_FOLDER}/' folder for results.")
548
+ if model_to_compile and isinstance(model_to_compile.compiled_query, mq.SqlModelQuery):
549
+ print()
550
+ print(border)
551
+ print(f"Compiled SQL query for model '{model_to_compile.name}':")
552
+ print(underlines)
553
+ print(model_to_compile.compiled_query.query)
554
+ print(border)
555
+ print()
556
+
557
+ def _permission_error(self, user: AbstractUser, data_type: str, data_name: str, scope: str) -> InvalidInputError:
558
+ return InvalidInputError(403, f"unauthorized_access_to_{data_type}", f"User '{user}' does not have permission to access {scope} {data_type}: {data_name}")
559
+
560
+ def seed(self, name: str) -> pl.LazyFrame:
561
+ """
562
+ Method to retrieve a seed as a polars LazyFrame given a seed name.
563
+
564
+ Arguments:
565
+ name: The name of the seed to retrieve
566
+
567
+ Returns:
568
+ The seed as a polars LazyFrame
569
+ """
570
+ seeds_dict = self._seeds.get_dataframes()
571
+ try:
572
+ return seeds_dict[name].df
573
+ except KeyError:
574
+ available_seeds = list(seeds_dict.keys())
575
+ raise KeyError(f"Seed '{name}' not found. Available seeds are: {available_seeds}")
576
+
577
+ def dataset_metadata(self, name: str) -> dr.DatasetMetadata:
578
+ """
579
+ Method to retrieve the metadata of a dataset given a dataset name.
580
+
581
+ Arguments:
582
+ name: The name of the dataset to retrieve.
583
+
584
+ Returns:
585
+ A DatasetMetadata object containing the dataset description and column details.
586
+ """
587
+ dag = self._generate_dag(name)
588
+ dag.target_model.process_pass_through_columns(dag.models_dict)
589
+ return dr.DatasetMetadata(
590
+ target_model_config=dag.target_model.model_config
591
+ )
592
+
593
+ async def dataset(
594
+ self, name: str, *, selections: dict[str, t.Any] = {}, user: AbstractUser | None = None, require_auth: bool = True,
595
+ configurables: dict[str, str] = {}
596
+ ) -> dr.DatasetResult:
597
+ """
598
+ Async method to retrieve a dataset as a DatasetResult object (with metadata) given parameter selections.
599
+
600
+ Arguments:
601
+ name: The name of the dataset to retrieve.
602
+ selections: A dictionary of parameter selections to apply to the dataset. Optional, default is empty dictionary.
603
+ user: The user to use for authentication. If None, no user is used. Optional, default is None.
604
+
605
+ Returns:
606
+ A DatasetResult object containing the dataset result (as a polars DataFrame), its description, and the column details.
607
+ """
608
+ if user is None:
609
+ user = self._guest_user
610
+
611
+ scope = self._manifest_cfg.datasets[name].scope
612
+ if require_auth and not self._auth.can_user_access_scope(user, scope):
613
+ raise self._permission_error(user, "dataset", name, scope.name)
614
+
615
+ dag = self._generate_dag(name)
616
+ configurables = {**self._manifest_cfg.get_default_configurables(name), **configurables}
617
+ await dag.execute(
618
+ self._param_args, self._param_cfg_set, self._context_func, user, dict(selections), configurables=configurables
619
+ )
620
+ assert isinstance(dag.target_model.result, pl.LazyFrame)
621
+ return dr.DatasetResult(
622
+ target_model_config=dag.target_model.model_config,
623
+ df=dag.target_model.result.collect().with_row_index("_row_num", offset=1)
624
+ )
625
+
626
+ async def dashboard(
627
+ self, name: str, *, selections: dict[str, t.Any] = {}, user: AbstractUser | None = None, dashboard_type: t.Type[T] = d.PngDashboard,
628
+ configurables: dict[str, str] = {}
629
+ ) -> T:
630
+ """
631
+ Async method to retrieve a dashboard given parameter selections.
632
+
633
+ Arguments:
634
+ name: The name of the dashboard to retrieve.
635
+ selections: A dictionary of parameter selections to apply to the dashboard. Optional, default is empty dictionary.
636
+ user: The user to use for authentication. If None, no user is used. Optional, default is None.
637
+ dashboard_type: Return type of the method (mainly used for type hints). For instance, provide PngDashboard if you want the return type to be a PngDashboard. Optional, default is squirrels.Dashboard.
638
+
639
+ Returns:
640
+ The dashboard type specified by the "dashboard_type" argument.
641
+ """
642
+ if user is None:
643
+ user = self._guest_user
644
+
645
+ scope = self._dashboards[name].config.scope
646
+ if not self._auth.can_user_access_scope(user, scope):
647
+ raise self._permission_error(user, "dashboard", name, scope.name)
648
+
649
+ async def get_dataset_df(dataset_name: str, fixed_params: dict[str, t.Any]) -> pl.DataFrame:
650
+ final_selections = {**selections, **fixed_params}
651
+ result = await self.dataset(
652
+ dataset_name, selections=final_selections, user=user, require_auth=False, configurables=configurables
653
+ )
654
+ return result.df
655
+
656
+ args = d.DashboardArgs(self._param_args, get_dataset_df)
657
+ try:
658
+ return await self._dashboards[name].get_dashboard(args, dashboard_type=dashboard_type)
659
+ except KeyError:
660
+ raise KeyError(f"No dashboard file found for: {name}")
661
+
662
+ async def query_models(
663
+ self, sql_query: str, *, user: AbstractUser | None = None, selections: dict[str, t.Any] = {}, configurables: dict[str, str] = {}
664
+ ) -> dr.DatasetResult:
665
+ if user is None:
666
+ user = self._guest_user
667
+
668
+ dag = await self._get_compiled_dag(user=user, sql_query=sql_query, selections=selections, configurables=configurables)
669
+ await dag._run_models()
670
+ assert isinstance(dag.target_model.result, pl.LazyFrame)
671
+ return dr.DatasetResult(
672
+ target_model_config=dag.target_model.model_config,
673
+ df=dag.target_model.result.collect().with_row_index("_row_num", offset=1)
674
+ )
675
+
676
+ async def get_compiled_model_query(
677
+ self, model_name: str, *, user: AbstractUser | None = None, selections: dict[str, t.Any] = {}, configurables: dict[str, str] = {}
678
+ ) -> rm.CompiledQueryModel:
679
+ """
680
+ Compile the specified data model and return its language and compiled definition.
681
+ """
682
+ if user is None:
683
+ user = self._guest_user
684
+
685
+ name = u.normalize_name(model_name)
686
+ models_dict = self._get_models_dict(always_python_df=False)
687
+ if name not in models_dict:
688
+ raise InvalidInputError(404, "model_not_found", f"No data model found with name: {model_name}")
689
+
690
+ model = models_dict[name]
691
+ # Only build, dbview, and federate models support runtime compiled definition in this context
692
+ if not isinstance(model, (m.BuildModel, m.DbviewModel, m.FederateModel)):
693
+ raise InvalidInputError(400, "unsupported_model_type", "Only build, dbview, and federate models currently support compiled definition via this endpoint")
694
+
695
+ # Build a DAG with this model as the target, without a dataset context
696
+ model.is_target = True
697
+ dag = m.DAG(None, model, models_dict, self._datalake_db_path, self._logger)
698
+
699
+ cfg = {**self._manifest_cfg.get_default_configurables(), **configurables}
700
+ await dag.execute(
701
+ self._param_args, self._param_cfg_set, self._context_func, user, selections, runquery=False, configurables=cfg
702
+ )
703
+
704
+ language = "sql" if isinstance(model.query_file, mq.SqlQueryFile) else "python"
705
+ if isinstance(model, m.BuildModel):
706
+ # Compile SQL build models; Python build models not yet supported
707
+ if isinstance(model.query_file, mq.SqlQueryFile):
708
+ static_models = self._get_static_models()
709
+ compiled = model._compile_sql_model(model.query_file, self._conn_args, static_models)
710
+ definition = compiled.query
711
+ else:
712
+ definition = "# Compiling Python build models is currently not supported. This will be available in a future version of Squirrels..."
713
+ elif isinstance(model.compiled_query, mq.SqlModelQuery):
714
+ definition = model.compiled_query.query
715
+ elif isinstance(model.compiled_query, mq.PyModelQuery):
716
+ definition = "# Compiling Python data models is currently not supported. This will be available in a future version of Squirrels..."
717
+ else:
718
+ raise NotImplementedError(f"Query type not supported: {model.compiled_query.__class__.__name__}")
719
+
720
+ return rm.CompiledQueryModel(language=language, definition=definition, placeholders=dag.placeholders)
721
+