quollio-core 0.4.3__tar.gz → 0.4.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. {quollio_core-0.4.3 → quollio_core-0.4.5}/PKG-INFO +5 -1
  2. {quollio_core-0.4.3 → quollio_core-0.4.5}/pyproject.toml +4 -0
  3. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/__init__.py +1 -1
  4. quollio_core-0.4.5/quollio_core/bricks.py +237 -0
  5. quollio_core-0.4.5/quollio_core/dbt_projects/databricks/.gitignore +4 -0
  6. quollio_core-0.4.5/quollio_core/dbt_projects/databricks/README.md +5 -0
  7. quollio_core-0.4.5/quollio_core/dbt_projects/databricks/dbt_project.yml +21 -0
  8. quollio_core-0.4.5/quollio_core/dbt_projects/databricks/models/quollio_lineage_column_level.sql +73 -0
  9. quollio_core-0.4.5/quollio_core/dbt_projects/databricks/models/quollio_lineage_column_level.yml +14 -0
  10. quollio_core-0.4.5/quollio_core/dbt_projects/databricks/models/quollio_lineage_table_level.sql +63 -0
  11. quollio_core-0.4.5/quollio_core/dbt_projects/databricks/models/quollio_lineage_table_level.yml +11 -0
  12. quollio_core-0.4.5/quollio_core/dbt_projects/databricks/models/sources.yml +84 -0
  13. quollio_core-0.4.5/quollio_core/dbt_projects/databricks/package-lock.yml +14 -0
  14. quollio_core-0.4.5/quollio_core/dbt_projects/databricks/packages.yml +13 -0
  15. quollio_core-0.4.5/quollio_core/dbt_projects/databricks/profiles/profiles_template.yml +14 -0
  16. quollio_core-0.4.5/quollio_core/dbt_projects/redshift/macros/materialization/divided_view.sql +97 -0
  17. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/models/quollio_stats_columns.sql +1 -1
  18. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/macros/materialization/divided_view.sql +4 -0
  19. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/models/quollio_stats_columns.sql +1 -1
  20. {quollio_core-0.4.3/quollio_core/dbt_projects/redshift → quollio_core-0.4.5/quollio_core/dbt_projects/snowflake}/package-lock.yml +1 -1
  21. quollio_core-0.4.5/quollio_core/dbt_projects/snowflake/seeds/.gitkeep +0 -0
  22. quollio_core-0.4.5/quollio_core/dbt_projects/snowflake/snapshots/.gitkeep +0 -0
  23. quollio_core-0.4.5/quollio_core/helper/__init__.py +0 -0
  24. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/helper/env_default.py +4 -1
  25. quollio_core-0.4.5/quollio_core/profilers/__init__.py +0 -0
  26. quollio_core-0.4.5/quollio_core/profilers/databricks.py +196 -0
  27. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/profilers/lineage.py +12 -0
  28. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/profilers/stats.py +0 -1
  29. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/redshift.py +4 -5
  30. quollio_core-0.4.5/quollio_core/repository/__init__.py +0 -0
  31. quollio_core-0.4.5/quollio_core/repository/databricks.py +62 -0
  32. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/snowflake.py +4 -5
  33. quollio_core-0.4.3/quollio_core/dbt_projects/redshift/macros/materialization/divided_view.sql +0 -65
  34. {quollio_core-0.4.3 → quollio_core-0.4.5}/LICENSE +0 -0
  35. {quollio_core-0.4.3 → quollio_core-0.4.5}/README.md +0 -0
  36. {quollio_core-0.4.3/quollio_core/dbt_projects/redshift → quollio_core-0.4.5/quollio_core/dbt_projects/databricks}/analyses/.gitkeep +0 -0
  37. {quollio_core-0.4.3/quollio_core/dbt_projects/redshift → quollio_core-0.4.5/quollio_core/dbt_projects/databricks}/macros/.gitkeep +0 -0
  38. {quollio_core-0.4.3/quollio_core/dbt_projects/redshift → quollio_core-0.4.5/quollio_core/dbt_projects/databricks}/seeds/.gitkeep +0 -0
  39. {quollio_core-0.4.3/quollio_core/dbt_projects/redshift → quollio_core-0.4.5/quollio_core/dbt_projects/databricks}/snapshots/.gitkeep +0 -0
  40. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/README.md +0 -0
  41. {quollio_core-0.4.3/quollio_core/dbt_projects/snowflake → quollio_core-0.4.5/quollio_core/dbt_projects/redshift}/analyses/.gitkeep +0 -0
  42. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/dbt_project.yml +0 -0
  43. {quollio_core-0.4.3/quollio_core/dbt_projects/snowflake → quollio_core-0.4.5/quollio_core/dbt_projects/redshift}/macros/.gitkeep +0 -0
  44. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/models/quollio_lineage_table_level.sql +0 -0
  45. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/models/quollio_lineage_table_level.yml +0 -0
  46. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/models/quollio_lineage_view_level.sql +0 -0
  47. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/models/quollio_lineage_view_level.yml +0 -0
  48. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/models/quollio_sqllineage_sources.sql +0 -0
  49. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/models/quollio_sqllineage_sources.yml +0 -0
  50. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/models/quollio_stats_columns.yml +0 -0
  51. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/models/quollio_stats_profiling_columns.sql +0 -0
  52. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/models/quollio_stats_profiling_columns.yml +0 -0
  53. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/models/sources.yml +0 -0
  54. {quollio_core-0.4.3/quollio_core/dbt_projects/snowflake → quollio_core-0.4.5/quollio_core/dbt_projects/redshift}/package-lock.yml +0 -0
  55. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/packages.yml +0 -0
  56. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/redshift/profiles/profiles_template.yml +0 -0
  57. {quollio_core-0.4.3/quollio_core/dbt_projects/snowflake → quollio_core-0.4.5/quollio_core/dbt_projects/redshift}/seeds/.gitkeep +0 -0
  58. {quollio_core-0.4.3/quollio_core/dbt_projects/snowflake → quollio_core-0.4.5/quollio_core/dbt_projects/redshift}/snapshots/.gitkeep +0 -0
  59. /quollio_core-0.4.3/quollio_core/helper/__init__.py → /quollio_core-0.4.5/quollio_core/dbt_projects/seeds/.gitkeep +0 -0
  60. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/README.md +0 -0
  61. /quollio_core-0.4.3/quollio_core/profilers/__init__.py → /quollio_core-0.4.5/quollio_core/dbt_projects/snowflake/analyses/.gitkeep +0 -0
  62. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/dbt_project.yml +0 -0
  63. /quollio_core-0.4.3/quollio_core/repository/__init__.py → /quollio_core-0.4.5/quollio_core/dbt_projects/snowflake/macros/.gitkeep +0 -0
  64. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/models/quollio_lineage_column_level.sql +0 -0
  65. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/models/quollio_lineage_column_level.yml +0 -0
  66. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/models/quollio_lineage_table_level.sql +0 -0
  67. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/models/quollio_lineage_table_level.yml +0 -0
  68. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/models/quollio_sqllineage_sources.sql +0 -0
  69. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/models/quollio_sqllineage_sources.yml +0 -0
  70. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/models/quollio_stats_columns.yml +0 -0
  71. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/models/quollio_stats_profiling_columns.sql +0 -0
  72. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/models/quollio_stats_profiling_columns.yml +0 -0
  73. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/models/sources.yml +0 -0
  74. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/packages.yml +0 -0
  75. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/dbt_projects/snowflake/profiles/profiles_template.yml +0 -0
  76. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/helper/core.py +0 -0
  77. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/profilers/redshift.py +0 -0
  78. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/profilers/snowflake.py +0 -0
  79. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/profilers/sqllineage.py +0 -0
  80. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/repository/dbt.py +0 -0
  81. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/repository/qdc.py +0 -0
  82. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/repository/redshift.py +0 -0
  83. {quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/repository/snowflake.py +0 -0

{quollio_core-0.4.3 → quollio_core-0.4.5}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: quollio-core
- Version: 0.4.3
+ Version: 0.4.5
  Summary: Quollio Core
  Author-email: quollio-dev <qt.dev@quollio.com>
  Maintainer-email: RyoAriyama <ryo.arym@gmail.com>, tharuta <35373297+TakumiHaruta@users.noreply.github.com>
@@ -21,14 +21,18 @@ Requires-Dist: blake3==0.3.3
  Requires-Dist: dbt-core==1.7.10
  Requires-Dist: dbt-snowflake==1.7.0
  Requires-Dist: dbt-redshift==1.7.1
+ Requires-Dist: dbt-databricks==1.7.1
  Requires-Dist: jinja2==3.1.3
  Requires-Dist: PyYAML==6.0.1
  Requires-Dist: requests==2.31.0
  Requires-Dist: pyjwt==2.8.0
  Requires-Dist: redshift-connector==2.0.915
  Requires-Dist: snowflake-connector-python==3.5.0
+ Requires-Dist: databricks-sdk==0.17.0
+ Requires-Dist: databricks-sql-connector==2.9.5
  Requires-Dist: sqlglot==20.8.0
  Requires-Dist: black>=22.3.0 ; extra == "test"
+ Requires-Dist: coverage>=7.3.2 ; extra == "test"
  Requires-Dist: isort>=5.10.1 ; extra == "test"
  Requires-Dist: pyproject-flake8>=0.0.1-alpha.2 ; extra == "test"
  Requires-Dist: pytest>=5.2 ; extra == "test"

{quollio_core-0.4.3 → quollio_core-0.4.5}/pyproject.toml
@@ -33,12 +33,15 @@ dependencies = [
  ,"dbt-core==1.7.10"
  ,"dbt-snowflake==1.7.0"
  ,"dbt-redshift==1.7.1"
+ ,"dbt-databricks==1.7.1"
  ,"jinja2==3.1.3"
  ,"PyYAML==6.0.1"
  ,"requests==2.31.0"
  ,"pyjwt==2.8.0"
  ,"redshift-connector==2.0.915"
  ,"snowflake-connector-python==3.5.0"
+ ,"databricks-sdk==0.17.0"
+ ,"databricks-sql-connector==2.9.5"
  ,"sqlglot==20.8.0"
  ]
  dynamic = ["version", "description"]
@@ -50,6 +53,7 @@ Home = "https://quollio.com"
  [project.optional-dependencies]
  test = [
  "black>=22.3.0"
+ ,"coverage>=7.3.2"
  ,"isort>=5.10.1"
  ,"pyproject-flake8>=0.0.1-alpha.2"
  ,"pytest>=5.2"

{quollio_core-0.4.3 → quollio_core-0.4.5}/quollio_core/__init__.py
@@ -1,4 +1,4 @@
  """Quollio Core"""

- __version__ = "0.4.3"
+ __version__ = "0.4.5"
  __author__ = "Quollio Technologies, Inc"

quollio_core-0.4.5/quollio_core/bricks.py
@@ -0,0 +1,237 @@
+ import argparse
+ import logging
+ import os
+
+ from quollio_core.helper.core import setup_dbt_profile
+ from quollio_core.helper.env_default import env_default
+ from quollio_core.profilers.databricks import (
+     databricks_column_level_lineage,
+     databricks_column_stats,
+     databricks_table_level_lineage,
+ )
+ from quollio_core.repository import databricks as db
+ from quollio_core.repository import dbt, qdc
+
+ logger = logging.getLogger(__name__)
+
+
+ def build_view(
+     conn: db.DatabricksConnectionConfig,
+     target_tables: str,
+     log_level: str = "info",
+ ) -> None:
+     logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s - %(message)s")
+
+     logger.info("Build profiler views using dbt")
+     # set parameters
+     dbt_client = dbt.DBTClient()
+     current_dir = os.path.dirname(os.path.abspath(__file__))
+     project_path = f"{current_dir}/dbt_projects/databricks"
+     template_path = f"{current_dir}/dbt_projects/databricks/profiles"
+     template_name = "profiles_template.yml"
+
+     # build views using dbt
+     setup_dbt_profile(connections_json=conn.as_dict(), template_path=template_path, template_name=template_name)
+     # FIXME: when executing some of the commands, directory changes due to the library bug.
+     # https://github.com/dbt-labs/dbt-core/issues/8997
+     dbt_client.invoke(
+         cmd="deps",
+         project_dir=project_path,
+         profile_dir=template_path,
+         options=["--no-use-colors", "--log-level", log_level],
+     )
+
+     run_options = ["--no-use-colors", "--log-level", log_level, "--select", target_tables]
+     dbt_client.invoke(
+         cmd="run",
+         project_dir=project_path,
+         profile_dir=template_path,
+         options=run_options,
+     )
+     return
+
+
+ def load_lineage(
+     conn: db.DatabricksConnectionConfig,
+     qdc_client: qdc.QDCExternalAPIClient,
+     tenant_id: str,
+ ) -> None:
+     logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s - %(message)s")
+
+     logger.info("Generate Databricks table to table lineage.")
+     databricks_table_level_lineage(
+         conn=conn, qdc_client=qdc_client, tenant_id=tenant_id, dbt_table_name="quollio_lineage_table_level"
+     )
+
+     logger.info("Generate Databricks column to column lineage.")
+     databricks_column_level_lineage(
+         conn=conn, qdc_client=qdc_client, tenant_id=tenant_id, dbt_table_name="quollio_lineage_column_level"
+     )
+
+     logger.info("Lineage data is successfully loaded.")
+     return
+
+
+ def load_column_stats(
+     conn: db.DatabricksConnectionConfig,
+     qdc_client: qdc.QDCExternalAPIClient,
+     tenant_id: str,
+ ) -> None:
+     logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s - %(message)s")
+
+     logger.info("Generate Databricks column stats.")
+     databricks_column_stats(
+         conn=conn,
+         qdc_client=qdc_client,
+         tenant_id=tenant_id,
+     )
+
+     logger.info("Column stats are successfully loaded.")
+     return
+
+
+ if __name__ == "__main__":
+     parser = argparse.ArgumentParser(
+         prog="Quollio Intelligence Agent for Databricks",
+         description="Build views and load lineage and stats to Quollio from Databricks using dbt.",
+         epilog="Copyright (c) 2024 Quollio Technologies, Inc.",
+     )
+     parser.add_argument(
+         "commands",
+         choices=["build_view", "load_lineage", "load_stats"],
+         type=str,
+         nargs="+",
+         help="""
+         The command to execute.
+         'build_view': Build views using dbt,
+         'load_lineage': Load lineage data from created views to Quollio,
+         'load_stats': Load stats from created views to Quollio,
+         'load_sqllineage': Load lineage data from sql parse result(alpha),
+         """,
+     )
+     parser.add_argument(
+         "--host", type=str, action=env_default("DATABRICKS_HOST"), required=False, help="Host for Databricks workspace"
+     )
+     parser.add_argument(
+         "--http_path",
+         type=str,
+         action=env_default("DATABRICKS_HTTP_PATH"),
+         required=False,
+         help="HTTP path for a Databricks compute resource (i.e warehouse)",
+     )
+     parser.add_argument(
+         "--port",
+         type=int,
+         action=env_default("DATABRICKS_PORT"),
+         required=False,
+         help="Port for Databricks compute resource",
+     )
+     parser.add_argument(
+         "--databricks_client_secret",
+         type=str,
+         action=env_default("DATABRICKS_CLIENT_SECRET"),
+         required=False,
+         help="Secret for the service principal",
+     )
+     parser.add_argument(
+         "--databricks_client_id",
+         type=str,
+         action=env_default("DATABRICKS_CLIENT_ID"),
+         required=False,
+         help="Client id for the service principal",
+     )
+     parser.add_argument(
+         "--catalog",
+         type=str,
+         required=False,
+         action=env_default("DATABRICKS_TARGET_CATALOG"),
+         help="Target database name where the views are built by dbt",
+     )
+     parser.add_argument(
+         "--schema",
+         type=str,
+         action=env_default("DATABRICKS_TARGET_SCHEMA"),
+         required=False,
+         help="Target schema name where the views are built by dbt",
+     )
+     parser.add_argument(
+         "--log_level",
+         type=str,
+         choices=["debug", "info", "warn", "error", "none"],
+         action=env_default("LOG_LEVEL"),
+         required=False,
+         help="The log level for dbt commands. Default value is info",
+     )
+     parser.add_argument(
+         "--api_url",
+         type=str,
+         action=env_default("QDC_API_URL"),
+         required=False,
+         help="The base URL of Quollio External API",
+     )
+     parser.add_argument(
+         "--client_id",
+         type=str,
+         action=env_default("QDC_CLIENT_ID"),
+         required=False,
+         help="The client id that is created on Quollio console to let clients access Quollio External API",
+     )
+     parser.add_argument(
+         "--client_secret",
+         type=str,
+         action=env_default("QDC_CLIENT_SECRET"),
+         required=False,
+         help="The client secrete that is created on Quollio console to let clients access Quollio External API",
+     )
+     parser.add_argument(
+         "--tenant_id",
+         type=str,
+         action=env_default("TENANT_ID"),
+         required=False,
+         help="The tenant id (company id) where the lineage and stats are loaded",
+     )
+     parser.add_argument(
+         "--target_tables",
+         type=str,
+         nargs="*",
+         choices=["quollio_lineage_table_level", "quollio_lineage_view_level"],
+         action=env_default("DATABRICKS_TARGET_TABLES"),
+         required=False,
+         help="Target tables you want to create with dbt module. \
+         You need to specify this parameter if you want to specify tables, not all ones. \
+         Please specify table name with blank delimiter like tableA tableB \
+         if you want to create two or more tables",
+     )
+
+     args = parser.parse_args()
+
+     conn = db.DatabricksConnectionConfig(
+         host=args.host,
+         http_path=args.http_path,
+         client_id=args.databricks_client_id,
+         client_secret=args.databricks_client_secret,
+         catalog=args.catalog,
+         schema=args.schema,
+     )
+
+     if len(args.commands) == 0:
+         raise ValueError("No command is provided")
+
+     if "build_view" in args.commands:
+         build_view(
+             conn=conn,
+             target_tables=args.target_tables,
+             log_level=args.log_level,
+         )
+
+     if "load_lineage" in args.commands:
+         qdc_client = qdc.QDCExternalAPIClient(
+             base_url=args.api_url, client_id=args.client_id, client_secret=args.client_secret
+         )
+         load_lineage(conn=conn, qdc_client=qdc_client, tenant_id=args.tenant_id)
+
+     if "load_stats" in args.commands:
+         qdc_client = qdc.QDCExternalAPIClient(
+             base_url=args.api_url, client_id=args.client_id, client_secret=args.client_secret
+         )
+         databricks_column_stats(conn=conn, qdc_client=qdc_client, tenant_id=args.tenant_id)
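
The new entry point above can also be driven programmatically rather than through the CLI. The following is a minimal sketch based only on the names and signatures visible in this diff (`DatabricksConnectionConfig`, `QDCExternalAPIClient`, `build_view`, `load_lineage`); every host, credential, and catalog value is a placeholder, not real configuration.

```python
# Minimal sketch: driving the new Databricks agent from Python instead of the CLI.
# All connection values are placeholders; only the names/signatures shown in the
# bricks.py diff above are assumed to exist.
from quollio_core.bricks import build_view, load_lineage
from quollio_core.repository import databricks as db
from quollio_core.repository import qdc

conn = db.DatabricksConnectionConfig(
    host="example.cloud.databricks.com",        # placeholder workspace host
    http_path="/sql/1.0/warehouses/abc123",     # placeholder SQL warehouse path
    client_id="service-principal-client-id",    # placeholder OAuth client id
    client_secret="service-principal-secret",   # placeholder OAuth secret
    catalog="quollio",                          # catalog where dbt builds the views
    schema="public",                            # schema where dbt builds the views
)

# Build the lineage views with dbt, then push table/column lineage to Quollio.
build_view(conn=conn, target_tables="quollio_lineage_table_level", log_level="info")

qdc_client = qdc.QDCExternalAPIClient(
    base_url="https://api.example.quollio.com",  # placeholder External API URL
    client_id="qdc-client-id",
    client_secret="qdc-client-secret",
)
load_lineage(conn=conn, qdc_client=qdc_client, tenant_id="tenant-1234")
```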

quollio_core-0.4.5/quollio_core/dbt_projects/databricks/.gitignore
@@ -0,0 +1,4 @@
+
+ target/
+ dbt_packages/
+ logs/

quollio_core-0.4.5/quollio_core/dbt_projects/databricks/README.md
@@ -0,0 +1,5 @@
+ ### Quollio Intelligence Agent Support For Databricks
+ Notable Files:
+ 1. [quollio_lineage_table_level.sql](models/quollio_lineage_table_level.sql) - Generates table lineage data from Databricks system tables.
+ 2. [quollio_lineage_column_level.sql](models/quollio_lineage_table_level.sql) - Generates column lineage data from Databricks system tables.
+ 3. [sources.yml](models/sources.yml) - Refrences sources in the Databricks system catalog.

quollio_core-0.4.5/quollio_core/dbt_projects/databricks/dbt_project.yml
@@ -0,0 +1,21 @@
+ name: 'quollio_intelligence_databricks'
+ version: '1.0.0'
+ config-version: 2
+
+ profile: 'quollio_intelligence_databricks'
+
+ model-paths: ["models"]
+ analysis-paths: ["analyses"]
+ test-paths: ["tests"]
+ seed-paths: ["seeds"]
+ macro-paths: ["macros"]
+ snapshot-paths: ["snapshots"]
+
+ clean-targets:
+   - "target"
+   - "dbt_packages"
+
+ models:
+   +dbt-osmosis: "{model}.yml"
+   # Databricks automatically enables grants on SQL endpoints
+   # https://docs.getdbt.com/reference/resource-configs/grants

quollio_core-0.4.5/quollio_core/dbt_projects/databricks/models/quollio_lineage_column_level.sql
@@ -0,0 +1,73 @@
+ -- Gets full table lineage from Databricks
+ WITH columns_lineage_history AS (
+     SELECT
+         -- The databricks columns table does not have a full table name, create with CONCAT()
+         source_table_full_name AS upstream_table,
+         target_table_full_name as downstream_table,
+         source_column_name as upstream_column,
+         target_column_name as downstream_column,
+         event_time,
+         RANK() OVER (
+             PARTITION BY target_table_full_name
+             ORDER BY
+                 event_time DESC
+         ) AS rank
+     FROM
+         {{ source('access','column_lineage') }}
+     WHERE
+         source_table_full_name IS NOT NULL
+         AND target_table_full_name IS NOT NULL
+         AND source_table_full_name NOT LIKE "%quollio%"
+         AND target_table_full_name NOT LIKE "%quollio%"
+ ),
+ -- Gets list of existing columns in catalogs
+ existing_columns (
+     SELECT
+         CONCAT(table_catalog, '.', table_schema, '.', table_name) AS table_full_name,
+         column_name
+     FROM
+         {{ source('inf_sch','columns') }}
+ ),
+
+ -- Checks if the downstream tables exists and group operations.
+ downstream_column_exists (
+     SELECT
+         upstream_table AS UPSTREAM_TABLE_NAME,
+         upstream_column AS UPSTREAM_COLUMN_NAME,
+         downstream_table AS DOWNSTREAM_TABLE_NAME,
+         downstream_column AS DOWNSTREAM_COLUMN_NAME,
+         event_time
+     FROM
+         columns_lineage_history clh
+         INNER JOIN existing_columns ec ON clh.downstream_table = ec.table_full_name
+         AND clh.downstream_column = ec.column_name
+     WHERE
+         rank = 1
+     GROUP BY UPSTREAM_TABLE, UPSTREAM_COLUMN, DOWNSTREAM_TABLE, DOWNSTREAM_COLUMN, EVENT_TIME
+ ),
+
+ -- Aggregates the column lineage
+ aggregated_column_lineage AS (
+     SELECT
+         downstream_table_name,
+         downstream_column_name,
+         collect_set(
+             named_struct(
+                 'upstream_table_name', upstream_table_name,
+                 'upstream_column_name', upstream_column_name
+             )
+         ) AS upstream_columns
+     FROM
+         downstream_column_exists
+     GROUP BY
+         downstream_table_name,
+         downstream_column_name
+ )
+
+ SELECT
+     downstream_table_name AS DOWNSTREAM_TABLE_NAME,
+     downstream_column_name AS DOWNSTREAM_COLUMN_NAME,
+     to_json(upstream_columns) AS UPSTREAM_COLUMNS
+ FROM
+     aggregated_column_lineage
+
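
As a reading aid: each row of this model carries its upstream columns as a JSON string built by `collect_set`, `named_struct`, and `to_json`. The sketch below shows the row shape a consumer might see and how it could be decoded; the table and column values are invented examples, not data from any real workspace.

```python
import json

# Hypothetical row produced by quollio_lineage_column_level; names are illustrative only.
row = {
    "DOWNSTREAM_TABLE_NAME": "main.analytics.orders_enriched",
    "DOWNSTREAM_COLUMN_NAME": "order_total",
    "UPSTREAM_COLUMNS": json.dumps(
        [
            {"upstream_table_name": "main.raw.orders", "upstream_column_name": "amount"},
            {"upstream_table_name": "main.raw.orders", "upstream_column_name": "tax"},
        ]
    ),
}

# UPSTREAM_COLUMNS is serialized JSON, so consumers decode it before use.
for upstream in json.loads(row["UPSTREAM_COLUMNS"]):
    print(upstream["upstream_table_name"], "->", row["DOWNSTREAM_TABLE_NAME"])
```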

quollio_core-0.4.5/quollio_core/dbt_projects/databricks/models/quollio_lineage_column_level.yml
@@ -0,0 +1,14 @@
+ version: 2
+
+ model:
+   - name: quollio_lineage_column_level
+     columns:
+       - name: UPSTREAM_COLUMNS
+         description: 'String column with all upstream columns in JSON format'
+         type: string
+       - name: DOWNSTREAM_TABLE_NAME
+         description: 'Full downstream table name in <catalog>.<schema>.<table> format'
+         type: string
+       - name: DOWNSTREAM_COLUMN_NAME
+         description: 'Downstream column name'
+         type: string

quollio_core-0.4.5/quollio_core/dbt_projects/databricks/models/quollio_lineage_table_level.sql
@@ -0,0 +1,63 @@
+ -- Gets full table lineage from Databricks
+ WITH table_lineage_history AS (
+     SELECT
+         source_table_full_name as upstream_table,
+         target_table_full_name as downstream_table,
+         target_type,
+         event_time,
+         RANK() OVER (
+             PARTITION BY target_table_full_name
+             ORDER BY
+                 event_time DESC
+         ) AS rank
+     FROM
+         {{ source('access','table_lineage') }}
+     WHERE
+         source_table_full_name IS NOT NULL
+         AND target_table_full_name IS NOT NULL
+         AND source_table_full_name NOT LIKE "%quollio%"
+         AND target_table_full_name NOT LIKE "%quollio%"
+ ),
+ -- Gets list of existing tables in catalogs
+ existing_tables (
+     SELECT
+         CONCAT(table_catalog, '.', table_schema, '.', table_name) AS table_full_name
+     FROM
+         {{ source('inf_sch','tables') }}
+ ),
+
+ -- Checks if the downstream tables exists and group operations.
+ downstream_table_exists (
+     SELECT
+         upstream_table,
+         downstream_table,
+         target_type,
+         event_time
+     FROM
+         table_lineage_history tlh
+         INNER JOIN existing_tables et ON tlh.downstream_table = et.table_full_name
+     WHERE
+         rank = 1
+     GROUP BY upstream_table, downstream_table, target_type, event_time
+ ),
+
+ aggregated_table_lineage AS (
+     SELECT
+         downstream_table,
+         collect_set(
+             named_struct(
+                 'upstream_object_name', upstream_table
+             )
+         ) AS upstream_tables
+     FROM
+         downstream_table_exists
+     GROUP BY
+         downstream_table
+ )
+ SELECT
+     downstream_table as DOWNSTREAM_TABLE_NAME,
+     to_json(upstream_tables) as UPSTREAM_TABLES
+
+ FROM
+     aggregated_table_lineage
+

quollio_core-0.4.5/quollio_core/dbt_projects/databricks/models/quollio_lineage_table_level.yml
@@ -0,0 +1,11 @@
+ version: 2
+
+ model:
+   - name: quollio_lineage_column_level
+     columns:
+       - name: UPSTREAM_TABLES
+         description: 'String column with all upstream tables in JSON format'
+         type: string
+       - name: DOWNSTREAM_TABLE_NAME
+         description: 'Full downstream table name in <catalog>.<schema>.<table> format'
+         type: string

quollio_core-0.4.5/quollio_core/dbt_projects/databricks/models/sources.yml
@@ -0,0 +1,84 @@
+ version: 2
+
+ sources:
+   - name: access
+     database: system
+     schema: access
+     tables:
+       - name: table_lineage
+         description: Describes table level lineage
+         columns:
+           - name: source_table_full_name
+             description: ''
+             type: string
+           - name: target_table_full_name
+             description: ''
+             type: string
+           - name: target_type
+             description: ''
+             type: string
+           - name: event_time
+             description: ''
+             type: timestamp
+
+       - name: column_lineage
+         description: Describes column level lineage
+         columns:
+           - name: source_table_full_name
+             description: ''
+             type: string
+           - name: target_table_full_name
+             description: ''
+             type: string
+           - name: event_time
+             description: ''
+             type: timestamp
+           - name: source_column_name
+             description: ''
+             type: string
+           - name: target_column_name
+             description: ''
+             type: string
+
+   - name: inf_sch
+     database: system
+     schema: information_schema
+     tables:
+       - name: tables
+         description: Lists existing tables (i.e., not deleted).
+         columns:
+           - name: table_catalog
+             description: ''
+             type: string
+           - name: table_schema
+             description: ''
+             type: string
+           - name: table_name
+             description: ''
+             type: string
+
+       - name: views
+         description: Lists existing views (i.e., not deleted). Views are treated as tables.
+         columns:
+           - name: table_catalog
+             description: ''
+             type: string
+           - name: table_schema
+             description: ''
+             type: string
+           - name: table_name
+             description: ''
+             type: string
+
+       - name: columns
+         description: ''
+         columns:
+           - name: table_catalog
+             description: ''
+             type: string
+           - name: table_schema
+             description: ''
+             type: string
+           - name: table_name
+             description: ''
+             type: string

quollio_core-0.4.5/quollio_core/dbt_projects/databricks/package-lock.yml
@@ -0,0 +1,14 @@
+ packages:
+   - package: dbt-labs/dbt_utils
+     version: 1.1.1
+   - package: dbt-labs/spark_utils
+     version: 0.3.0
+   - package: dbt-labs/codegen
+     version: 0.12.1
+   - package: dbt-labs/dbt_external_tables
+     version: 0.8.7
+   - package: dbt-labs/dbt_project_evaluator
+     version: 0.8.1
+   - package: brooklyn-data/dbt_artifacts
+     version: 2.6.2
+ sha1_hash: cbb324267dbf6c6fb7de11b162e4fbafd1e32a9c

quollio_core-0.4.5/quollio_core/dbt_projects/databricks/packages.yml
@@ -0,0 +1,13 @@
+ packages:
+   - package: dbt-labs/dbt_utils
+     version: [">=0.0.0", "<2.0.0"]
+   - package: dbt-labs/spark_utils
+     version: [">=0.0.0", "<1.0.0"]
+   - package: dbt-labs/codegen
+     version: [">=0.0.0", "<1.0.0"]
+   - package: dbt-labs/dbt_external_tables
+     version: [">=0.0.0", "<1.0.0"]
+   - package: dbt-labs/dbt_project_evaluator
+     version: [">=0.0.0", "<1.0.0"]
+   - package: brooklyn-data/dbt_artifacts
+     version: [">=2.0.0", "<3.0.0"]

quollio_core-0.4.5/quollio_core/dbt_projects/databricks/profiles/profiles_template.yml
@@ -0,0 +1,14 @@
+ quollio_intelligence_databricks:
+   target: project
+   outputs:
+     project:
+       type: databricks
+       host: {{ host }}
+       http_path: {{ http_path }}
+       catalog: {{ catalog }}
+       schema: {{ schema }}
+       auth_type: oauth
+       client_id: {{ client_id }}
+       client_secret: {{ client_secret }}
+       databricks_port: {{ databricks_port }}
+
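
The profile template above is plain Jinja: `setup_dbt_profile` in `quollio_core.helper.core` (unchanged in this diff, so not shown) fills the placeholders from the connection settings before the dbt commands run. Below is a minimal sketch of that rendering step, assuming only jinja2 (a pinned dependency of this package) and placeholder values; the real helper may read the template from disk and write `profiles.yml` differently.

```python
from jinja2 import Template

# Template body mirrors profiles_template.yml above; all values are placeholders.
template = Template(
    """quollio_intelligence_databricks:
  target: project
  outputs:
    project:
      type: databricks
      host: {{ host }}
      http_path: {{ http_path }}
      catalog: {{ catalog }}
      schema: {{ schema }}
      auth_type: oauth
      client_id: {{ client_id }}
      client_secret: {{ client_secret }}
      databricks_port: {{ databricks_port }}
"""
)

profile_yaml = template.render(
    host="example.cloud.databricks.com",
    http_path="/sql/1.0/warehouses/abc123",
    catalog="quollio",
    schema="public",
    client_id="service-principal-client-id",
    client_secret="service-principal-secret",
    databricks_port=443,
)
print(profile_yaml)  # dbt would read the rendered output as profiles.yml via --profiles-dir
```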