fabricks 3.0.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (176)
  1. fabricks/__init__.py +0 -0
  2. fabricks/api/__init__.py +11 -0
  3. fabricks/api/cdc/__init__.py +6 -0
  4. fabricks/api/cdc/nocdc.py +3 -0
  5. fabricks/api/cdc/scd1.py +3 -0
  6. fabricks/api/cdc/scd2.py +3 -0
  7. fabricks/api/context.py +27 -0
  8. fabricks/api/core.py +4 -0
  9. fabricks/api/deploy.py +3 -0
  10. fabricks/api/exceptions.py +19 -0
  11. fabricks/api/extenders.py +3 -0
  12. fabricks/api/job_schema.py +3 -0
  13. fabricks/api/log.py +3 -0
  14. fabricks/api/masks.py +3 -0
  15. fabricks/api/metastore/__init__.py +10 -0
  16. fabricks/api/metastore/database.py +3 -0
  17. fabricks/api/metastore/table.py +3 -0
  18. fabricks/api/metastore/view.py +6 -0
  19. fabricks/api/notebooks/__init__.py +0 -0
  20. fabricks/api/notebooks/cluster.py +6 -0
  21. fabricks/api/notebooks/initialize.py +42 -0
  22. fabricks/api/notebooks/process.py +54 -0
  23. fabricks/api/notebooks/run.py +59 -0
  24. fabricks/api/notebooks/schedule.py +75 -0
  25. fabricks/api/notebooks/terminate.py +31 -0
  26. fabricks/api/parsers.py +3 -0
  27. fabricks/api/schedules.py +3 -0
  28. fabricks/api/udfs.py +3 -0
  29. fabricks/api/utils.py +9 -0
  30. fabricks/api/version.py +3 -0
  31. fabricks/api/views.py +6 -0
  32. fabricks/cdc/__init__.py +14 -0
  33. fabricks/cdc/base/__init__.py +4 -0
  34. fabricks/cdc/base/_types.py +10 -0
  35. fabricks/cdc/base/cdc.py +5 -0
  36. fabricks/cdc/base/configurator.py +223 -0
  37. fabricks/cdc/base/generator.py +177 -0
  38. fabricks/cdc/base/merger.py +110 -0
  39. fabricks/cdc/base/processor.py +471 -0
  40. fabricks/cdc/cdc.py +5 -0
  41. fabricks/cdc/nocdc.py +20 -0
  42. fabricks/cdc/scd.py +22 -0
  43. fabricks/cdc/scd1.py +15 -0
  44. fabricks/cdc/scd2.py +15 -0
  45. fabricks/cdc/templates/__init__.py +0 -0
  46. fabricks/cdc/templates/ctes/base.sql.jinja +35 -0
  47. fabricks/cdc/templates/ctes/current.sql.jinja +28 -0
  48. fabricks/cdc/templates/ctes/deduplicate_hash.sql.jinja +32 -0
  49. fabricks/cdc/templates/ctes/deduplicate_key.sql.jinja +31 -0
  50. fabricks/cdc/templates/ctes/rectify.sql.jinja +113 -0
  51. fabricks/cdc/templates/ctes/slice.sql.jinja +1 -0
  52. fabricks/cdc/templates/filter.sql.jinja +4 -0
  53. fabricks/cdc/templates/filters/final.sql.jinja +4 -0
  54. fabricks/cdc/templates/filters/latest.sql.jinja +17 -0
  55. fabricks/cdc/templates/filters/update.sql.jinja +30 -0
  56. fabricks/cdc/templates/macros/bactick.sql.jinja +1 -0
  57. fabricks/cdc/templates/macros/hash.sql.jinja +18 -0
  58. fabricks/cdc/templates/merge.sql.jinja +3 -0
  59. fabricks/cdc/templates/merges/nocdc.sql.jinja +41 -0
  60. fabricks/cdc/templates/merges/scd1.sql.jinja +73 -0
  61. fabricks/cdc/templates/merges/scd2.sql.jinja +54 -0
  62. fabricks/cdc/templates/queries/__init__.py +0 -0
  63. fabricks/cdc/templates/queries/context.sql.jinja +186 -0
  64. fabricks/cdc/templates/queries/final.sql.jinja +1 -0
  65. fabricks/cdc/templates/queries/nocdc/complete.sql.jinja +10 -0
  66. fabricks/cdc/templates/queries/nocdc/update.sql.jinja +34 -0
  67. fabricks/cdc/templates/queries/scd1.sql.jinja +85 -0
  68. fabricks/cdc/templates/queries/scd2.sql.jinja +98 -0
  69. fabricks/cdc/templates/query.sql.jinja +15 -0
  70. fabricks/context/__init__.py +72 -0
  71. fabricks/context/_types.py +133 -0
  72. fabricks/context/config/__init__.py +92 -0
  73. fabricks/context/config/utils.py +53 -0
  74. fabricks/context/log.py +77 -0
  75. fabricks/context/runtime.py +117 -0
  76. fabricks/context/secret.py +103 -0
  77. fabricks/context/spark_session.py +82 -0
  78. fabricks/context/utils.py +80 -0
  79. fabricks/core/__init__.py +4 -0
  80. fabricks/core/dags/__init__.py +9 -0
  81. fabricks/core/dags/base.py +99 -0
  82. fabricks/core/dags/generator.py +157 -0
  83. fabricks/core/dags/log.py +12 -0
  84. fabricks/core/dags/processor.py +228 -0
  85. fabricks/core/dags/run.py +39 -0
  86. fabricks/core/dags/terminator.py +25 -0
  87. fabricks/core/dags/utils.py +54 -0
  88. fabricks/core/extenders.py +33 -0
  89. fabricks/core/job_schema.py +32 -0
  90. fabricks/core/jobs/__init__.py +21 -0
  91. fabricks/core/jobs/base/__init__.py +10 -0
  92. fabricks/core/jobs/base/_types.py +284 -0
  93. fabricks/core/jobs/base/checker.py +139 -0
  94. fabricks/core/jobs/base/configurator.py +306 -0
  95. fabricks/core/jobs/base/exception.py +85 -0
  96. fabricks/core/jobs/base/generator.py +447 -0
  97. fabricks/core/jobs/base/invoker.py +206 -0
  98. fabricks/core/jobs/base/job.py +5 -0
  99. fabricks/core/jobs/base/processor.py +249 -0
  100. fabricks/core/jobs/bronze.py +395 -0
  101. fabricks/core/jobs/get_job.py +127 -0
  102. fabricks/core/jobs/get_job_conf.py +152 -0
  103. fabricks/core/jobs/get_job_id.py +31 -0
  104. fabricks/core/jobs/get_jobs.py +107 -0
  105. fabricks/core/jobs/get_schedule.py +10 -0
  106. fabricks/core/jobs/get_schedules.py +32 -0
  107. fabricks/core/jobs/gold.py +415 -0
  108. fabricks/core/jobs/silver.py +373 -0
  109. fabricks/core/masks.py +52 -0
  110. fabricks/core/parsers/__init__.py +12 -0
  111. fabricks/core/parsers/_types.py +6 -0
  112. fabricks/core/parsers/base.py +95 -0
  113. fabricks/core/parsers/decorator.py +11 -0
  114. fabricks/core/parsers/get_parser.py +26 -0
  115. fabricks/core/parsers/utils.py +69 -0
  116. fabricks/core/schedules/__init__.py +14 -0
  117. fabricks/core/schedules/diagrams.py +21 -0
  118. fabricks/core/schedules/generate.py +20 -0
  119. fabricks/core/schedules/get_schedule.py +5 -0
  120. fabricks/core/schedules/get_schedules.py +9 -0
  121. fabricks/core/schedules/process.py +9 -0
  122. fabricks/core/schedules/run.py +3 -0
  123. fabricks/core/schedules/terminate.py +6 -0
  124. fabricks/core/schedules/views.py +61 -0
  125. fabricks/core/steps/__init__.py +4 -0
  126. fabricks/core/steps/_types.py +7 -0
  127. fabricks/core/steps/base.py +423 -0
  128. fabricks/core/steps/get_step.py +10 -0
  129. fabricks/core/steps/get_step_conf.py +26 -0
  130. fabricks/core/udfs.py +106 -0
  131. fabricks/core/views.py +41 -0
  132. fabricks/deploy/__init__.py +92 -0
  133. fabricks/deploy/masks.py +8 -0
  134. fabricks/deploy/notebooks.py +71 -0
  135. fabricks/deploy/schedules.py +10 -0
  136. fabricks/deploy/tables.py +82 -0
  137. fabricks/deploy/udfs.py +19 -0
  138. fabricks/deploy/utils.py +36 -0
  139. fabricks/deploy/views.py +509 -0
  140. fabricks/metastore/README.md +3 -0
  141. fabricks/metastore/__init__.py +5 -0
  142. fabricks/metastore/_types.py +65 -0
  143. fabricks/metastore/database.py +65 -0
  144. fabricks/metastore/dbobject.py +66 -0
  145. fabricks/metastore/pyproject.toml +20 -0
  146. fabricks/metastore/table.py +768 -0
  147. fabricks/metastore/utils.py +51 -0
  148. fabricks/metastore/view.py +53 -0
  149. fabricks/utils/__init__.py +0 -0
  150. fabricks/utils/_types.py +6 -0
  151. fabricks/utils/azure_queue.py +93 -0
  152. fabricks/utils/azure_table.py +154 -0
  153. fabricks/utils/console.py +51 -0
  154. fabricks/utils/fdict.py +240 -0
  155. fabricks/utils/helpers.py +228 -0
  156. fabricks/utils/log.py +236 -0
  157. fabricks/utils/mermaid.py +32 -0
  158. fabricks/utils/path.py +242 -0
  159. fabricks/utils/pip.py +61 -0
  160. fabricks/utils/pydantic.py +94 -0
  161. fabricks/utils/read/__init__.py +11 -0
  162. fabricks/utils/read/_types.py +3 -0
  163. fabricks/utils/read/read.py +305 -0
  164. fabricks/utils/read/read_excel.py +5 -0
  165. fabricks/utils/read/read_yaml.py +33 -0
  166. fabricks/utils/schema/__init__.py +7 -0
  167. fabricks/utils/schema/get_json_schema_for_type.py +161 -0
  168. fabricks/utils/schema/get_schema_for_type.py +99 -0
  169. fabricks/utils/spark.py +76 -0
  170. fabricks/utils/sqlglot.py +56 -0
  171. fabricks/utils/write/__init__.py +8 -0
  172. fabricks/utils/write/delta.py +46 -0
  173. fabricks/utils/write/stream.py +27 -0
  174. fabricks-3.0.11.dist-info/METADATA +23 -0
  175. fabricks-3.0.11.dist-info/RECORD +176 -0
  176. fabricks-3.0.11.dist-info/WHEEL +4 -0
fabricks/__init__.py ADDED
File without changes
fabricks/api/__init__.py ADDED
@@ -0,0 +1,11 @@
1
+ from fabricks.api.context import init_spark_session
2
+ from fabricks.api.core import get_job, get_jobs, get_step
3
+ from fabricks.api.deploy import Deploy
4
+
5
+ __all__ = [
6
+ "init_spark_session",
7
+ "get_job",
8
+ "get_jobs",
9
+ "get_step",
10
+ "Deploy",
11
+ ]
fabricks/api/cdc/__init__.py ADDED
@@ -0,0 +1,6 @@
1
+ from fabricks.api.cdc.nocdc import NoCDC
2
+ from fabricks.api.cdc.scd1 import SCD1
3
+ from fabricks.api.cdc.scd2 import SCD2
4
+ from fabricks.cdc.cdc import CDC
5
+
6
+ __all__ = ["CDC", "SCD1", "SCD2", "NoCDC"]
fabricks/api/cdc/nocdc.py ADDED
@@ -0,0 +1,3 @@
1
+ from fabricks.cdc.nocdc import NoCDC
2
+
3
+ __all__ = ["NoCDC"]
fabricks/api/cdc/scd1.py ADDED
@@ -0,0 +1,3 @@
1
+ from fabricks.cdc.scd1 import SCD1
2
+
3
+ __all__ = ["SCD1"]
fabricks/api/cdc/scd2.py ADDED
@@ -0,0 +1,3 @@
1
+ from fabricks.cdc.scd2 import SCD2
2
+
3
+ __all__ = ["SCD2"]
fabricks/api/context.py ADDED
@@ -0,0 +1,27 @@
1
+ from fabricks.context import BRONZE, DBUTILS, GOLD, SECRET_SCOPE, SILVER, SPARK, init_spark_session, pprint_runtime
2
+ from fabricks.core.jobs.base._types import Bronzes, Golds, Silvers, Steps
3
+
4
+ # step
5
+ BRONZES = Bronzes
6
+ SILVERS = Silvers
7
+ GOLDS = Golds
8
+ STEPS = Steps
9
+
10
+
11
+ __all__ = [
12
+ "BRONZE",
13
+ "Bronzes",
14
+ "BRONZES",
15
+ "DBUTILS",
16
+ "GOLD",
17
+ "Golds",
18
+ "GOLDS",
19
+ "init_spark_session",
20
+ "pprint_runtime",
21
+ "SECRET_SCOPE",
22
+ "SILVER",
23
+ "Silvers",
24
+ "SILVERS",
25
+ "SPARK",
26
+ "STEPS",
27
+ ]
fabricks/api/core.py ADDED
@@ -0,0 +1,4 @@
1
+ from fabricks.core.jobs import BaseJob, get_job, get_jobs
2
+ from fabricks.core.steps import get_step
3
+
4
+ __all__ = ["BaseJob", "get_job", "get_jobs", "get_step"]
fabricks/api/deploy.py ADDED
@@ -0,0 +1,3 @@
1
+ from fabricks.deploy import Deploy
2
+
3
+ __all__ = ["Deploy"]
fabricks/api/exceptions.py ADDED
@@ -0,0 +1,19 @@
1
+ from fabricks.core.jobs.base.exception import (
2
+ CheckException,
3
+ CheckWarning,
4
+ PostRunCheckException,
5
+ PostRunCheckWarning,
6
+ PreRunCheckException,
7
+ PreRunCheckWarning,
8
+ SkipRunCheckWarning,
9
+ )
10
+
11
+ __all__ = [
12
+ "CheckException",
13
+ "CheckWarning",
14
+ "PreRunCheckException",
15
+ "PostRunCheckException",
16
+ "PreRunCheckWarning",
17
+ "PostRunCheckWarning",
18
+ "SkipRunCheckWarning",
19
+ ]
fabricks/api/extenders.py ADDED
@@ -0,0 +1,3 @@
1
+ from fabricks.core.extenders import extender
2
+
3
+ __all__ = ["extender"]
fabricks/api/job_schema.py ADDED
@@ -0,0 +1,3 @@
1
+ from fabricks.core.job_schema import get_job_schema, print_job_schema
2
+
3
+ __all__ = ["get_job_schema", "print_job_schema"]
fabricks/api/log.py ADDED
@@ -0,0 +1,3 @@
1
+ from fabricks.context.log import DEFAULT_LOGGER, send_message_to_channel
2
+
3
+ __all__ = ["DEFAULT_LOGGER", "send_message_to_channel"]
fabricks/api/masks.py ADDED
@@ -0,0 +1,3 @@
1
+ from fabricks.core.masks import register_all_masks, register_mask
2
+
3
+ __all__ = ["register_all_masks", "register_mask"]
fabricks/api/metastore/__init__.py ADDED
@@ -0,0 +1,10 @@
1
+ from fabricks.api.metastore.database import Database
2
+ from fabricks.api.metastore.table import Table
3
+ from fabricks.api.metastore.view import View, create_or_replace_view
4
+
5
+ __all__ = [
6
+ "create_or_replace_view",
7
+ "Database",
8
+ "Table",
9
+ "View",
10
+ ]
fabricks/api/metastore/database.py ADDED
@@ -0,0 +1,3 @@
1
+ from fabricks.metastore import Database
2
+
3
+ __all__ = ["Database"]
fabricks/api/metastore/table.py ADDED
@@ -0,0 +1,3 @@
1
+ from fabricks.metastore import Table
2
+
3
+ __all__ = ["Table"]
fabricks/api/metastore/view.py ADDED
@@ -0,0 +1,6 @@
1
+ from fabricks.metastore import View
2
+
3
+ create_or_replace_view = View.create_or_replace
4
+
5
+
6
+ __all__ = ["View", "create_or_replace_view"]
fabricks/api/notebooks/__init__.py ADDED
File without changes
fabricks/api/notebooks/cluster.py ADDED
@@ -0,0 +1,6 @@
1
+ # Databricks notebook source
2
+ from databricks.sdk.runtime import dbutils
3
+
4
+ # COMMAND ----------
5
+
6
+ dbutils.notebook.exit(value="exit (0)") # type: ignore
fabricks/api/notebooks/initialize.py ADDED
@@ -0,0 +1,42 @@
1
+ # Databricks notebook source
2
+ # MAGIC %run ./add_missing_modules
3
+
4
+ # COMMAND ----------
5
+
6
+ from databricks.sdk.runtime import dbutils, display
7
+
8
+ from fabricks.core.schedules import generate
9
+
10
+ # COMMAND ----------
11
+
12
+ dbutils.widgets.text("schedule", "---")
13
+
14
+ # COMMAND ----------
15
+
16
+ schedule = dbutils.widgets.get("schedule")
17
+ assert schedule != "---", "no schedule provided"
18
+
19
+ # COMMAND ----------
20
+
21
+ print(schedule)
22
+
23
+ # COMMAND ----------
24
+
25
+ schedule_id, job_df, dependency_df = generate(schedule=schedule)
26
+
27
+ # COMMAND ----------
28
+
29
+ display(job_df)
30
+
31
+ # COMMAND ----------
32
+
33
+ display(dependency_df)
34
+
35
+ # COMMAND ----------
36
+
37
+ dbutils.jobs.taskValues.set(key="schedule_id", value=schedule_id)
38
+ dbutils.jobs.taskValues.set(key="schedule", value=schedule)
39
+
40
+ # COMMAND ----------
41
+
42
+ dbutils.notebook.exit(value="exit (0)") # type: ignore
fabricks/api/notebooks/process.py ADDED
@@ -0,0 +1,54 @@
1
+ # Databricks notebook source
2
+ # MAGIC %run ./add_missing_modules
3
+
4
+ # COMMAND ----------
5
+
6
+ from databricks.sdk.runtime import dbutils
7
+ from pyspark.errors.exceptions.base import IllegalArgumentException
8
+
9
+ from fabricks.core.schedules import process
10
+
11
+ # COMMAND ----------
12
+
13
+ dbutils.widgets.text("step", "---")
14
+ dbutils.widgets.text("schedule_id", "---")
15
+
16
+ # COMMAND ----------
17
+
18
+ try:
19
+ schedule_id = dbutils.jobs.taskValues.get(taskKey="initialize", key="schedule_id")
20
+ except (TypeError, IllegalArgumentException, ValueError):
21
+ schedule_id = dbutils.widgets.get("schedule_id")
22
+ assert schedule_id != "---", "no schedule_id provided"
23
+
24
+ assert schedule_id is not None
25
+
26
+ # COMMAND ----------
27
+
28
+ step = dbutils.widgets.get("step")
29
+ assert step != "---", "no step provided"
30
+
31
+ # COMMAND ----------
32
+
33
+ schedule = dbutils.widgets.get("schedule")
34
+ assert schedule != "---", "no schedule provided"
35
+
36
+ # COMMAND ----------
37
+
38
+ print(schedule_id)
39
+
40
+ # COMMAND ----------
41
+
42
+ print(step)
43
+
44
+ # COMMAND ----------
45
+
46
+ print(schedule)
47
+
48
+ # COMMAND ----------
49
+
50
+ process(schedule_id=schedule_id, schedule=schedule, step=step)
51
+
52
+ # COMMAND ----------
53
+
54
+ dbutils.notebook.exit(value="exit (0)") # type: ignore
fabricks/api/notebooks/run.py ADDED
@@ -0,0 +1,59 @@
1
+ # Databricks notebook source
2
+ # MAGIC %run ./add_missing_modules
3
+
4
+ # COMMAND ----------
5
+
6
+ import json
7
+
8
+ from databricks.sdk.runtime import dbutils
9
+
10
+ from fabricks.core.schedules import run
11
+
12
+ # COMMAND ----------
13
+
14
+ dbutils.widgets.text("step", "---")
15
+ dbutils.widgets.text("job_id", "---")
16
+ dbutils.widgets.text("job", "--")
17
+ dbutils.widgets.text("schedule_id", "---")
18
+ dbutils.widgets.text("schedule", "---")
19
+
20
+ # COMMAND ----------
21
+
22
+ step = dbutils.widgets.get("step")
23
+ assert step != "---"
24
+
25
+ # COMMAND ----------
26
+
27
+ job_id = dbutils.widgets.get("job_id")
28
+ assert job_id != "---"
29
+
30
+ # COMMAND ----------
31
+
32
+ job = dbutils.widgets.get("job")
33
+ assert job != "---"
34
+
35
+ # COMMAND ----------
36
+
37
+ schedule_id = dbutils.widgets.get("schedule_id")
38
+ assert schedule_id != "---"
39
+
40
+ # COMMAND ----------
41
+
42
+ schedule = dbutils.widgets.get("schedule")
43
+ assert schedule != "---"
44
+
45
+ # COMMAND ----------
46
+
47
+ try:
48
+ context = json.loads(dbutils.notebook.entry_point.getDbutils().notebook().getContext().toJson()) # type: ignore
49
+ notebook_id = context.get("tags").get("jobId")
50
+ except: # noqa: E722
51
+ notebook_id = None
52
+
53
+ # COMMAND ----------
54
+
55
+ run(step=step, job_id=job_id, schedule_id=schedule_id, schedule=schedule, notebook_id=notebook_id)
56
+
57
+ # COMMAND ----------
58
+
59
+ dbutils.notebook.exit(value="exit (0)") # type: ignore
fabricks/api/notebooks/schedule.py ADDED
@@ -0,0 +1,75 @@
1
+ # Databricks notebook source
2
+ # MAGIC %run ./add_missing_modules
3
+
4
+ # COMMAND ----------
5
+
6
+ from logging import DEBUG
7
+ from typing import Any, cast
8
+
9
+ from databricks.sdk.runtime import dbutils, display, spark
10
+
11
+ from fabricks.context import PATH_NOTEBOOKS
12
+ from fabricks.context.log import DEFAULT_LOGGER
13
+ from fabricks.core import get_step
14
+ from fabricks.core.jobs.base._types import TStep
15
+ from fabricks.core.schedules import generate, terminate
16
+ from fabricks.utils.helpers import run_in_parallel, run_notebook
17
+
18
+ # COMMAND ----------
19
+
20
+ DEFAULT_LOGGER.setLevel(DEBUG)
21
+
22
+ # COMMAND ----------
23
+
24
+ dbutils.widgets.text("schedule", "---")
25
+
26
+ # COMMAND ----------
27
+
28
+ schedule = dbutils.widgets.get("schedule")
29
+ assert schedule != "---", "no schedule provided"
30
+
31
+ # COMMAND ----------
32
+
33
+ schedule_id, job_df, dependency_df = generate(schedule=schedule)
34
+
35
+ # COMMAND ----------
36
+
37
+ print(schedule_id)
38
+
39
+ # COMMAND ----------
40
+
41
+ display(job_df)
42
+
43
+ # COMMAND ----------
44
+
45
+ display(dependency_df)
46
+
47
+ # COMMAND ----------
48
+ steps = [row.step for row in spark.sql("select step from {df} group by step", df=job_df).collect()]
49
+
50
+ # COMMAND ----------
51
+
52
+
53
+ def _schedule(task: Any):
54
+ step = get_step(step=cast(TStep, task))
55
+ run_notebook(
56
+ PATH_NOTEBOOKS.joinpath("process"),
57
+ timeout=step.timeouts.step,
58
+ step=task,
59
+ schedule_id=schedule_id,
60
+ schedule=schedule,
61
+ workers=step.workers,
62
+ )
63
+
64
+
65
+ # COMMAND ----------
66
+
67
+ run_in_parallel(_schedule, steps)
68
+
69
+ # COMMAND ----------
70
+
71
+ terminate(schedule_id=schedule_id)
72
+
73
+ # COMMAND ----------
74
+
75
+ dbutils.notebook.exit(value="exit (0)") # type: ignore
fabricks/api/notebooks/terminate.py ADDED
@@ -0,0 +1,31 @@
1
+ # Databricks notebook source
2
+ # MAGIC %run ./add_missing_modules
3
+
4
+ # COMMAND ----------
5
+
6
+ from databricks.sdk.runtime import dbutils
7
+ from pyspark.errors.exceptions.base import IllegalArgumentException
8
+
9
+ from fabricks.core.schedules import terminate
10
+
11
+ # COMMAND ----------
12
+
13
+ try:
14
+ schedule_id = dbutils.jobs.taskValues.get(taskKey="initialize", key="schedule_id")
15
+ except (TypeError, IllegalArgumentException, ValueError):
16
+ schedule_id = dbutils.widgets.get("schedule_id")
17
+ assert schedule_id != "---", "no schedule_id provided"
18
+
19
+ assert schedule_id is not None
20
+
21
+ # COMMAND ----------
22
+
23
+ print(schedule_id)
24
+
25
+ # COMMAND ----------
26
+
27
+ terminate(schedule_id=schedule_id)
28
+
29
+ # COMMAND ----------
30
+
31
+ dbutils.notebook.exit(value="exit (0)") # type: ignore
fabricks/api/parsers.py ADDED
@@ -0,0 +1,3 @@
1
+ from fabricks.core.parsers import BaseParser, ParserOptions, parser
2
+
3
+ __all__ = ["BaseParser", "ParserOptions", "parser"]
fabricks/api/schedules.py ADDED
@@ -0,0 +1,3 @@
1
+ from fabricks.core.schedules import create_or_replace_view, create_or_replace_views, generate, process, terminate
2
+
3
+ __all__ = ["create_or_replace_view", "create_or_replace_views", "terminate", "generate", "process"]
fabricks/api/udfs.py ADDED
@@ -0,0 +1,3 @@
1
+ from fabricks.core.udfs import register_all_udfs, register_udf, udf
2
+
3
+ __all__ = ["udf", "register_all_udfs", "register_udf"]
fabricks/api/utils.py ADDED
@@ -0,0 +1,9 @@
1
+ from fabricks.utils.helpers import concat_dfs, concat_ws, run_in_parallel
2
+ from fabricks.utils.path import Path
3
+
4
+ __all__ = [
5
+ "concat_dfs",
6
+ "concat_ws",
7
+ "Path",
8
+ "run_in_parallel",
9
+ ]
fabricks/api/version.py ADDED
@@ -0,0 +1,3 @@
1
+ import importlib.metadata
2
+
3
+ FABRICKS_VERSION = importlib.metadata.version("fabricks")
fabricks/api/views.py ADDED
@@ -0,0 +1,6 @@
1
+ from fabricks.core.views import create_or_replace_view, create_or_replace_views
2
+
3
+ __all__ = [
4
+ "create_or_replace_view",
5
+ "create_or_replace_views",
6
+ ]
fabricks/cdc/__init__.py ADDED
@@ -0,0 +1,14 @@
1
+ from fabricks.cdc.base import AllowedChangeDataCaptures, BaseCDC
2
+ from fabricks.cdc.cdc import CDC
3
+ from fabricks.cdc.nocdc import NoCDC
4
+ from fabricks.cdc.scd1 import SCD1
5
+ from fabricks.cdc.scd2 import SCD2
6
+
7
+ __all__ = [
8
+ "BaseCDC",
9
+ "CDC",
10
+ "AllowedChangeDataCaptures",
11
+ "NoCDC",
12
+ "SCD1",
13
+ "SCD2",
14
+ ]
fabricks/cdc/base/__init__.py ADDED
@@ -0,0 +1,4 @@
1
+ from fabricks.cdc.base._types import AllowedChangeDataCaptures
2
+ from fabricks.cdc.base.cdc import BaseCDC
3
+
4
+ __all__ = ["BaseCDC", "AllowedChangeDataCaptures"]
fabricks/cdc/base/_types.py ADDED
@@ -0,0 +1,10 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Literal, Union
4
+
5
+ from pyspark.sql import DataFrame
6
+
7
+ from fabricks.metastore.table import Table
8
+
9
+ AllowedChangeDataCaptures = Literal["nocdc", "scd1", "scd2"]
10
+ AllowedSources = Union[DataFrame, Table, str]
fabricks/cdc/base/cdc.py ADDED
@@ -0,0 +1,5 @@
1
+ from fabricks.cdc.base.merger import Merger
2
+
3
+
4
+ class BaseCDC(Merger):
5
+ pass