fabricks 3.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fabricks/__init__.py +0 -0
- fabricks/api/__init__.py +11 -0
- fabricks/api/cdc/__init__.py +6 -0
- fabricks/api/cdc/nocdc.py +3 -0
- fabricks/api/cdc/scd1.py +3 -0
- fabricks/api/cdc/scd2.py +3 -0
- fabricks/api/context.py +27 -0
- fabricks/api/core.py +4 -0
- fabricks/api/deploy.py +3 -0
- fabricks/api/exceptions.py +19 -0
- fabricks/api/extenders.py +3 -0
- fabricks/api/job_schema.py +3 -0
- fabricks/api/log.py +3 -0
- fabricks/api/masks.py +3 -0
- fabricks/api/metastore/__init__.py +10 -0
- fabricks/api/metastore/database.py +3 -0
- fabricks/api/metastore/table.py +3 -0
- fabricks/api/metastore/view.py +6 -0
- fabricks/api/notebooks/__init__.py +0 -0
- fabricks/api/notebooks/cluster.py +6 -0
- fabricks/api/notebooks/initialize.py +42 -0
- fabricks/api/notebooks/process.py +54 -0
- fabricks/api/notebooks/run.py +59 -0
- fabricks/api/notebooks/schedule.py +75 -0
- fabricks/api/notebooks/terminate.py +31 -0
- fabricks/api/parsers.py +3 -0
- fabricks/api/schedules.py +3 -0
- fabricks/api/udfs.py +3 -0
- fabricks/api/utils.py +9 -0
- fabricks/api/version.py +3 -0
- fabricks/api/views.py +6 -0
- fabricks/cdc/__init__.py +14 -0
- fabricks/cdc/base/__init__.py +4 -0
- fabricks/cdc/base/_types.py +10 -0
- fabricks/cdc/base/cdc.py +5 -0
- fabricks/cdc/base/configurator.py +223 -0
- fabricks/cdc/base/generator.py +177 -0
- fabricks/cdc/base/merger.py +110 -0
- fabricks/cdc/base/processor.py +471 -0
- fabricks/cdc/cdc.py +5 -0
- fabricks/cdc/nocdc.py +20 -0
- fabricks/cdc/scd.py +22 -0
- fabricks/cdc/scd1.py +15 -0
- fabricks/cdc/scd2.py +15 -0
- fabricks/cdc/templates/__init__.py +0 -0
- fabricks/cdc/templates/ctes/base.sql.jinja +35 -0
- fabricks/cdc/templates/ctes/current.sql.jinja +28 -0
- fabricks/cdc/templates/ctes/deduplicate_hash.sql.jinja +32 -0
- fabricks/cdc/templates/ctes/deduplicate_key.sql.jinja +31 -0
- fabricks/cdc/templates/ctes/rectify.sql.jinja +113 -0
- fabricks/cdc/templates/ctes/slice.sql.jinja +1 -0
- fabricks/cdc/templates/filter.sql.jinja +4 -0
- fabricks/cdc/templates/filters/final.sql.jinja +4 -0
- fabricks/cdc/templates/filters/latest.sql.jinja +17 -0
- fabricks/cdc/templates/filters/update.sql.jinja +30 -0
- fabricks/cdc/templates/macros/bactick.sql.jinja +1 -0
- fabricks/cdc/templates/macros/hash.sql.jinja +18 -0
- fabricks/cdc/templates/merge.sql.jinja +3 -0
- fabricks/cdc/templates/merges/nocdc.sql.jinja +41 -0
- fabricks/cdc/templates/merges/scd1.sql.jinja +73 -0
- fabricks/cdc/templates/merges/scd2.sql.jinja +54 -0
- fabricks/cdc/templates/queries/__init__.py +0 -0
- fabricks/cdc/templates/queries/context.sql.jinja +186 -0
- fabricks/cdc/templates/queries/final.sql.jinja +1 -0
- fabricks/cdc/templates/queries/nocdc/complete.sql.jinja +10 -0
- fabricks/cdc/templates/queries/nocdc/update.sql.jinja +34 -0
- fabricks/cdc/templates/queries/scd1.sql.jinja +85 -0
- fabricks/cdc/templates/queries/scd2.sql.jinja +98 -0
- fabricks/cdc/templates/query.sql.jinja +15 -0
- fabricks/context/__init__.py +72 -0
- fabricks/context/_types.py +133 -0
- fabricks/context/config/__init__.py +92 -0
- fabricks/context/config/utils.py +53 -0
- fabricks/context/log.py +77 -0
- fabricks/context/runtime.py +117 -0
- fabricks/context/secret.py +103 -0
- fabricks/context/spark_session.py +82 -0
- fabricks/context/utils.py +80 -0
- fabricks/core/__init__.py +4 -0
- fabricks/core/dags/__init__.py +9 -0
- fabricks/core/dags/base.py +99 -0
- fabricks/core/dags/generator.py +157 -0
- fabricks/core/dags/log.py +12 -0
- fabricks/core/dags/processor.py +228 -0
- fabricks/core/dags/run.py +39 -0
- fabricks/core/dags/terminator.py +25 -0
- fabricks/core/dags/utils.py +54 -0
- fabricks/core/extenders.py +33 -0
- fabricks/core/job_schema.py +32 -0
- fabricks/core/jobs/__init__.py +21 -0
- fabricks/core/jobs/base/__init__.py +10 -0
- fabricks/core/jobs/base/_types.py +284 -0
- fabricks/core/jobs/base/checker.py +139 -0
- fabricks/core/jobs/base/configurator.py +306 -0
- fabricks/core/jobs/base/exception.py +85 -0
- fabricks/core/jobs/base/generator.py +447 -0
- fabricks/core/jobs/base/invoker.py +206 -0
- fabricks/core/jobs/base/job.py +5 -0
- fabricks/core/jobs/base/processor.py +249 -0
- fabricks/core/jobs/bronze.py +395 -0
- fabricks/core/jobs/get_job.py +127 -0
- fabricks/core/jobs/get_job_conf.py +152 -0
- fabricks/core/jobs/get_job_id.py +31 -0
- fabricks/core/jobs/get_jobs.py +107 -0
- fabricks/core/jobs/get_schedule.py +10 -0
- fabricks/core/jobs/get_schedules.py +32 -0
- fabricks/core/jobs/gold.py +415 -0
- fabricks/core/jobs/silver.py +373 -0
- fabricks/core/masks.py +52 -0
- fabricks/core/parsers/__init__.py +12 -0
- fabricks/core/parsers/_types.py +6 -0
- fabricks/core/parsers/base.py +95 -0
- fabricks/core/parsers/decorator.py +11 -0
- fabricks/core/parsers/get_parser.py +26 -0
- fabricks/core/parsers/utils.py +69 -0
- fabricks/core/schedules/__init__.py +14 -0
- fabricks/core/schedules/diagrams.py +21 -0
- fabricks/core/schedules/generate.py +20 -0
- fabricks/core/schedules/get_schedule.py +5 -0
- fabricks/core/schedules/get_schedules.py +9 -0
- fabricks/core/schedules/process.py +9 -0
- fabricks/core/schedules/run.py +3 -0
- fabricks/core/schedules/terminate.py +6 -0
- fabricks/core/schedules/views.py +61 -0
- fabricks/core/steps/__init__.py +4 -0
- fabricks/core/steps/_types.py +7 -0
- fabricks/core/steps/base.py +423 -0
- fabricks/core/steps/get_step.py +10 -0
- fabricks/core/steps/get_step_conf.py +26 -0
- fabricks/core/udfs.py +106 -0
- fabricks/core/views.py +41 -0
- fabricks/deploy/__init__.py +92 -0
- fabricks/deploy/masks.py +8 -0
- fabricks/deploy/notebooks.py +71 -0
- fabricks/deploy/schedules.py +10 -0
- fabricks/deploy/tables.py +82 -0
- fabricks/deploy/udfs.py +19 -0
- fabricks/deploy/utils.py +36 -0
- fabricks/deploy/views.py +509 -0
- fabricks/metastore/README.md +3 -0
- fabricks/metastore/__init__.py +5 -0
- fabricks/metastore/_types.py +65 -0
- fabricks/metastore/database.py +65 -0
- fabricks/metastore/dbobject.py +66 -0
- fabricks/metastore/pyproject.toml +20 -0
- fabricks/metastore/table.py +768 -0
- fabricks/metastore/utils.py +51 -0
- fabricks/metastore/view.py +53 -0
- fabricks/utils/__init__.py +0 -0
- fabricks/utils/_types.py +6 -0
- fabricks/utils/azure_queue.py +93 -0
- fabricks/utils/azure_table.py +154 -0
- fabricks/utils/console.py +51 -0
- fabricks/utils/fdict.py +240 -0
- fabricks/utils/helpers.py +228 -0
- fabricks/utils/log.py +236 -0
- fabricks/utils/mermaid.py +32 -0
- fabricks/utils/path.py +242 -0
- fabricks/utils/pip.py +61 -0
- fabricks/utils/pydantic.py +94 -0
- fabricks/utils/read/__init__.py +11 -0
- fabricks/utils/read/_types.py +3 -0
- fabricks/utils/read/read.py +305 -0
- fabricks/utils/read/read_excel.py +5 -0
- fabricks/utils/read/read_yaml.py +33 -0
- fabricks/utils/schema/__init__.py +7 -0
- fabricks/utils/schema/get_json_schema_for_type.py +161 -0
- fabricks/utils/schema/get_schema_for_type.py +99 -0
- fabricks/utils/spark.py +76 -0
- fabricks/utils/sqlglot.py +56 -0
- fabricks/utils/write/__init__.py +8 -0
- fabricks/utils/write/delta.py +46 -0
- fabricks/utils/write/stream.py +27 -0
- fabricks-3.0.11.dist-info/METADATA +23 -0
- fabricks-3.0.11.dist-info/RECORD +176 -0
- fabricks-3.0.11.dist-info/WHEEL +4 -0
fabricks/__init__.py
ADDED
|
File without changes
|
fabricks/api/__init__.py
ADDED
fabricks/api/cdc/scd1.py
ADDED
fabricks/api/cdc/scd2.py
ADDED
fabricks/api/context.py
ADDED
|
"""Convenience re-exports of the fabricks runtime context and step registries."""

from fabricks.context import BRONZE, DBUTILS, GOLD, SECRET_SCOPE, SILVER, SPARK, init_spark_session, pprint_runtime
from fabricks.core.jobs.base._types import Bronzes, Golds, Silvers, Steps

# step registries, re-exported under upper-case aliases
BRONZES = Bronzes
SILVERS = Silvers
GOLDS = Golds
STEPS = Steps


__all__ = [
    "BRONZE",
    "Bronzes",
    "BRONZES",
    "DBUTILS",
    "GOLD",
    "Golds",
    "GOLDS",
    "init_spark_session",
    "pprint_runtime",
    "SECRET_SCOPE",
    "SILVER",
    "Silvers",
    "SILVERS",
    "SPARK",
    # fix: export "Steps" alongside its alias, consistent with
    # Bronzes/BRONZES, Silvers/SILVERS and Golds/GOLDS above
    "Steps",
    "STEPS",
]
fabricks/api/core.py
ADDED
fabricks/api/deploy.py
ADDED
|
"""Public re-exports of the job check exceptions and warnings.

Exposes the pre-run/post-run check exception hierarchy defined in
``fabricks.core.jobs.base.exception`` under the stable ``fabricks.api`` namespace.
"""

from fabricks.core.jobs.base.exception import (
    CheckException,
    CheckWarning,
    PostRunCheckException,
    PostRunCheckWarning,
    PreRunCheckException,
    PreRunCheckWarning,
    SkipRunCheckWarning,
)

__all__ = [
    "CheckException",
    "CheckWarning",
    "PreRunCheckException",
    "PostRunCheckException",
    "PreRunCheckWarning",
    "PostRunCheckWarning",
    "SkipRunCheckWarning",
]
fabricks/api/log.py
ADDED
fabricks/api/masks.py
ADDED
|
File without changes
|
|
# Databricks notebook source
# Initialize a schedule run: generate the schedule and publish its id to
# downstream tasks via task values.
# MAGIC %run ./add_missing_modules

# COMMAND ----------

from databricks.sdk.runtime import dbutils, display

from fabricks.core.schedules import generate

# COMMAND ----------

# declare the "schedule" widget with a "---" sentinel so a missing value
# can be detected below
dbutils.widgets.text("schedule", "---")

# COMMAND ----------

schedule = dbutils.widgets.get("schedule")
assert schedule != "---", "no schedule provided"

# COMMAND ----------

print(schedule)

# COMMAND ----------

# generate the schedule: returns its id plus the job and dependency dataframes
schedule_id, job_df, dependency_df = generate(schedule=schedule)

# COMMAND ----------

display(job_df)

# COMMAND ----------

display(dependency_df)

# COMMAND ----------

# expose the schedule id and name to downstream tasks in the same job run
dbutils.jobs.taskValues.set(key="schedule_id", value=schedule_id)
dbutils.jobs.taskValues.set(key="schedule", value=schedule)

# COMMAND ----------

dbutils.notebook.exit(value="exit (0)")  # type: ignore
|
# Databricks notebook source
# Process one step of a schedule run, identified by schedule_id/schedule/step.
# MAGIC %run ./add_missing_modules

# COMMAND ----------

from databricks.sdk.runtime import dbutils
from pyspark.errors.exceptions.base import IllegalArgumentException

from fabricks.core.schedules import process

# COMMAND ----------

dbutils.widgets.text("step", "---")
dbutils.widgets.text("schedule_id", "---")
# fix: the "schedule" widget is read below but was never declared, unlike in
# the sibling notebooks (initialize/run); declare it with the same sentinel
# so standalone/interactive runs do not fail on an undefined widget
dbutils.widgets.text("schedule", "---")

# COMMAND ----------

# prefer the schedule_id published by the "initialize" task; fall back to the
# widget when this notebook is run standalone
try:
    schedule_id = dbutils.jobs.taskValues.get(taskKey="initialize", key="schedule_id")
except (TypeError, IllegalArgumentException, ValueError):
    schedule_id = dbutils.widgets.get("schedule_id")
    assert schedule_id != "---", "no schedule_id provided"

assert schedule_id is not None

# COMMAND ----------

step = dbutils.widgets.get("step")
assert step != "---", "no step provided"

# COMMAND ----------

schedule = dbutils.widgets.get("schedule")
assert schedule != "---", "no schedule provided"

# COMMAND ----------

print(schedule_id)

# COMMAND ----------

print(step)

# COMMAND ----------

print(schedule)

# COMMAND ----------

process(schedule_id=schedule_id, schedule=schedule, step=step)

# COMMAND ----------

dbutils.notebook.exit(value="exit (0)")  # type: ignore
|
# Databricks notebook source
# Run a single job of a schedule, identified by step/job_id/schedule_id/schedule.
# MAGIC %run ./add_missing_modules

# COMMAND ----------

import json

from databricks.sdk.runtime import dbutils

from fabricks.core.schedules import run

# COMMAND ----------

dbutils.widgets.text("step", "---")
dbutils.widgets.text("job_id", "---")
# fix: the default was "--", which wrongly passed the `job != "---"` guard
# below when no job was provided; use the same "---" sentinel as the others
dbutils.widgets.text("job", "---")
dbutils.widgets.text("schedule_id", "---")
dbutils.widgets.text("schedule", "---")

# COMMAND ----------

step = dbutils.widgets.get("step")
assert step != "---"

# COMMAND ----------

job_id = dbutils.widgets.get("job_id")
assert job_id != "---"

# COMMAND ----------

job = dbutils.widgets.get("job")
assert job != "---"

# COMMAND ----------

schedule_id = dbutils.widgets.get("schedule_id")
assert schedule_id != "---"

# COMMAND ----------

schedule = dbutils.widgets.get("schedule")
assert schedule != "---"

# COMMAND ----------

# best effort: recover the hosting Databricks job id from the notebook
# context so the run can be traced back; None when unavailable
try:
    context = json.loads(dbutils.notebook.entry_point.getDbutils().notebook().getContext().toJson())  # type: ignore
    notebook_id = context.get("tags").get("jobId")
except Exception:  # noqa: BLE001 — context lookup is optional, never fatal
    notebook_id = None

# COMMAND ----------

run(step=step, job_id=job_id, schedule_id=schedule_id, schedule=schedule, notebook_id=notebook_id)

# COMMAND ----------

dbutils.notebook.exit(value="exit (0)")  # type: ignore
|
# Databricks notebook source
# Orchestrate a full schedule: generate it, process every step in parallel
# via the "process" notebook, then terminate the run.
# MAGIC %run ./add_missing_modules

# COMMAND ----------

from logging import DEBUG
from typing import Any, cast

from databricks.sdk.runtime import dbutils, display, spark

from fabricks.context import PATH_NOTEBOOKS
from fabricks.context.log import DEFAULT_LOGGER
from fabricks.core import get_step
from fabricks.core.jobs.base._types import TStep
from fabricks.core.schedules import generate, terminate
from fabricks.utils.helpers import run_in_parallel, run_notebook

# COMMAND ----------

DEFAULT_LOGGER.setLevel(DEBUG)

# COMMAND ----------

# declare the "schedule" widget with a "---" sentinel so a missing value
# can be detected below
dbutils.widgets.text("schedule", "---")

# COMMAND ----------

schedule = dbutils.widgets.get("schedule")
assert schedule != "---", "no schedule provided"

# COMMAND ----------

# generate the schedule: returns its id plus the job and dependency dataframes
schedule_id, job_df, dependency_df = generate(schedule=schedule)

# COMMAND ----------

print(schedule_id)

# COMMAND ----------

display(job_df)

# COMMAND ----------

display(dependency_df)

# COMMAND ----------
# distinct list of steps involved in this schedule
steps = [row.step for row in spark.sql("select step from {df} group by step", df=job_df).collect()]

# COMMAND ----------


def _schedule(task: Any):
    # run the "process" notebook for one step, with that step's own
    # timeout and worker count
    step = get_step(step=cast(TStep, task))
    run_notebook(
        PATH_NOTEBOOKS.joinpath("process"),
        timeout=step.timeouts.step,
        step=task,
        schedule_id=schedule_id,
        schedule=schedule,
        workers=step.workers,
    )


# COMMAND ----------

run_in_parallel(_schedule, steps)

# COMMAND ----------

terminate(schedule_id=schedule_id)

# COMMAND ----------

dbutils.notebook.exit(value="exit (0)")  # type: ignore
|
# Databricks notebook source
# Terminate a schedule run identified by schedule_id.
# MAGIC %run ./add_missing_modules

# COMMAND ----------

from databricks.sdk.runtime import dbutils
from pyspark.errors.exceptions.base import IllegalArgumentException

from fabricks.core.schedules import terminate

# COMMAND ----------

# prefer the schedule_id published by the "initialize" task; fall back to the
# widget when this notebook is run standalone
try:
    schedule_id = dbutils.jobs.taskValues.get(taskKey="initialize", key="schedule_id")
except (TypeError, IllegalArgumentException, ValueError):
    schedule_id = dbutils.widgets.get("schedule_id")
    assert schedule_id != "---", "no schedule_id provided"

assert schedule_id is not None

# COMMAND ----------

print(schedule_id)

# COMMAND ----------

terminate(schedule_id=schedule_id)

# COMMAND ----------

dbutils.notebook.exit(value="exit (0)")  # type: ignore
|
fabricks/api/parsers.py
ADDED
fabricks/api/udfs.py
ADDED
fabricks/api/utils.py
ADDED
fabricks/api/version.py
ADDED
fabricks/api/views.py
ADDED
fabricks/cdc/__init__.py
ADDED
|
"""Public entry points of the change-data-capture (CDC) package.

Re-exports the CDC base class, the concrete strategies (no-CDC, SCD type 1,
SCD type 2) and the ``AllowedChangeDataCaptures`` literal of their names.
"""

from fabricks.cdc.base import AllowedChangeDataCaptures, BaseCDC
from fabricks.cdc.cdc import CDC
from fabricks.cdc.nocdc import NoCDC
from fabricks.cdc.scd1 import SCD1
from fabricks.cdc.scd2 import SCD2

__all__ = [
    "BaseCDC",
    "CDC",
    "AllowedChangeDataCaptures",
    "NoCDC",
    "SCD1",
    "SCD2",
]
|
"""Shared type aliases for the CDC base package."""

from __future__ import annotations

from typing import Literal, Union

from pyspark.sql import DataFrame

from fabricks.metastore.table import Table

# names of the supported change-data-capture strategies
AllowedChangeDataCaptures = Literal["nocdc", "scd1", "scd2"]
# a CDC source may be a Spark DataFrame, a metastore Table, or a SQL string
AllowedSources = Union[DataFrame, Table, str]