fabricks 3.0.5.2__py3-none-any.whl → 3.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139)
  1. fabricks/api/__init__.py +2 -0
  2. fabricks/api/context.py +1 -2
  3. fabricks/api/deploy.py +3 -0
  4. fabricks/api/job_schema.py +2 -2
  5. fabricks/api/masks.py +3 -0
  6. fabricks/api/notebooks/initialize.py +2 -2
  7. fabricks/api/notebooks/process.py +2 -2
  8. fabricks/api/notebooks/run.py +2 -2
  9. fabricks/api/notebooks/schedule.py +75 -0
  10. fabricks/api/notebooks/terminate.py +2 -2
  11. fabricks/api/schedules.py +2 -16
  12. fabricks/cdc/__init__.py +2 -2
  13. fabricks/cdc/base/__init__.py +2 -2
  14. fabricks/cdc/base/_types.py +9 -2
  15. fabricks/cdc/base/configurator.py +86 -41
  16. fabricks/cdc/base/generator.py +44 -35
  17. fabricks/cdc/base/merger.py +16 -14
  18. fabricks/cdc/base/processor.py +232 -144
  19. fabricks/cdc/nocdc.py +8 -7
  20. fabricks/cdc/templates/{query → ctes}/base.sql.jinja +7 -6
  21. fabricks/cdc/templates/ctes/current.sql.jinja +28 -0
  22. fabricks/cdc/templates/ctes/deduplicate_hash.sql.jinja +32 -0
  23. fabricks/cdc/templates/ctes/deduplicate_key.sql.jinja +31 -0
  24. fabricks/cdc/templates/{query → ctes}/rectify.sql.jinja +4 -22
  25. fabricks/cdc/templates/ctes/slice.sql.jinja +1 -0
  26. fabricks/cdc/templates/filter.sql.jinja +4 -4
  27. fabricks/cdc/templates/macros/bactick.sql.jinja +1 -0
  28. fabricks/cdc/templates/macros/hash.sql.jinja +18 -0
  29. fabricks/cdc/templates/merge.sql.jinja +3 -2
  30. fabricks/cdc/templates/merges/nocdc.sql.jinja +41 -0
  31. fabricks/cdc/templates/queries/context.sql.jinja +186 -0
  32. fabricks/cdc/templates/{query/nocdc.sql.jinja → queries/nocdc/complete.sql.jinja} +1 -1
  33. fabricks/cdc/templates/queries/nocdc/update.sql.jinja +35 -0
  34. fabricks/cdc/templates/{query → queries}/scd1.sql.jinja +2 -28
  35. fabricks/cdc/templates/{query → queries}/scd2.sql.jinja +29 -48
  36. fabricks/cdc/templates/query.sql.jinja +15 -11
  37. fabricks/context/__init__.py +18 -4
  38. fabricks/context/_types.py +2 -0
  39. fabricks/context/config/__init__.py +92 -0
  40. fabricks/context/config/utils.py +53 -0
  41. fabricks/context/log.py +8 -2
  42. fabricks/context/runtime.py +87 -263
  43. fabricks/context/secret.py +1 -1
  44. fabricks/context/spark_session.py +1 -1
  45. fabricks/context/utils.py +80 -0
  46. fabricks/core/dags/generator.py +6 -7
  47. fabricks/core/dags/log.py +2 -15
  48. fabricks/core/dags/processor.py +11 -11
  49. fabricks/core/dags/utils.py +15 -1
  50. fabricks/core/{scripts/job_schema.py → job_schema.py} +4 -0
  51. fabricks/core/jobs/base/_types.py +64 -22
  52. fabricks/core/jobs/base/checker.py +13 -12
  53. fabricks/core/jobs/base/configurator.py +41 -67
  54. fabricks/core/jobs/base/generator.py +55 -24
  55. fabricks/core/jobs/base/invoker.py +54 -30
  56. fabricks/core/jobs/base/processor.py +43 -26
  57. fabricks/core/jobs/bronze.py +45 -38
  58. fabricks/core/jobs/get_jobs.py +2 -2
  59. fabricks/core/jobs/get_schedule.py +10 -0
  60. fabricks/core/jobs/get_schedules.py +32 -0
  61. fabricks/core/jobs/gold.py +61 -48
  62. fabricks/core/jobs/silver.py +39 -40
  63. fabricks/core/masks.py +52 -0
  64. fabricks/core/parsers/base.py +2 -2
  65. fabricks/core/schedules/__init__.py +14 -0
  66. fabricks/core/schedules/diagrams.py +46 -0
  67. fabricks/core/schedules/get_schedule.py +5 -0
  68. fabricks/core/schedules/get_schedules.py +9 -0
  69. fabricks/core/schedules/run.py +3 -0
  70. fabricks/core/schedules/views.py +61 -0
  71. fabricks/core/steps/base.py +110 -72
  72. fabricks/core/udfs.py +12 -23
  73. fabricks/core/views.py +20 -13
  74. fabricks/deploy/__init__.py +97 -0
  75. fabricks/deploy/masks.py +8 -0
  76. fabricks/deploy/notebooks.py +71 -0
  77. fabricks/deploy/schedules.py +8 -0
  78. fabricks/{core/deploy → deploy}/tables.py +16 -13
  79. fabricks/{core/deploy → deploy}/udfs.py +3 -1
  80. fabricks/deploy/utils.py +36 -0
  81. fabricks/{core/deploy → deploy}/views.py +5 -9
  82. fabricks/metastore/database.py +3 -3
  83. fabricks/metastore/dbobject.py +4 -4
  84. fabricks/metastore/table.py +157 -88
  85. fabricks/metastore/view.py +13 -6
  86. fabricks/utils/_types.py +6 -0
  87. fabricks/utils/azure_table.py +4 -3
  88. fabricks/utils/helpers.py +141 -11
  89. fabricks/utils/log.py +29 -18
  90. fabricks/utils/read/_types.py +1 -1
  91. fabricks/utils/schema/get_schema_for_type.py +6 -0
  92. fabricks/utils/write/delta.py +3 -3
  93. {fabricks-3.0.5.2.dist-info → fabricks-3.0.7.dist-info}/METADATA +2 -1
  94. fabricks-3.0.7.dist-info/RECORD +175 -0
  95. fabricks/api/notebooks/add_fabricks.py +0 -13
  96. fabricks/api/notebooks/optimize.py +0 -29
  97. fabricks/api/notebooks/vacuum.py +0 -29
  98. fabricks/cdc/templates/query/context.sql.jinja +0 -101
  99. fabricks/cdc/templates/query/current.sql.jinja +0 -32
  100. fabricks/cdc/templates/query/deduplicate_hash.sql.jinja +0 -21
  101. fabricks/cdc/templates/query/deduplicate_key.sql.jinja +0 -14
  102. fabricks/cdc/templates/query/hash.sql.jinja +0 -1
  103. fabricks/cdc/templates/query/slice.sql.jinja +0 -14
  104. fabricks/config/__init__.py +0 -0
  105. fabricks/config/base.py +0 -8
  106. fabricks/config/fabricks/__init__.py +0 -26
  107. fabricks/config/fabricks/base.py +0 -90
  108. fabricks/config/fabricks/environment.py +0 -9
  109. fabricks/config/fabricks/pyproject.py +0 -47
  110. fabricks/config/jobs/__init__.py +0 -6
  111. fabricks/config/jobs/base.py +0 -101
  112. fabricks/config/jobs/bronze.py +0 -38
  113. fabricks/config/jobs/gold.py +0 -27
  114. fabricks/config/jobs/silver.py +0 -22
  115. fabricks/config/runtime.py +0 -67
  116. fabricks/config/steps/__init__.py +0 -6
  117. fabricks/config/steps/base.py +0 -50
  118. fabricks/config/steps/bronze.py +0 -7
  119. fabricks/config/steps/gold.py +0 -14
  120. fabricks/config/steps/silver.py +0 -15
  121. fabricks/core/deploy/__init__.py +0 -17
  122. fabricks/core/schedules.py +0 -142
  123. fabricks/core/scripts/__init__.py +0 -9
  124. fabricks/core/scripts/armageddon.py +0 -87
  125. fabricks/core/scripts/stats.py +0 -51
  126. fabricks/core/scripts/steps.py +0 -26
  127. fabricks-3.0.5.2.dist-info/RECORD +0 -177
  128. /fabricks/cdc/templates/{filter → filters}/final.sql.jinja +0 -0
  129. /fabricks/cdc/templates/{filter → filters}/latest.sql.jinja +0 -0
  130. /fabricks/cdc/templates/{filter → filters}/update.sql.jinja +0 -0
  131. /fabricks/cdc/templates/{merge → merges}/scd1.sql.jinja +0 -0
  132. /fabricks/cdc/templates/{merge → merges}/scd2.sql.jinja +0 -0
  133. /fabricks/cdc/templates/{query → queries}/__init__.py +0 -0
  134. /fabricks/cdc/templates/{query → queries}/final.sql.jinja +0 -0
  135. /fabricks/core/{utils.py → parsers/utils.py} +0 -0
  136. /fabricks/core/{scripts → schedules}/generate.py +0 -0
  137. /fabricks/core/{scripts → schedules}/process.py +0 -0
  138. /fabricks/core/{scripts → schedules}/terminate.py +0 -0
  139. {fabricks-3.0.5.2.dist-info → fabricks-3.0.7.dist-info}/WHEEL +0 -0
fabricks/metastore/view.py CHANGED
@@ -1,13 +1,13 @@
- from typing import Optional, Union
+ from typing import Any, Optional, Union
  from uuid import uuid4

  import pandas as pd
  from pyspark.sql import DataFrame, SparkSession
- from pyspark.sql.connect.dataframe import DataFrame as CDataFrame

  from fabricks.context import SPARK
  from fabricks.context.log import DEFAULT_LOGGER
  from fabricks.metastore.dbobject import DbObject
+ from fabricks.utils._types import DataFrameLike


  class View(DbObject):
@@ -18,7 +18,7 @@ class View(DbObject):
          spark: Optional[SparkSession] = None,
      ) -> str:
          if spark is None:
-             if isinstance(df, (DataFrame, CDataFrame)):
+             if isinstance(df, DataFrameLike):
                  spark = df.sparkSession
              else:
                  spark = SPARK
@@ -35,12 +35,19 @@ class View(DbObject):
          return uuid


- def create_or_replace_global_temp_view(name: str, df: DataFrame, uuid: Optional[bool] = False) -> str:
+ def create_or_replace_global_temp_view(
+     name: str,
+     df: DataFrame,
+     uuid: Optional[bool] = False,
+     job: Optional[Any] = None,
+ ) -> str:
      if uuid:
          name = f"{name}__{str(uuid4().hex)}"

-     job = name.split("__")[0]
-     DEFAULT_LOGGER.debug(f"create global temp view {name}", extra={"job": job})
+     if job is None:
+         job = name.split("__")[0]
+
+     DEFAULT_LOGGER.debug(f"create global temp view {name}", extra={"label": job})
      df.createOrReplaceGlobalTempView(name)

      return f"global_temp.{name}"
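Usage note: create_or_replace_global_temp_view now takes an optional job argument that overrides the label attached to the debug log entry; when omitted, the label is still derived from the view name, and the logging extra key changes from job to label. A minimal sketch, assuming an active SparkSession named spark (e.g. in a Databricks notebook); the view name and job label are illustrative:

    from fabricks.metastore.view import create_or_replace_global_temp_view

    df = spark.range(10)  # `spark` is assumed to exist in the notebook session

    # uuid=True appends a random hex suffix to the view name; `job` only controls the log label.
    qualified_name = create_or_replace_global_temp_view("my_view", df, uuid=True, job="gold.my_job")
    print(qualified_name)  # e.g. global_temp.my_view__<hex suffix>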
fabricks/utils/_types.py ADDED
@@ -0,0 +1,6 @@
+ from typing import Union
+
+ from pyspark.sql import DataFrame
+ from pyspark.sql.connect.dataframe import DataFrame as ConnectDataFrame
+
+ DataFrameLike = Union[DataFrame, ConnectDataFrame]
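The new DataFrameLike alias centralises the union of the classic and Spark Connect DataFrame classes that several modules previously imported locally as CDataFrame. A sketch of using the alias as a type annotation; the helper below is illustrative and not part of fabricks:

    from fabricks.utils._types import DataFrameLike

    def count_rows(df: DataFrameLike) -> int:
        # Accepts both pyspark.sql.DataFrame and the Spark Connect DataFrame.
        return df.count()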
fabricks/utils/azure_table.py CHANGED
@@ -3,9 +3,10 @@ from typing import TYPE_CHECKING, List, Optional, Union

  from azure.data.tables import TableClient, TableServiceClient
  from pyspark.sql import DataFrame
- from pyspark.sql.connect.dataframe import DataFrame as CDataFrame
  from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential

+ from fabricks.utils._types import DataFrameLike
+
  if TYPE_CHECKING:
      from azure.core.credentials import TokenCredential

@@ -121,7 +122,7 @@ class AzureTable:
              raise e

      def delete(self, data: Union[List, DataFrame, dict]):
-         if isinstance(data, (DataFrame, CDataFrame)):
+         if isinstance(data, DataFrameLike):
              data = [row.asDict() for row in data.collect()]
          elif not isinstance(data, List):
              data = [data]
@@ -130,7 +131,7 @@ class AzureTable:
          self.submit(operations)

      def upsert(self, data: Union[List, DataFrame, dict]):
-         if isinstance(data, (DataFrame, CDataFrame)):
+         if isinstance(data, DataFrameLike):
              data = [row.asDict() for row in data.collect()]
          elif not isinstance(data, List):
              data = [data]
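Usage note: AzureTable.delete and AzureTable.upsert still accept a list, a dict or a DataFrame; only the isinstance check now goes through the shared DataFrameLike alias. A sketch assuming an already constructed AzureTable instance named table (its constructor is not shown in this diff) and illustrative entity keys:

    # `table` is assumed to be a fabricks.utils.azure_table.AzureTable instance;
    # a (Connect) DataFrame can be passed as well and is collected to row dicts.
    table.upsert({"PartitionKey": "demo", "RowKey": "1", "value": 42})
    table.delete({"PartitionKey": "demo", "RowKey": "1"})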
fabricks/utils/helpers.py CHANGED
@@ -1,11 +1,12 @@
- from concurrent.futures import ThreadPoolExecutor
+ import logging
  from functools import reduce
- from typing import Any, Callable, Iterable, List, Optional, Union
+ from queue import Queue
+ from typing import Any, Callable, Iterable, List, Literal, Optional, Union

  from pyspark.sql import DataFrame
- from pyspark.sql.connect.dataframe import DataFrame as CDataFrame
  from typing_extensions import deprecated

+ from fabricks.utils._types import DataFrameLike
  from fabricks.utils.path import Path
  from fabricks.utils.spark import spark

@@ -34,34 +35,163 @@ def run_threads(func: Callable, iter: Union[List, DataFrame, range, set], worker
      return run_in_parallel(func, iter, workers)


+ def _process_queue_item(func: Callable, task_queue: Queue, result_queue: Queue, stop_signal: Any):
+     """Worker function that processes items from a queue."""
+     while True:
+         try:
+             item = task_queue.get(timeout=1)
+
+             if item is stop_signal:
+                 task_queue.put(stop_signal) # Put it back for other workers
+                 break
+
+             result = func(item)
+             result_queue.put(result)
+         except Exception:
+             continue
+
+
+ def _run_in_parallel_legacy(
+     func: Callable,
+     iterable: Union[List, DataFrame, range, set],
+     workers: int = 8,
+     progress_bar: Optional[bool] = False,
+     position: Optional[int] = None,
+ ) -> List[Any]:
+     from concurrent.futures import ThreadPoolExecutor
+
+     iterable = iterable.collect() if isinstance(iterable, DataFrameLike) else iterable # type: ignore
+
+     with ThreadPoolExecutor(max_workers=workers) as executor:
+         if progress_bar:
+             from tqdm import tqdm
+
+             results = list(tqdm(executor.map(func, iterable), total=len(iterable), position=position))
+         else:
+             results = list(executor.map(func, iterable))
+
+     return results
+
+
  def run_in_parallel(
      func: Callable,
      iterable: Union[List, DataFrame, range, set],
      workers: int = 8,
      progress_bar: Optional[bool] = False,
      position: Optional[int] = None,
+     loglevel: int = logging.CRITICAL,
+     logger: Optional[logging.Logger] = None,
+     run_as: Optional[Literal["ThreadPool", "ProcessPool", "Pool", "Queue", "Legacy"]] = "Legacy",
  ) -> List[Any]:
      """
-     Runs the given function in parallel on the elements of the iterable using multiple threads.
+     Runs the given function in parallel on the elements of the iterable using multiple threads or processes.

      Args:
          func (Callable): The function to be executed in parallel.
          iterable (Union[List, DataFrame, range, set]): The iterable containing the elements on which the function will be executed.
-         workers (int, optional): The number of worker threads to use. Defaults to 8.
+         workers (int, optional): The number of worker threads/processes to use. Defaults to 8.
+         progress_bar (Optional[bool], optional): Whether to display a progress bar. Defaults to False.
+         position (Optional[int], optional): Position for the progress bar. Defaults to None.
+         loglevel (int, optional): Log level to set during execution. Defaults to logging.CRITICAL.
+         logger (Optional[logging.Logger], optional): Logger instance to use. Defaults to None.
+         run_as (Optional[Literal["ThreadPool", "ProcessPool", "Pool", "Queue"]], optional): Type of run as to use.

      Returns:
          List[Any]: A list containing the results of the function calls.

      """
-     iterable = iterable.collect() if isinstance(iterable, (DataFrame, CDataFrame)) else iterable # type: ignore
+     if logger is None:
+         logger = logging.getLogger()

-     with ThreadPoolExecutor(max_workers=workers) as executor:
-         if progress_bar:
-             from tqdm import tqdm
+     current_loglevel = logger.getEffectiveLevel()
+     logger.setLevel(loglevel)
+
+     if run_as == "Legacy":
+         results = _run_in_parallel_legacy(
+             func=func,
+             iterable=iterable,
+             workers=workers,
+             progress_bar=progress_bar,
+             position=position,
+         )
+
+     else:
+         iterables = iterable.collect() if isinstance(iterable, DataFrameLike) else iterable # type: ignore
+         results = []
+
+         if run_as == "Queue":
+             import threading
+
+             task_queue = Queue()
+             result_queue = Queue()
+             stop_signal = object()
+
+             for item in iterables:
+                 task_queue.put(item)
+
+             task_queue.put(stop_signal)
+
+             threads = []
+             for _ in range(workers):
+                 t = threading.Thread(target=_process_queue_item, args=(func, task_queue, result_queue, stop_signal))
+                 t.start()
+
+                 threads.append(t)
+
+             if progress_bar:
+                 from tqdm import tqdm
+
+                 with tqdm(total=len(iterables), position=position) as t:
+                     for _ in range(len(iterables)):
+                         result = result_queue.get()
+                         results.append(result)
+
+                         t.update()
+                         t.refresh()
+
+             else:
+                 for _ in range(len(iterables)):
+                     results.append(result_queue.get())
+
+             for t in threads:
+                 t.join()
+
+         elif run_as == "Pool":
+             from multiprocessing import Pool
+
+             with Pool(processes=workers) as p:
+                 if progress_bar:
+                     from tqdm import tqdm
+
+                     with tqdm(total=len(iterables), position=position) as t:
+                         for result in p.map(func, iterables):
+                             results.append(result)
+
+                             t.update()
+                             t.refresh()
+
+                 else:
+                     results = list(p.map(func, iterables))

-             results = list(tqdm(executor.map(func, iterable), total=len(iterable), position=position))
          else:
-             results = list(executor.map(func, iterable))
+             from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
+
+             Executor = ProcessPoolExecutor if run_as == "ProcessPool" else ThreadPoolExecutor
+             with Executor(max_workers=workers) as exe:
+                 if progress_bar:
+                     from tqdm import tqdm
+
+                     with tqdm(total=len(iterables), position=position) as t:
+                         for result in exe.map(func, iterables):
+                             results.append(result)
+
+                             t.update()
+                             t.refresh()
+
+                 else:
+                     results = list(exe.map(func, iterables))
+
+     logger.setLevel(current_loglevel)

      return results

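run_in_parallel keeps the previous ThreadPoolExecutor behaviour as the default (run_as="Legacy", now factored out into _run_in_parallel_legacy) and adds queue-, multiprocessing- and executor-based backends, plus loglevel/logger to quieten logging while the workers run. A usage sketch with illustrative arguments:

    from fabricks.utils.helpers import run_in_parallel

    def square(x: int) -> int:
        return x * x

    # Default backend is unchanged ("Legacy" thread pool):
    results = run_in_parallel(square, range(10), workers=4)

    # Opt into the queue-based backend with a progress bar (requires tqdm):
    results = run_in_parallel(square, range(10), workers=4, progress_bar=True, run_as="Queue")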
fabricks/utils/log.py CHANGED
@@ -3,7 +3,9 @@ import json
  import logging
  import sys
  from datetime import datetime
+ from datetime import timezone as tz
  from typing import Optional, Tuple
+ from zoneinfo import ZoneInfo

  from pyspark.sql import DataFrame

@@ -11,12 +13,11 @@ from fabricks.utils.azure_table import AzureTable


  class LogFormatter(logging.Formatter):
-     def __init__(self, debugmode: Optional[bool] = False):
+     def __init__(self, debugmode: Optional[bool] = False, timezone: Optional[str] = None):
          super().__init__(fmt="%(levelname)s%(prefix)s%(message)s [%(timestamp)s]%(extra)s")

-         if debugmode is None:
-             debugmode = False
-         self.debugmode = debugmode
+         self.debugmode = False if debugmode is None else debugmode
+         self.timezone = ZoneInfo(timezone) if timezone else tz.utc

      COLORS = {
          logging.DEBUG: "\033[36m",
@@ -37,8 +38,8 @@ class LogFormatter(logging.Formatter):
          "CRITICAL": "",
      }

-     def formatTime(self, record):
-         ct = datetime.fromtimestamp(record.created)
+     def formatTime(self, record) -> str:
+         ct = datetime.fromtimestamp(record.created, tz=tz.utc).astimezone(self.timezone)
          s = ct.strftime("%d/%m/%y %H:%M:%S")
          return f"{self.COLORS[logging.DEBUG]}{s}{self.RESET}"

@@ -48,8 +49,11 @@
          levelname_formatted = f"{self.COLORS[record.levelno]}{levelname}:{padding}{self.RESET}"

          prefix = ""
-         if hasattr(record, "job"):
-             prefix = f"{record.__dict__.get('job')} - "
+
+         if hasattr(record, "label"):
+             prefix = f"{record.__dict__.get('label')} - "
+         elif hasattr(record, "job"):
+             prefix = f"{record.__dict__.get('job')} - " # keep for backward compatibility
          elif hasattr(record, "step"):
              prefix = f"{self.BRIGHT}{record.__dict__.get('step')}{self.RESET} - "
@@ -65,6 +69,9 @@
          if hasattr(record, "content"):
              extra += f"\n---\n{record.__dict__.get('content')}\n---"

+         if hasattr(record, "context"):
+             extra += f"\n---\n{json.dumps(record.__dict__.get('context'), indent=2, default=str)}\n---"
+
          if hasattr(record, "df"):
              df = record.__dict__.get("df")
              if isinstance(df, DataFrame):
@@ -79,15 +86,19 @@


  class AzureTableLogHandler(logging.Handler):
-     def __init__(self, table: AzureTable, debugmode: Optional[bool] = False):
+     def __init__(self, table: AzureTable, debugmode: Optional[bool] = False, timezone: Optional[str] = None):
          super().__init__()

          self.buffer = []
          self.table = table

-         if debugmode is None:
-             debugmode = False
-         self.debugmode = debugmode
+         self.debugmode = False if debugmode is None else debugmode
+         self.timezone = ZoneInfo(timezone) if timezone else tz.utc
+
+     def formatTime(self, record) -> str:
+         ct = datetime.fromtimestamp(record.created, tz=tz.utc).astimezone(self.timezone)
+         s = ct.strftime("%d/%m/%y %H:%M:%S")
+         return s

      def emit(self, record):
          if hasattr(record, "target"):
@@ -108,9 +119,7 @@
          level = "INFO"

          r = {
-             "Created": str(
-                 datetime.fromtimestamp(record.created).strftime("%d/%m/%y %H:%M:%S")
-             ), # timestamp not present when querying Azure Table
+             "Created": self.formatTime(record), # timestamp not present when querying Azure Table
              "Level": level,
              "Message": record.message,
          }
@@ -178,7 +187,8 @@
  class CustomConsoleHandler(logging.StreamHandler):
      def __init__(self, stream=None, debugmode: Optional[bool] = False):
          super().__init__(stream or sys.stderr)
-         self.debugmode = debugmode if debugmode is not None else False
+
+         self.debugmode = False if debugmode is None else debugmode

      def emit(self, record):
          if hasattr(record, "sql"):
@@ -193,6 +203,7 @@ def get_logger(
      level: int,
      table: Optional[AzureTable] = None,
      debugmode: Optional[bool] = False,
+     timezone: Optional[str] = None,
  ) -> Tuple[logging.Logger, Optional[AzureTableLogHandler]]:
      logger = logging.getLogger(name)
      if logger.hasHandlers():
@@ -208,12 +219,12 @@
      # Console handler
      console_handler = CustomConsoleHandler(debugmode=debugmode)
      console_handler.setLevel(level)
-     console_format = LogFormatter(debugmode=debugmode)
+     console_format = LogFormatter(debugmode=debugmode, timezone=timezone)
      console_handler.setFormatter(console_format)

      if table is not None:
          # Azure Table handler
-         azure_table_handler = AzureTableLogHandler(table=table, debugmode=debugmode)
+         azure_table_handler = AzureTableLogHandler(table=table, debugmode=debugmode, timezone=timezone)
          azure_table_handler.setLevel(level)
      else:
          azure_table_handler = None
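get_logger, LogFormatter and AzureTableLogHandler now accept an optional IANA timezone name; timestamps are built in UTC and converted with zoneinfo, falling back to UTC when no timezone is given, and the console prefix now prefers a label extra over the legacy job extra. A sketch with an illustrative logger name and timezone:

    import logging

    from fabricks.utils.log import get_logger

    logger, azure_handler = get_logger("fabricks", logging.INFO, timezone="Europe/Zurich")
    logger.info("step done", extra={"label": "gold.my_job"})  # prefix comes from `label` (or legacy `job`)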
fabricks/utils/read/_types.py CHANGED
@@ -1,3 +1,3 @@
  from typing import Literal

- IOModes = Literal["overwrite", "append"]
+ AllowedIOModes = Literal["overwrite", "append"]
fabricks/utils/schema/get_schema_for_type.py CHANGED
@@ -90,4 +90,10 @@ def get_schema_for_type(proptype: Type) -> DataType: # type: ignore
          fields = [StructField(f.name, get_schema_for_type(f.type)) for f in dataclasses.fields(proptype)]
          return StructType(fields=fields)

+     if hasattr(proptype, "__origin__") and proptype.__origin__ == dict: # noqa E721
+         if len(proptype.__args__) == 2:
+             value_type = proptype.__args__[1]
+             value_schema = get_schema_for_type(value_type)
+             return MapType(StringType(), value_schema)
+
      raise NotImplementedError()
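get_schema_for_type now also handles parameterised dict annotations: Dict[K, V] is mapped to MapType(StringType(), schema-of-V), with the key type always emitted as StringType regardless of K. A sketch with illustrative types:

    from typing import Dict

    from fabricks.utils.schema.get_schema_for_type import get_schema_for_type

    # Dict[str, int] is now translated to MapType(StringType(), <schema derived from int>).
    schema = get_schema_for_type(Dict[str, int])
    print(schema)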
fabricks/utils/write/delta.py CHANGED
@@ -3,17 +3,17 @@ from typing import List, Optional, Union, get_args
  from pyspark.sql import DataFrame

  from fabricks.utils.path import Path
- from fabricks.utils.read._types import IOModes
+ from fabricks.utils.read._types import AllowedIOModes


  def write_delta(
      df: DataFrame,
      path: Path,
-     mode: IOModes,
+     mode: AllowedIOModes,
      options: Optional[dict[str, str]] = None,
      partition_by: Union[Optional[List[str]], str] = None,
  ):
-     assert mode in list(get_args(IOModes))
+     assert mode in list(get_args(AllowedIOModes))

      if isinstance(partition_by, str):
          partition_by = [partition_by]
{fabricks-3.0.5.2.dist-info → fabricks-3.0.7.dist-info}/METADATA CHANGED
@@ -1,12 +1,13 @@
  Metadata-Version: 2.4
  Name: fabricks
- Version: 3.0.5.2
+ Version: 3.0.7
  Author-email: BMS DWH Team <bi_support@bmsuisse.ch>
  Requires-Python: <4,>=3.9
  Requires-Dist: azure-data-tables<13,>=12.5.0
  Requires-Dist: azure-identity>=1.10.0
  Requires-Dist: azure-storage-blob>=12.14.1
  Requires-Dist: azure-storage-queue<13,>=12.10.0
+ Requires-Dist: databricks-cli>=0.18.0
  Requires-Dist: databricks-sdk>=0.20.0
  Requires-Dist: importlib-metadata>=8.6.1
  Requires-Dist: jinja2>=2.11.3
fabricks-3.0.7.dist-info/RECORD ADDED
@@ -0,0 +1,175 @@
+ fabricks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ fabricks/api/__init__.py,sha256=4dnYubkrmdep64e7_X8Wsk4Kc2N487oOct8H4qPlbjs,251
+ fabricks/api/context.py,sha256=tfGkdJNTJTc0meReV7y-9eL0_imF3nM2OIJOJRPBWso,513
+ fabricks/api/core.py,sha256=5ctTW5N3tPnuh69OrDb7eS32Ol19dIICAWQ5VMDYY7A,157
+ fabricks/api/deploy.py,sha256=4sZiYogkS3kZ2RZNxWtrB-vnI0SVoocxwwZrH4x1CZc,57
+ fabricks/api/exceptions.py,sha256=F_V1hy0rnICdR_nFacRldoI3KbgA3oG02KffHWm3yjA,409
+ fabricks/api/extenders.py,sha256=Qk4ZDgEkXe-dtPINmePkbtWlhkC209X6YKkqG-8lHAo,69
+ fabricks/api/job_schema.py,sha256=Gtovg2RXFMpbfjD-wCc2H0GvRT1nTuMJHcuy7G42qU4,120
+ fabricks/api/log.py,sha256=WKthYM8n_E1ng9x_XCVm7cJkfRvtd6Y470tgwz3vd2Y,130
+ fabricks/api/masks.py,sha256=M1w5WsoNhM1Z0vIu3FSfM_VcNNPR269UuqKXvA5MbwM,117
+ fabricks/api/parsers.py,sha256=nPUDzQ_Hz0fVmnBfGCqqHo7X7R6M-oGsXWDYSikjB54,121
+ fabricks/api/schedules.py,sha256=gdYrujsUiCrVMMJaKMDkt7McncO-uZPl_QNgp10gZZk,215
+ fabricks/api/udfs.py,sha256=3JTX4OWkoW7_AP9pUKHVS0C6zIBVdOJoAn8MpmB6R48,124
+ fabricks/api/utils.py,sha256=a-YrCXkDFzMmcNN8QOSDs_-YQtSePaDP4C4WYMX2AEg,196
+ fabricks/api/version.py,sha256=FukX94EbtmJMeajxyOwka8fMfFeaVc0cuM3I5CVIuK8,85
+ fabricks/api/views.py,sha256=dPqsGgDs9QOYc-5_QG_i4F_VoaFO5hGZQnIPZ31h5Ps,156
+ fabricks/api/cdc/__init__.py,sha256=Cl3LhLbQrA42IvNLqoV7CCbjQEYQMJfO6cAZv1l1aas,196
+ fabricks/api/cdc/nocdc.py,sha256=3E1Cn6cPHfEszGMaHEknrLqEvVKS-5-hk8s_GRu6TYY,58
+ fabricks/api/cdc/scd1.py,sha256=3mlWD50g-8SLMV51BLhtEz4ETcnOWgkOdaNIZCl75WY,55
+ fabricks/api/cdc/scd2.py,sha256=2XNDDvY0xaaqcq8o7V3Z2L0oTEUCPNaB-MxN4YDWcvQ,55
+ fabricks/api/metastore/__init__.py,sha256=ikM4mnzZRpcJ_CZcknYAq1qkWvKG2la-O453va8w1uo,255
+ fabricks/api/metastore/database.py,sha256=e1MMgY312aChjhVpHDuqZatoyp4ucp0_-6HIK-ah-hI,64
+ fabricks/api/metastore/table.py,sha256=2jsbBk90jzxZbHXm3MizwNGRruVrjIRMwD0_nK7WhiM,58
+ fabricks/api/metastore/view.py,sha256=MBUSN0YW8CcLoVJdkV5hFmKfS8CmqEMJkbG0pdToAF8,132
+ fabricks/api/notebooks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ fabricks/api/notebooks/cluster.py,sha256=86AX2pS98XJ9RPxPS5hHJBoQw-rlQ3i4hpRoWeg8DPc,151
+ fabricks/api/notebooks/initialize.py,sha256=5M8VWECIdJgpB2Xf_RYOp90Cuil4KTCYLMUEd3gZQV8,803
+ fabricks/api/notebooks/process.py,sha256=9qwIVyBgufBdVFW1yZCxxeRpwBJQsIxVBXFd2HRubIo,1157
+ fabricks/api/notebooks/run.py,sha256=L7kZtti8D20jTQmAe6_a-8f92qHYFh3Td3x0qFKoFzo,1271
+ fabricks/api/notebooks/schedule.py,sha256=N15KaRQWW1KEjvi_NEw4Aa_t0Vr_C5tclTDgliVopXo,1571
+ fabricks/api/notebooks/terminate.py,sha256=ef6I0Pj3rlE_BlMiEXR6AkLxOuYS9tyWoj9ZJ-B0Y60,741
+ fabricks/cdc/__init__.py,sha256=bkrlLlhXThIvtABuOkNYJaQudICGw2Q8TbyZUVYORaw,316
+ fabricks/cdc/cdc.py,sha256=2CjPUtogWjnvyLjwiyVllcyDV1gpJ0QoRP0yUsiHXuc,69
+ fabricks/cdc/nocdc.py,sha256=ObWojMINqwzNSipIKMj57wvQy3on_EIh65X-7MAspIs,490
+ fabricks/cdc/scd.py,sha256=HzC9ifEu45B4P2aOSgi97AGB-C56l6sKTLqdVinnHKo,670
+ fabricks/cdc/scd1.py,sha256=WsOVRsp55WEw4-7nEtb3dfv310icExrj-zEJSEehyz8,334
+ fabricks/cdc/scd2.py,sha256=4vZkhc8pJAUlgiBmIw9j_2RsWuAFMcgCkU3WMVt0A-A,334
+ fabricks/cdc/base/__init__.py,sha256=kU4LmQ7x1rekCt8T3r83MmAQac6n2ov-Gh8mBbxIC48,157
+ fabricks/cdc/base/_types.py,sha256=WloCDC3ATrn0aZJ6E8BRYKZx19N3EE56r6qlBYhcuvQ,257
+ fabricks/cdc/base/cdc.py,sha256=9w5BqQxSVbFVEozJWmZQThqdppkE_SYi4fHSzJ7WMvA,78
+ fabricks/cdc/base/configurator.py,sha256=w6Ywif87iv1WG-5OM3XkzIRrsns-_QQ6XlADpk0YLlw,6434
+ fabricks/cdc/base/generator.py,sha256=pa_GJn7Pdi5vMnXN8zExmOPMpCqdZ3QoxHEB0wv0lsk,5933
+ fabricks/cdc/base/merger.py,sha256=1_j-hKnKKEcbogyXX0Cm2IdyB-tpDJAIMIz1MwWnXX0,4158
+ fabricks/cdc/base/processor.py,sha256=b8ATjVX-dW8JCrt4n3v8HlteTi1498jrItEla0BqynU,17689
+ fabricks/cdc/templates/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ fabricks/cdc/templates/filter.sql.jinja,sha256=AQcOj4KRwKscVG3zepnEAN9Yxb03AM7068hqW7dtVI8,236
+ fabricks/cdc/templates/merge.sql.jinja,sha256=YS9wWckCVsUI1pUYiRSFBIuQ16WU3EPWSkhZVy2niBA,221
+ fabricks/cdc/templates/query.sql.jinja,sha256=Z0kSm9sRKJTLQ2Lb3NS7yu93GBxNls9HL7uAjTdirjk,868
+ fabricks/cdc/templates/ctes/base.sql.jinja,sha256=gO0dEQ00_NXOeZKRxfWvoUjpks0nHK-Is5H7xdI_J6s,1637
+ fabricks/cdc/templates/ctes/current.sql.jinja,sha256=bb72XdaJVce5k57dx_N6T4OfyQDUrFLulr3VM6irdn0,1278
+ fabricks/cdc/templates/ctes/deduplicate_hash.sql.jinja,sha256=DHcSNhAvoFZY0gSv3Zylat9ysGXKR22OGXs49KTNCjA,1095
+ fabricks/cdc/templates/ctes/deduplicate_key.sql.jinja,sha256=4-K4nk2Bfr_8c_x3YDYBP7JUx4c-b3Ef-7HFx0s2jrc,1122
+ fabricks/cdc/templates/ctes/rectify.sql.jinja,sha256=OfZmEmlI-0gvkD7Fa1sncWU9SJkcrR4mnlTIvX1Ysp0,4727
+ fabricks/cdc/templates/ctes/slice.sql.jinja,sha256=250I2rqd4wFgmmVDEaeJ3GdKofHcRZ3eBPSo-BOxIV0,80
+ fabricks/cdc/templates/filters/final.sql.jinja,sha256=x7pWNxxkw39FNt7KU5Se9_v-2tziItdL_rgzStSRv8U,173
+ fabricks/cdc/templates/filters/latest.sql.jinja,sha256=UIkxx5XRVbXFVaEuPXbToMCTycsomU27g9Y16gw31Q4,584
+ fabricks/cdc/templates/filters/update.sql.jinja,sha256=O_pqUljzUuyPGMtXR7Ia9dRj468h4L-oqVmwAKseL1I,1206
+ fabricks/cdc/templates/macros/bactick.sql.jinja,sha256=VYRlBRRvfRID83naDuAAWp5JmR9f9-46xTW2-8lr8DY,59
+ fabricks/cdc/templates/macros/hash.sql.jinja,sha256=0auoKcV9idL4Y6b4JP6ODaTFHK7L5nmY3MEL9YeKd5o,594
+ fabricks/cdc/templates/merges/nocdc.sql.jinja,sha256=lA4-PXogC_u6RqummOQX_WBFCNq_GMEXdO4aL3_Eg0o,825
+ fabricks/cdc/templates/merges/scd1.sql.jinja,sha256=GimfwgEobGgCzPce_FJdvQY9jmRJXFUZ4_CVhRgTTqY,1600
+ fabricks/cdc/templates/merges/scd2.sql.jinja,sha256=5qBO_1lr7xa-Ep8RqvJGCRISOv4uo_tiAtVOybp4tUU,1210
+ fabricks/cdc/templates/queries/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ fabricks/cdc/templates/queries/context.sql.jinja,sha256=19mAAAM9Gh2PAzuupCzc68zxEHH3spTpHZHGU9vke8o,3883
+ fabricks/cdc/templates/queries/final.sql.jinja,sha256=vxH434CO5k8Ia7tugaH8LC1co7Epaj7Z1M7Y9BdqzaI,111
+ fabricks/cdc/templates/queries/scd1.sql.jinja,sha256=siHULgKE3uRBGQYZFUR_eHNqFuGgO9xUCRVV2jnAXAI,3019
+ fabricks/cdc/templates/queries/scd2.sql.jinja,sha256=Nn0wUs9N-_QviZqUKRWAFdD17RR3EFBTMs9BpBu6z7E,3877
+ fabricks/cdc/templates/queries/nocdc/complete.sql.jinja,sha256=cVKKCSbiuuw1K7BOzfusX6KvzQNHU3YNUgoXgsu-c6k,267
+ fabricks/cdc/templates/queries/nocdc/update.sql.jinja,sha256=SDJFo7rWiU58DqzOG-Pl76Dto82PIHWDdKlX62M1j58,1299
+ fabricks/context/__init__.py,sha256=qfntJ9O6omzY_t6AhDP6Ndu9C5LMiVdWbo6ikhtoe7o,1446
+ fabricks/context/_types.py,sha256=FzQJ35vp0uc6pAq18bc-VHwMVEWtd0VDdm8xQmNr2Sg,2681
+ fabricks/context/log.py,sha256=CadrRf8iL6iXlGIGIhEIswa7wGqC-E-oLwWcGTyJ10s,2074
+ fabricks/context/runtime.py,sha256=7pXU_5gPyvChwiyxP4ch59eAgyoNOH9jMUlGWxwXWn8,3915
+ fabricks/context/secret.py,sha256=LEx7MXSFm29wvsBffNSIQ6p73wqqmFj2jmU2y64h-Fc,3191
+ fabricks/context/spark_session.py,sha256=BPaxKJXHZDI5oQiOPhmua_xjXnrVgluh--AVpvUgbck,2553
+ fabricks/context/utils.py,sha256=EQRscdUhdjwk2htZu8gCgNZ9PfRzzrR6e1kRrIbVlBM,2786
+ fabricks/context/config/__init__.py,sha256=pFEsGXBQkX5_FP0cwQMX427j6dQuTx81NR9snMxc8cU,3127
+ fabricks/context/config/utils.py,sha256=7KCTUiSbqQnDD5mbCO9_o1KbUgD-Xbei_UGgpMQi9nE,1371
+ fabricks/core/__init__.py,sha256=LaqDi4xuyHAoLOvS44PQdZdRfq9SmVr7mB6BDHyxYpc,209
+ fabricks/core/extenders.py,sha256=39bSm9QiW4vBAyT659joE-5p_EZiNM4gi8KA3-OgX3E,917
+ fabricks/core/job_schema.py,sha256=6-70oy0ZJd3V9AiXfc0Q8b8NVEynxQza_h7mB13uB-s,853
+ fabricks/core/masks.py,sha256=3UCxcCi-TgFHB7xT5ZvmEa8RMKev23X_JLE70Pr3rpY,1347
+ fabricks/core/udfs.py,sha256=gu7K-ohxcO0TdgA7IjzVMOZatZQYhFTklHo60a6k_Yc,2960
+ fabricks/core/views.py,sha256=52tekqeP0Xk5EPYO220YdfFbzItX6NnObROb-ye9COQ,1181
+ fabricks/core/dags/__init__.py,sha256=0DUKzVcXcROvxkN19P_kaOJ7da5BAM7Vt8EGQbp2KSY,240
+ fabricks/core/dags/base.py,sha256=tFj27SqeZUZ7pB_LOWkpdowZz5gj30JUANI4gWK3Pl8,3139
+ fabricks/core/dags/generator.py,sha256=4fp_CRsWnl_UauM9Jx-E4UCaxnm2_Q5103J58fRws2U,4832
+ fabricks/core/dags/log.py,sha256=v1xfpQGfddHDz9lflvXOWTXMde3CdERo9jzeSmNDRhY,402
+ fabricks/core/dags/processor.py,sha256=dcEWk0y2fuNt7RfxkeCny5axdg5aE4UpD7QwEijcQWM,7864
+ fabricks/core/dags/run.py,sha256=RIDULb9WakObSyYzmkglh8RwFRwC8-NFC-1yPDMkBC0,1074
+ fabricks/core/dags/terminator.py,sha256=Y6pV2UnSyrCIx2AQWJXoHk5Roq12gZqpennHx_Lbnzs,793
+ fabricks/core/dags/utils.py,sha256=4kyobLGl4tO0Flo6LxNzYjCU_G42vns1LrkxTO5_KLY,1585
+ fabricks/core/jobs/__init__.py,sha256=W_1m6LoGiXBml_8cucedju_hllSSnZGKsZjyFh-2oJw,531
+ fabricks/core/jobs/bronze.py,sha256=eDH2YLHbOgBoJoGZTFLJS9igqnqkeJtM56nahQK6zJ4,13815
+ fabricks/core/jobs/get_job.py,sha256=35zay3Z_WoJIylzEQlGle6UvrE1EClfRbFEVGvszof0,3675
+ fabricks/core/jobs/get_job_conf.py,sha256=3vAj_usCbNqejMUKOF85LPaHBYAwxgrDG7LYgY-vBUw,4812
+ fabricks/core/jobs/get_job_id.py,sha256=6dLyzxGHlRvJZVJSwZkCk3iXzWkIhePC_6FhoP0gEN4,607
+ fabricks/core/jobs/get_jobs.py,sha256=nJ-8DPFq1GyzWo9Mxlwq2dEeAqwg1jeQg-CHietAb1Q,3341
+ fabricks/core/jobs/get_schedule.py,sha256=46pJR5LWZfuxUtLBmtB-RP6ng_W-K-ahJmD29KNmcGw,259
+ fabricks/core/jobs/get_schedules.py,sha256=kryDUBrBrtAaMp8Ou5YqMOCOMKvg1GmbbOQBtiiRleM,794
+ fabricks/core/jobs/gold.py,sha256=HzeuuOtsjr3lsasBJ1ODzLQzcoanhbzZiP6a9acNsnA,14503
+ fabricks/core/jobs/silver.py,sha256=kdrCBfh1jkhWJUFubGUV4kxan5eRUZl-LI-iSJxyJE4,13093
+ fabricks/core/jobs/base/__init__.py,sha256=_AdWtyL7yZG2TOZ9e8WyNPrOjmm6EDkI_TNym5cLDws,208
+ fabricks/core/jobs/base/_types.py,sha256=y66BtJlJskq7wGzn7te5XYjO-NEqeQGUC11kkbew8AU,8405
+ fabricks/core/jobs/base/checker.py,sha256=Cdfh8rQYy4MvMFl0HyC3alGUWm8zrFXk08m2t2JMu6Y,5477
+ fabricks/core/jobs/base/configurator.py,sha256=9G5F7Qg5FWHPbHgdh8Qxc85OoSX0rnjD4c9itwU5KKc,10415
+ fabricks/core/jobs/base/exception.py,sha256=HrdxEuOfK5rY-ItZvEL3iywLgdpYUpmWFkjjjks7oYc,2318
+ fabricks/core/jobs/base/generator.py,sha256=NlJgR1461rx4bOBfvpmBrS6PvNeHD6QH6FOqAZiXDvM,16987
+ fabricks/core/jobs/base/invoker.py,sha256=FvjfpNqi542slxC2yLu1BIu5EklNUWySxDF8cD_SqKQ,7602
+ fabricks/core/jobs/base/job.py,sha256=dWmk2PpQH2NETaaDS6KoiefRnDHfDMdCyhmogkdcSFI,93
+ fabricks/core/jobs/base/processor.py,sha256=qkNiJSSLaEnivKGBcd9UZyIVFexnv-n1p_5mCZIy1rA,9076
+ fabricks/core/parsers/__init__.py,sha256=TGjyUeiiTkJrAxIpu2D_c2rQcbe5YRpmBW9oh0F9AqU,322
+ fabricks/core/parsers/_types.py,sha256=JC2Oh-wUvaX8SBzeuf5owPgRaj-Q3-7MXxyIYPQ7QwA,147
+ fabricks/core/parsers/base.py,sha256=P8IrLQKGakwaAQ-4gf4vElVwWoSpkixYd9kNthu1VDM,3292
+ fabricks/core/parsers/decorator.py,sha256=kn_Mj-JLWTFaRiciZ3KavmSUcWFPY3ve-buMruHrX_Q,307
+ fabricks/core/parsers/get_parser.py,sha256=TTnVPwKqKpFu6jJJnXEuiEctWGtimk8w2p1jF2U7ibg,909
+ fabricks/core/parsers/utils.py,sha256=qdn2ElpqBgDsW55-tACWZaFOT0ebrBYg2fenqSgd6YI,2456
+ fabricks/core/schedules/__init__.py,sha256=bDjNMcm7itimAo4gun0W4W9bZKwZmWUjkMqAQIcqI2Y,431
+ fabricks/core/schedules/diagrams.py,sha256=DoQR80DLs0CQpUzxscBeO1mWNjgx8btBWaqcj2EFOBc,1379
+ fabricks/core/schedules/generate.py,sha256=aKnAe7ZCafAczLa4ka9Er_oltOxgXyNoS63_OZEktcE,623
+ fabricks/core/schedules/get_schedule.py,sha256=PJcEq4enlsRJunS-MjXi-VFIczbeuBStP2giZ_-EaRc,116
+ fabricks/core/schedules/get_schedules.py,sha256=b6KSl-QmiNgih2l6dESB0va9yDVxaGOJ_ZB96Wc3NC8,174
+ fabricks/core/schedules/process.py,sha256=6124f7AbCSFjseXQsLIX_4kl_-tesi_CtwzYZKOMNRE,305
+ fabricks/core/schedules/run.py,sha256=AcRiyUmepAUzlGO_DxO-JhRUtieXfxE6sR8xfRWYlWI,58
+ fabricks/core/schedules/terminate.py,sha256=-RvtOrxTOZl2sZQ6KfNHJL6H2LCAEMSVRyylA-wVl_w,167
+ fabricks/core/schedules/views.py,sha256=8hYwPLCvvN-nem2lNAKvUY5hC71v88z4-y8j0poUApM,1949
+ fabricks/core/steps/__init__.py,sha256=JP-kaDa890-9XqBSPp6YdssAexdxv-MqQ__WfVYdgeg,132
+ fabricks/core/steps/_types.py,sha256=VxIrH3nFwmPlwG-UI8sDDP0AwK_9jlsy6yQp6YfgtqE,90
+ fabricks/core/steps/base.py,sha256=UTzVqdWaho9zgMrloq8ndqcWQ9R5Z-O1SInIrm9byB4,14403
+ fabricks/core/steps/get_step.py,sha256=8q4rEDdTTZNJsXB2l5XY-Ktoow8ZHsON_tx5yKMUIzg,284
+ fabricks/core/steps/get_step_conf.py,sha256=UPT3gB1Sh5yzawZ9qiVQlvVAKaxPX82gaWBDzxx75EM,633
+ fabricks/deploy/__init__.py,sha256=CD--5eD60OIUwKAIiay_L82rHWJz71KWmtSvCKq5D80,2648
+ fabricks/deploy/masks.py,sha256=umDRGoeyGnJ6_-Ug7926oDEtkdp9Ja5TtgznpQBUzBY,198
+ fabricks/deploy/notebooks.py,sha256=pfmD1K-TCOuuxttEK45Ti1RDc-_nIOmWlb2EWr1Vp28,1545
+ fabricks/deploy/schedules.py,sha256=rlT1PotBR0cig-k99Gi7a3DLjWoNujmnFPQn3iU0yLU,220
+ fabricks/deploy/tables.py,sha256=IF822oxOCy12r08Dz54YUK5luud6dtTPxJ4TUIHE-No,2621
+ fabricks/deploy/udfs.py,sha256=7fw3O5LgOOxDEhuS3s1yFdqybgFh65r_1IdfZUYeejs,597
+ fabricks/deploy/utils.py,sha256=V41r1zVT9KcsICqTLAzpb4ixRk2q2ybJMrGhkPOtG6k,5099
+ fabricks/deploy/views.py,sha256=Ket511q0v8QHgkNArb4qVPFDuuQnQn3v-fvr7KO6oxM,14360
+ fabricks/metastore/README.md,sha256=utPUGAxmjyNMGe43GfL0Gup4MjeTKKwyiUoNVSfMquI,51
+ fabricks/metastore/__init__.py,sha256=RhjY2CuqtZBg8fEizzzvW8qszqCM-vSCL1tQGuzoato,174
+ fabricks/metastore/_types.py,sha256=NXYxwQHP0sCllM0N6QBbaK4CdtM_m_rHFDxRNRfBcLU,1919
+ fabricks/metastore/database.py,sha256=23VAKKzjrwlEaj28DNNmiOhcfdKRzYk8eEfq-PzINbg,1924
+ fabricks/metastore/dbobject.py,sha256=ve8p48OqEpJYsqWNhgesGSE0emM--uY8QrvBRoR3j3g,1881
+ fabricks/metastore/pyproject.toml,sha256=6RZM9RMKMDF_EAequhORZ7TD0BQNk7aBCTWAv-sRcp0,519
+ fabricks/metastore/table.py,sha256=mos701lEU_EwjdwwGb2ey-RSebkg8Ubms2VcOJvIARA,27937
+ fabricks/metastore/utils.py,sha256=8SxhjDkz_aSH4IGUusel7hqOQxP9U8PNBCY0M7GH00Y,1355
+ fabricks/metastore/view.py,sha256=f7hKJWtnH1KmZym8dkoucKOTndntzai_f2YqferxHLs,1431
+ fabricks/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ fabricks/utils/_types.py,sha256=AuOhknlozqx5QdAdvZSA6xAWhU8k4nxG1vxIkOVgHeY,184
+ fabricks/utils/azure_queue.py,sha256=wtKAq_MD5QLxelerDO475dzL-SySIrxt9d5KGi-8vvw,3102
+ fabricks/utils/azure_table.py,sha256=7tCD1iM7UWREaSQVVmtgHCAebLtWPP9ZmuU5zDALyo0,5305
+ fabricks/utils/console.py,sha256=X4lLgL_UxCjoFRx-ZRCwzdBveRGPKlFYZDi6vl7uevQ,1017
+ fabricks/utils/fdict.py,sha256=cdnvNBSXKJIDKSdhQGJA4CGv0qLn5IVYKQ111l7nM9I,7978
+ fabricks/utils/helpers.py,sha256=h7SuOVpBP5qcgX1nM1suvkXG9BhiK5-257EBepCvrO8,7452
+ fabricks/utils/log.py,sha256=LCQEM81PhdojiyLrtEzv1QM__bWbaEhGddyd0IqyGXM,7985
+ fabricks/utils/path.py,sha256=Bs3PayWtg62-mrsDbvu8kh0VLZZhX7tU9YiyHFiYNhs,6698
+ fabricks/utils/pip.py,sha256=UHo7NTjFGJNghWBuuDow28xUkZYg2YrlbAP49IxZyXY,1522
+ fabricks/utils/pydantic.py,sha256=W0fiDLVMFrrInfQw2s5YPeSEvkN-4k864u3UyPoHaz4,2516
+ fabricks/utils/spark.py,sha256=QWVpbGwOvURIVBlR7ygt6NQ9MHUsIDvlquJ65iI8UBI,2007
+ fabricks/utils/sqlglot.py,sha256=Q3SOc4CJfGBIe4vr9laipJaPIknz_hnKGrJgLc7DumE,1439
+ fabricks/utils/read/__init__.py,sha256=a_5l60m1AyzQUII170bfRRuXR_ynC3EwfysidRy44GE,272
+ fabricks/utils/read/_types.py,sha256=_YY0Yp8IPNYvcRFNa9WMcIl20cN93Cdcgh3jddypnuk,76
+ fabricks/utils/read/read.py,sha256=el6GvynJrCl2Ap_bM7xY750fSXjrr9xmp14P92h-40g,8401
+ fabricks/utils/read/read_excel.py,sha256=TnirdvaVk3gFtEpTD20_U2v3KRnTAc_T85JzwqOHUwI,108
+ fabricks/utils/read/read_yaml.py,sha256=PEMpMaVCfqfQ792rg1mt33WDpy2lOFR3DBLt4q7unPg,869
+ fabricks/utils/schema/__init__.py,sha256=jWGhKohpxbeKE260n3GktiCnk11MnlVcbkkzjN-6ZJw,232
+ fabricks/utils/schema/get_json_schema_for_type.py,sha256=ePIxRkYvsxhQiN59Dln4ONtSVd1cK_KsEoAxdCHbb5k,5846
+ fabricks/utils/schema/get_schema_for_type.py,sha256=5k-R6zCgUAtapQgxT4turcx1IQ-b7DN9fjrwYqmCw0Q,3716
+ fabricks/utils/write/__init__.py,sha256=i0UnZenXj9Aq0b0_aU3s6882vg-Vu_AyKfQhl_dTp-g,200
+ fabricks/utils/write/delta.py,sha256=lTQ0CfUhcvn3xTCcT_Ns6PMDBsO5UEfa2S9XpJiLJ9c,1250
+ fabricks/utils/write/stream.py,sha256=wQBpAnQtYA6nl79sPKhVM6u5m-66suX7B6VQ6tW4TOs,622
+ fabricks-3.0.7.dist-info/METADATA,sha256=mxCXO3itBIJ1MORFhE0uEBoXIlEmEN5bsvGS53_8Gfk,761
+ fabricks-3.0.7.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ fabricks-3.0.7.dist-info/RECORD,,
fabricks/api/notebooks/add_fabricks.py DELETED
@@ -1,13 +0,0 @@
- # Databricks notebook source
- import os
- import sys
-
- # COMMAND ----------
-
- root = os.path.abspath("../../..")
-
- # COMMAND ----------
-
- sys.path.append(root)
-
- # COMMAND ----------