dasl-client 1.0.13__py3-none-any.whl → 1.0.14__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to a supported public registry. It is provided for informational purposes only.

Potentially problematic release.

dasl_client/client.py CHANGED
@@ -1,5 +1,7 @@
 from copy import deepcopy
-from typing import Any, Callable, Iterator, List, Optional, TypeVar
+from typing import Any, Callable, Iterator, List, Optional, Tuple, TypeVar
+from pydantic import Field
+from pyspark.sql import DataFrame
 
 from dasl_api import (
     CoreV1Api,
@@ -9,8 +11,6 @@ from dasl_api import (
     WorkspaceV1CreateWorkspaceRequest,
     api,
 )
-from pydantic import Field
-
 from dasl_client.auth.auth import (
     Authorization,
     DatabricksSecretAuth,
@@ -569,6 +569,41 @@ class Client:
         )
         return Rule.from_api_obj(result)
 
+    def exec_rule(
+        self, rule_in: Rule, df: DataFrame
+    ) -> Tuple[DataFrame, Optional[DataFrame]]:
+        """
+        Locally execute a Rule. Must be run from within a Databricks
+        notebook or else an exception will be raised. This is intended
+        to facilitate Rule development.
+
+        :param rule_in: The specification of the Rule to execute.
+        :param df: The DataFrame to use as the input to the Rule.
+        :returns Tuple[DataFrame, Optional[DataFrame]]: The first
+            element of the tuple contains the notables produced by
+            the rule, and the second element contains the observables,
+            or None if no observables were produced.
+        """
+        Helpers.ensure_databricks()
+        with error_handler():
+            result = self._core_client().core_v1_render_rule(
+                self._workspace(),
+                rule_in.to_api_obj(),
+            )
+
+        try:
+            import notebook_utils
+        except ImportError as e:
+            raise ImportError(
+                "Package 'notebook_utils' not found. "
+                "Install it within this notebook using "
+                f"%pip install {result.notebook_utils_path}"
+            ) from e
+
+        namespace = {}
+        exec(result.content, namespace)
+        return namespace["generate"](df)
+
     def adhoc_transform(
         self,
         warehouse: str,
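For context, the new exec_rule renders the Rule server-side via core_v1_render_rule, executes the returned notebook code locally, and invokes its generate entry point on the supplied DataFrame (installing notebook_utils via %pip if missing). A minimal sketch of driving it from a Databricks notebook; the Client() constructor and get_rule accessor are assumptions for illustration, not confirmed by this diff:

    # Hypothetical notebook usage of Client.exec_rule. Only exec_rule's
    # signature comes from this release; the surrounding names are assumed.
    from dasl_client.client import Client

    client = Client()                            # assumed constructor
    rule = client.get_rule("example-rule")       # assumed accessor
    df = spark.table("bronze.example_events")    # `spark` is notebook-provided

    notables, observables = client.exec_rule(rule, df)
    notables.show()
    if observables is not None:
        observables.show()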
dasl_client/helpers.py CHANGED
@@ -6,15 +6,18 @@ class Helpers:
     default_dasl_host = "https://api.prod.sl.antimatter.io"
 
     @staticmethod
-    def databricks_context():
-        # This import raises an exception if outside a notebook context, so only
-        # import if this method is called
+    def ensure_databricks():
         if "DATABRICKS_RUNTIME_VERSION" not in os.environ:
             raise Exception(
                 "attempted to access databricks context outside "
                 + "of databricks notebook"
             )
 
+    @staticmethod
+    def databricks_context():
+        # This import raises an exception if outside a notebook context, so only
+        # import if this method is called
+        Helpers.ensure_databricks()
         from databricks.sdk.runtime import dbutils
 
         return dbutils.notebook.entry_point.getDbutils().notebook().getContext()
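Splitting ensure_databricks out of databricks_context lets callers such as exec_rule check the environment without triggering the notebook-only dbutils import. A sketch of the guard's behavior outside a Databricks runtime:

    import os
    from dasl_client.helpers import Helpers

    # Outside Databricks, DATABRICKS_RUNTIME_VERSION is unset, so the
    # guard raises before any notebook-only import is attempted.
    os.environ.pop("DATABRICKS_RUNTIME_VERSION", None)
    try:
        Helpers.ensure_databricks()
    except Exception as err:
        print(err)  # "attempted to access databricks context outside ..."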
dasl_client/types/content.py CHANGED
@@ -173,6 +173,8 @@ class DataSourcePreset(BaseModel):
     name: Optional[str] = None
     author: Optional[str] = None
     description: Optional[str] = None
+    title: Optional[str] = None
+    icon_url: Optional[str] = None
     autoloader: Optional[PresetAutoloader] = None
     silver: Optional[SilverPreset] = None
     gold: Optional[List[GoldPreset]] = None
@@ -188,6 +190,8 @@ class DataSourcePreset(BaseModel):
             name=obj.name,
             author=obj.author,
             description=obj.description,
+            title=obj.title,
+            icon_url=obj.icon_url,
             autoloader=PresetAutoloader.from_api_obj(obj.autoloader),
             silver=SilverPreset.from_api_obj(obj.silver),
             gold=[GoldPreset.from_api_obj(item) for item in obj.gold],
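The two new optional fields attach display metadata to a preset. A sketch of constructing a DataSourcePreset with them, assuming the class is exposed from dasl_client/types/content.py as the RECORD change suggests; all values are placeholders:

    from dasl_client.types.content import DataSourcePreset  # import path assumed

    preset = DataSourcePreset(
        name="example_source",
        author="Antimatter Team",
        description="Example data source preset",
        title="Example Source",                   # new in 1.0.14
        icon_url="https://example.com/icon.svg",  # new in 1.0.14
    )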
dasl_client/types/dbui.py CHANGED
@@ -11,7 +11,8 @@ from dasl_api import (
     DbuiV1TransformRequestTransformsInner,
     DbuiV1TransformRequestTransformsInnerPresetOverrides,
     DbuiV1TransformResponse,
-    DbuiV1TransformResponseStagesInner,
+    DbuiV1TransformResponseStagesInner, ContentV1DatasourcePresetAutoloaderCloudFiles,
+    DbuiV1TransformRequestAutoloaderInput,
 )
 
 from .datasource import DataSource, FieldSpec, FieldUtils
@@ -131,7 +132,7 @@ class TransformRequest(BaseModel):
     Attributes:
         input (TransformRequest.Input):
             The input block containing the columns metadata and data.
-        autoloader_input (DataSource.Autoloader):
+        autoloader_input (Autoloader):
             The autoloader input configuration.
         use_preset (str):
             Indicates which preset to use for the transforms.
@@ -172,6 +173,84 @@ class TransformRequest(BaseModel):
                 data=self.data,
             )
 
+    class Autoloader(BaseModel):
+        """
+        Autoloader configuration for the DataSource.
+
+        Attributes:
+            format (Optional[str]):
+                The format of the data (e.g., json, parquet, csv, etc.).
+            location (str):
+                External location for the volume in Unity Catalog.
+            schema_file (Optional[str]):
+                An optional file containing the schema of the data source.
+            cloud_files (Optional[Autoloader.CloudFiles]):
+                CloudFiles configuration.
+        """
+
+        class CloudFiles(BaseModel):
+            """
+            CloudFiles configuration for the Autoloader.
+
+            Attributes:
+                schema_hints_file (Optional[str]):
+                schema_hints (Optional[str]):
+            """
+
+            schema_hints_file: Optional[str] = None
+            schema_hints: Optional[str] = None
+
+            @staticmethod
+            def from_api_obj(
+                obj: Optional[ContentV1DatasourcePresetAutoloaderCloudFiles],
+            ) -> "Optional[TransformRequest.Autoloader.CloudFiles]":
+                if obj is None:
+                    return None
+                return TransformRequest.Autoloader.CloudFiles(
+                    schema_hints_file=obj.schema_hints_file,
+                    schema_hints=obj.schema_hints,
+                )
+
+            def to_api_obj(self) -> ContentV1DatasourcePresetAutoloaderCloudFiles:
+                return ContentV1DatasourcePresetAutoloaderCloudFiles(
+                    schema_hints_file=self.schema_hints_file,
+                    schema_hints=self.schema_hints,
+                )
+
+        format: Optional[str] = None
+        location: str
+        schema_file: Optional[str] = None
+        schema: Optional[str] = None
+        cloud_files: Optional["TransformRequest.Autoloader.CloudFiles"] = None
+        row_count: Optional[int] = None
+        row_offset: Optional[int] = None
+
+        @staticmethod
+        def from_api_obj(
+            obj: Optional[DbuiV1TransformRequestAutoloaderInput],
+        ) -> "Optional[TransformRequest.Autoloader]":
+            if obj is None:
+                return None
+            return TransformRequest.Autoloader(
+                format=obj.format,
+                location=obj.location,
+                schema_file=obj.schema_file,
+                cloud_files=TransformRequest.Autoloader.CloudFiles.from_api_obj(obj.cloud_files),
+                row_count=obj.row_count,
+                row_offset=obj.row_offset,
+            )
+        def to_api_obj(self) -> DbuiV1TransformRequestAutoloaderInput:
+            return DbuiV1TransformRequestAutoloaderInput(
+                format=self.format,
+                location=self.location,
+                schemaFile=self.schema_file,
+                schema=self.schema,
+                cloudFiles=Helpers.maybe(lambda o: o.to_api_obj(), self.cloud_files),
+                rowCount=self.row_count,
+                rowOffset=self.row_offset,
+            )
+
+
     class Transform(BaseModel):
         """
         A transform configuration to apply to the data.
@@ -273,7 +352,7 @@ class TransformRequest(BaseModel):
         )
 
     input: Optional["TransformRequest.Input"] = None
-    autoloader_input: Optional[DataSource.Autoloader] = None
+    autoloader_input: Optional["TransformRequest.Autoloader"] = None
    use_preset: Optional[str] = None
    transforms: List["TransformRequest.Transform"]
 
@@ -281,7 +360,7 @@ class TransformRequest(BaseModel):
     def from_api_obj(obj: DbuiV1TransformRequest) -> "TransformRequest":
         return TransformRequest(
             input=TransformRequest.Input.from_api_obj(obj.input),
-            autoloader_input=DataSource.Autoloader.from_api_obj(obj.autoloader_input),
+            autoloader_input=TransformRequest.Autoloader.from_api_obj(obj.autoloader_input),
             use_preset=obj.use_preset,
             transforms=[
                 TransformRequest.Transform.from_api_obj(item) for item in obj.transforms
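TransformRequest now carries its own nested Autoloader model (with sampling fields row_count and row_offset) instead of reusing DataSource.Autoloader. A sketch of building one and converting it to the generated API type; the location and schema hints are placeholders:

    from dasl_client.types.dbui import TransformRequest

    autoloader = TransformRequest.Autoloader(
        location="s3://example-bucket/landing/",  # the only required field
        format="json",
        cloud_files=TransformRequest.Autoloader.CloudFiles(
            schema_hints="ts TIMESTAMP, msg STRING",
        ),
        row_count=100,
    )

    request = TransformRequest(
        autoloader_input=autoloader,
        transforms=[],  # transform list omitted for brevity
    )
    api_obj = autoloader.to_api_obj()  # DbuiV1TransformRequestAutoloaderInput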
dasl_client-1.0.13.dist-info/METADATA → dasl_client-1.0.14.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dasl_client
-Version: 1.0.13
+Version: 1.0.14
 Summary: The DASL client library used for interacting with the DASL workspace
 Home-page: https://github.com/antimatter/asl
 Author: Antimatter Team
@@ -8,9 +8,10 @@ Author-email: Antimatter Team <support@antimatter.io>
 Requires-Python: >=3.8
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: dasl-api ==0.1.15
+Requires-Dist: dasl-api ==0.1.17
 Requires-Dist: databricks-sdk >=0.41.0
 Requires-Dist: pydantic >=2
+Requires-Dist: pyspark >=3.5.5
 Requires-Dist: typing-extensions ==4.10.0
 
 # DASL Client Library
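The dependency changes track the code: pyspark supplies the DataFrame type now imported by client.py, and dasl-api 0.1.17 provides the DbuiV1TransformRequestAutoloaderInput model used in dbui.py. One way to confirm the resolved versions after upgrading (importlib.metadata is standard library on the required Python >= 3.8):

    from importlib.metadata import version

    for pkg in ("dasl-client", "dasl-api", "pyspark"):
        print(pkg, version(pkg))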
dasl_client-1.0.13.dist-info/RECORD → dasl_client-1.0.14.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
 dasl_client/__init__.py,sha256=E6gOgO8qg96Y38JKA-4LyNBvc2ytQPEfhdniYsCWBxA,127
-dasl_client/client.py,sha256=jiILjQOB4OuXXj7J0ZBYEN-pnHm8VQjj1Jqf54eJvS8,25419
-dasl_client/helpers.py,sha256=hi_SrFhEqBuLWOteuQlv__Atzq2VMCgY7A8xSt3ztuA,1035
+dasl_client/client.py,sha256=Eb_A7UKjRaTqmD0Q5Keq-JMA4MGGtKFv1Oft9vXE4Yo,26831
+dasl_client/helpers.py,sha256=L7ycxrqyG28glRRGZgsrVBdCJzXYCW7DB0hAvupGMuA,1118
 dasl_client/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dasl_client/auth/auth.py,sha256=yTeijYYpfJVJ_wYyq0U6kAntg4xz5MzIR37_CpVR57k,7277
 dasl_client/conn/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -15,15 +15,15 @@ dasl_client/preset_development/preview_parameters.py,sha256=KRk3lTImyvJTeEcepwOS
 dasl_client/preset_development/stage.py,sha256=NiDuFiKvoXgqVPPjzB3TZQN1dQeVff6he6oaSVGwkCs,20154
 dasl_client/types/__init__.py,sha256=GsXC3eWuv21VTLPLPH9pzM95JByaKnKrPjJkh2rlZfQ,170
 dasl_client/types/admin_config.py,sha256=Kmx3Kuai9_LWMeO2NpWasRUgLihYSEXGtuYVfG0FkjU,2200
-dasl_client/types/content.py,sha256=b_4-6rcA6uP0aPN-nBd1QonwQ5Bc4lnrHlL6MKYnl8U,7311
+dasl_client/types/content.py,sha256=uZAO-Vm_orvqsH2CkiwBUHgn6fWGXR90hOGKc256lcA,7442
 dasl_client/types/datasource.py,sha256=-ABmBh5yZwHeY-PKQMnNCNa9FSzod5n1O817m8ZCL6o,52519
-dasl_client/types/dbui.py,sha256=RZV_YxCc5KIHLcDLO5Gb1t3KnS8JKN4PbhnYGsVJiws,13200
+dasl_client/types/dbui.py,sha256=CLGEaBudddi7AmXoykHw3Mxv8wU2cWm20DZuM-bAjQA,16302
 dasl_client/types/helpers.py,sha256=gLGTvrssAKrdkQT9h80twEosld2egwhvj-zAudxWFPs,109
 dasl_client/types/rule.py,sha256=BqhWhT8Eh95UXNytd0PxVcjqYuWQcdN1tfKjUB4Tk74,25781
 dasl_client/types/types.py,sha256=DeUOfdYGOhUGEy7yKOfo0OYTXYRrs57yYgNLUbu7Tlc,8806
 dasl_client/types/workspace_config.py,sha256=RThg_THS_4leITWdzBPTWdR2ytq5Uk36m6nIOUMzFCM,24878
-dasl_client-1.0.13.dist-info/LICENSE,sha256=M35UepUPyKmFkvENlkweeaMElheQqNoM5Emh8ADO-rs,4
-dasl_client-1.0.13.dist-info/METADATA,sha256=fXnfud7SFjMSDN_6MTVTwjpDmhV3Rio_ECGDZ36RZIA,741
-dasl_client-1.0.13.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-dasl_client-1.0.13.dist-info/top_level.txt,sha256=kIv8ox_2oJPjGB8_yuey5vvuPCyfY8kywG138f9oSOY,12
-dasl_client-1.0.13.dist-info/RECORD,,
+dasl_client-1.0.14.dist-info/LICENSE,sha256=M35UepUPyKmFkvENlkweeaMElheQqNoM5Emh8ADO-rs,4
+dasl_client-1.0.14.dist-info/METADATA,sha256=G12U4jgWU43fwrPGNh392IArKnjuAO33s8Wvb9nPTvQ,772
+dasl_client-1.0.14.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+dasl_client-1.0.14.dist-info/top_level.txt,sha256=kIv8ox_2oJPjGB8_yuey5vvuPCyfY8kywG138f9oSOY,12
+dasl_client-1.0.14.dist-info/RECORD,,