dasl-client 1.0.26__tar.gz → 1.0.27__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dasl-client might be problematic.

Files changed (41)
  1. dasl_client-1.0.27/PKG-INFO +144 -0
  2. dasl_client-1.0.27/README.md +129 -0
  3. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/client.py +2 -2
  4. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/helpers.py +1 -1
  5. dasl_client-1.0.27/dasl_client/regions.json +4 -0
  6. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/types/datasource.py +3 -0
  7. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/types/dbui.py +138 -33
  8. dasl_client-1.0.27/dasl_client.egg-info/PKG-INFO +144 -0
  9. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client.egg-info/SOURCES.txt +1 -1
  10. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client.egg-info/requires.txt +1 -1
  11. {dasl_client-1.0.26 → dasl_client-1.0.27}/pyproject.toml +2 -2
  12. dasl_client-1.0.26/PKG-INFO +0 -19
  13. dasl_client-1.0.26/dasl_client/regions.json +0 -3
  14. dasl_client-1.0.26/dasl_client.egg-info/PKG-INFO +0 -19
  15. dasl_client-1.0.26/setup.py +0 -16
  16. {dasl_client-1.0.26 → dasl_client-1.0.27}/LICENSE +0 -0
  17. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/__init__.py +0 -0
  18. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/auth/__init__.py +0 -0
  19. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/auth/auth.py +0 -0
  20. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/conn/__init__.py +0 -0
  21. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/conn/client_identifier.py +0 -0
  22. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/conn/conn.py +0 -0
  23. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/errors/__init__.py +0 -0
  24. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/errors/errors.py +0 -0
  25. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/exec_rule.py +0 -0
  26. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/preset_development/__init__.py +0 -0
  27. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/preset_development/errors.py +0 -0
  28. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/preset_development/preview_engine.py +0 -0
  29. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/preset_development/preview_parameters.py +0 -0
  30. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/preset_development/stage.py +0 -0
  31. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/regions.py +0 -0
  32. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/types/__init__.py +0 -0
  33. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/types/admin_config.py +0 -0
  34. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/types/content.py +0 -0
  35. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/types/helpers.py +0 -0
  36. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/types/rule.py +0 -0
  37. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/types/types.py +0 -0
  38. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client/types/workspace_config.py +0 -0
  39. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client.egg-info/dependency_links.txt +0 -0
  40. {dasl_client-1.0.26 → dasl_client-1.0.27}/dasl_client.egg-info/top_level.txt +0 -0
  41. {dasl_client-1.0.26 → dasl_client-1.0.27}/setup.cfg +0 -0
@@ -0,0 +1,144 @@
+ Metadata-Version: 2.4
+ Name: dasl_client
+ Version: 1.0.27
+ Summary: The DASL client library used for interacting with the DASL workspace
+ Author-email: Antimatter Team <support@antimatter.io>
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: dasl_api==0.1.26
+ Requires-Dist: databricks-sdk>=0.41.0
+ Requires-Dist: pydantic>=2
+ Requires-Dist: typing_extensions>=4.10.0
+ Requires-Dist: pyyaml==6.0.2
+ Dynamic: license-file
+
+ # DASL Client Library
+
+ The DASL (Databricks Antimatter Security Lakehouse) Client Library is a Python SDK for interacting
+ with DASL services, letting you manage datasources, rules, workspace configurations, and more
+ from Databricks notebooks.
+
+ ## Features
+
+ * **Simple Authentication**: Automatic workspace detection in Databricks notebooks
+ * **Datasource Management**: Create, update, list, and delete datasources
+ * **Rule Management**: Define and manage security detection rules to identify threats
+ * **Workspace Configuration**: Update and retrieve DASL's workspace-level settings
+
+ ## Installation
+
+ Install from PyPI:
+
+ ```bash
+ pip install dasl-client
+ ```
+
+ ## Quick Start
+
+ ### Databricks Notebook Environment (Recommended)
+
+ The DASL client works best in Databricks notebooks, where it authenticates automatically:
+
+ ```python
+ from dasl_client import Client
+
+ # Automatically detects the Databricks context and authenticates
+ client = Client.for_workspace()
+ print("Connected to DASL!")
+
+ # List existing datasources
+ print("Existing datasources:")
+ for datasource in client.list_datasources():
+     print(f" - {datasource.metadata.name}")
+
+ # List detection rules
+ print("Existing detection rules:")
+ for rule in client.list_rules():
+     print(f" - {rule.metadata.name}")
+ ```
+
+ ### Creating a Datasource
+
+ ```python
+ # Autoloader and GoldSpec are assumed to be exported by dasl_client
+ # alongside the other types used below.
+ from dasl_client import DataSource, Autoloader, Schedule, BronzeSpec, SilverSpec, GoldSpec
+
+ # Create a new datasource
+ datasource = DataSource(
+     source="aws",
+     source_type="cloudtrail",
+     autoloader=Autoloader(
+         enabled=True,
+         schedule=Schedule(
+             at_least_every="1h",
+             enabled=True
+         )
+     ),
+     bronze=BronzeSpec(
+         bronze_table="security_logs_bronze",
+         skip_bronze_loading=False
+     ),
+     silver=SilverSpec(
+         # Configure the silver layer here; see the API reference for more details
+     ),
+     gold=GoldSpec(
+         # Configure the gold layer here; see the API reference for more details
+     )
+ )
+
+ # Create the datasource
+ created_datasource = client.create_datasource(datasource)
+ print(f"Created datasource: {created_datasource.metadata.name}")
+ ```
+
+ ### Creating a Detection Rule
+
+ ```python
+ from datetime import datetime
+
+ from dasl_client.types import Rule, Schedule
+
+ # Create a new detection rule that flags blocked HTTP activity
+ rule = Rule(
+     schedule=Schedule(
+         at_least_every="2h",
+         enabled=True,
+     ),
+     input=Rule.Input(
+         stream=Rule.Input.Stream(
+             tables=[
+                 Rule.Input.Stream.Table(name="http_activity"),
+             ],
+             filter="disposition = 'Blocked'",
+             starting_timestamp=datetime(2025, 7, 8, 16, 47, 30),
+         ),
+     ),
+     output=Rule.Output(
+         summary="record was blocked",
+     ),
+ )
+
+ try:
+     created_rule = client.create_rule("Detect Blocked HTTP Activity", rule)
+     print(f"Successfully created rule: {created_rule.metadata.name}")
+ except Exception as e:
+     print(f"Error creating rule: {e}")
+ ```
+
+ ## Requirements
+
+ - Python 3.8+
+ - Access to a Databricks workspace with DASL enabled
+ - `databricks-sdk>=0.41.0`
+ - `pydantic>=2`
+
+ ## Documentation
+
+ For complete DASL Client documentation, examples, and API reference:
+
+ - [DASL Client Documentation](https://antimatter-dasl-client.readthedocs-hosted.com/)
+ - [API Reference](https://antimatter-dasl-client.readthedocs-hosted.com/en/latest/api-reference/)
+ - [Quickstart Guide](https://antimatter-dasl-client.readthedocs-hosted.com/en/latest/quickstart.html)
+
+ ## Support
+
+ - **Email**: support@antimatter.io
+ - **Documentation**: [DASL Documentation](https://docs.sl.antimatter.io)
@@ -0,0 +1,129 @@
+ # DASL Client Library
+
+ The DASL (Databricks Antimatter Security Lakehouse) Client Library is a Python SDK for interacting
+ with DASL services, letting you manage datasources, rules, workspace configurations, and more
+ from Databricks notebooks.
+
+ ## Features
+
+ * **Simple Authentication**: Automatic workspace detection in Databricks notebooks
+ * **Datasource Management**: Create, update, list, and delete datasources
+ * **Rule Management**: Define and manage security detection rules to identify threats
+ * **Workspace Configuration**: Update and retrieve DASL's workspace-level settings
+
+ ## Installation
+
+ Install from PyPI:
+
+ ```bash
+ pip install dasl-client
+ ```
+
+ ## Quick Start
+
+ ### Databricks Notebook Environment (Recommended)
+
+ The DASL client works best in Databricks notebooks, where it authenticates automatically:
+
+ ```python
+ from dasl_client import Client
+
+ # Automatically detects the Databricks context and authenticates
+ client = Client.for_workspace()
+ print("Connected to DASL!")
+
+ # List existing datasources
+ print("Existing datasources:")
+ for datasource in client.list_datasources():
+     print(f" - {datasource.metadata.name}")
+
+ # List detection rules
+ print("Existing detection rules:")
+ for rule in client.list_rules():
+     print(f" - {rule.metadata.name}")
+ ```
+
+ ### Creating a Datasource
+
+ ```python
+ # Autoloader and GoldSpec are assumed to be exported by dasl_client
+ # alongside the other types used below.
+ from dasl_client import DataSource, Autoloader, Schedule, BronzeSpec, SilverSpec, GoldSpec
+
+ # Create a new datasource
+ datasource = DataSource(
+     source="aws",
+     source_type="cloudtrail",
+     autoloader=Autoloader(
+         enabled=True,
+         schedule=Schedule(
+             at_least_every="1h",
+             enabled=True
+         )
+     ),
+     bronze=BronzeSpec(
+         bronze_table="security_logs_bronze",
+         skip_bronze_loading=False
+     ),
+     silver=SilverSpec(
+         # Configure the silver layer here; see the API reference for more details
+     ),
+     gold=GoldSpec(
+         # Configure the gold layer here; see the API reference for more details
+     )
+ )
+
+ # Create the datasource
+ created_datasource = client.create_datasource(datasource)
+ print(f"Created datasource: {created_datasource.metadata.name}")
+ ```
+
+ ### Creating a Detection Rule
+
+ ```python
+ from datetime import datetime
+
+ from dasl_client.types import Rule, Schedule
+
+ # Create a new detection rule that flags blocked HTTP activity
+ rule = Rule(
+     schedule=Schedule(
+         at_least_every="2h",
+         enabled=True,
+     ),
+     input=Rule.Input(
+         stream=Rule.Input.Stream(
+             tables=[
+                 Rule.Input.Stream.Table(name="http_activity"),
+             ],
+             filter="disposition = 'Blocked'",
+             starting_timestamp=datetime(2025, 7, 8, 16, 47, 30),
+         ),
+     ),
+     output=Rule.Output(
+         summary="record was blocked",
+     ),
+ )
+
+ try:
+     created_rule = client.create_rule("Detect Blocked HTTP Activity", rule)
+     print(f"Successfully created rule: {created_rule.metadata.name}")
+ except Exception as e:
+     print(f"Error creating rule: {e}")
+ ```
+
+ ## Requirements
+
+ - Python 3.8+
+ - Access to a Databricks workspace with DASL enabled
+ - `databricks-sdk>=0.41.0`
+ - `pydantic>=2`
+
+ ## Documentation
+
+ For complete DASL Client documentation, examples, and API reference:
+
+ - [DASL Client Documentation](https://antimatter-dasl-client.readthedocs-hosted.com/)
+ - [API Reference](https://antimatter-dasl-client.readthedocs-hosted.com/en/latest/api-reference/)
+ - [Quickstart Guide](https://antimatter-dasl-client.readthedocs-hosted.com/en/latest/quickstart.html)
+
+ ## Support
+
+ - **Email**: support@antimatter.io
+ - **Documentation**: [DASL Documentation](https://docs.sl.antimatter.io)
@@ -9,7 +9,7 @@ from dasl_api import (
      CoreV1Api,
      DbuiV1Api,
      DbuiV1QueryExtendRequest,
-     DbuiV1QueryExtendRequestTimeRange,
+     CoreV1QueryExtendRequestDateRange,
      DbuiV1QueryGenerateRequest,
      DbuiV1QueryGenerateRequestTimeRange,
      DbuiV1QueryGenerateStatus,
@@ -835,7 +835,7 @@ class Client:
          """
          time_range = None
          if start_date is not None or end_date is not None:
-             time_range = DbuiV1QueryExtendRequestTimeRange(
+             time_range = CoreV1QueryExtendRequestDateRange(
                  startDate=start_date,
                  endDate=end_date,
              )
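
For reference, a minimal sketch of the renamed date-range construction, using only the names and keyword arguments visible in the hunk above; the date values and their type are illustrative, since the diff does not show what the client accepts:

```python
from datetime import date

from dasl_api import CoreV1QueryExtendRequestDateRange

# Mirror the updated client.py logic: build the range only when
# at least one bound is provided.
start_date, end_date = date(2025, 7, 1), None
time_range = None
if start_date is not None or end_date is not None:
    time_range = CoreV1QueryExtendRequestDateRange(
        startDate=start_date,
        endDate=end_date,
    )
```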
@@ -3,7 +3,7 @@ import os
 
 
  class Helpers:
-     default_region = "us-east-1"
+     default_region = "aws-us-east-1"
 
      @staticmethod
      def ensure_databricks():
@@ -0,0 +1,4 @@
+ {
+     "aws-us-east-1": "https://api.sl.us-east-1.cloud.databricks.com",
+     "aws-us-west-2": "https://api.sl.us-west-2.cloud.databricks.com"
+ }
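
The new region keys carry a cloud-provider prefix, matching the updated `default_region` in helpers.py. A sketch of how the bundled regions.json might be resolved to an API endpoint; the lookup itself is an assumption, not code shown in this diff:

```python
import json
from importlib import resources

# Load the region-to-endpoint map bundled with the package
# (importlib.resources.files requires Python 3.9+).
with resources.files("dasl_client").joinpath("regions.json").open("r") as f:
    regions = json.load(f)

# The new default region key from helpers.py.
endpoint = regions["aws-us-east-1"]
print(endpoint)  # https://api.sl.us-east-1.cloud.databricks.com
```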
@@ -610,6 +610,7 @@ class SilverSpec(BaseModel):
          post_filter: Optional[str] = None
          preset_overrides: Optional["SilverSpec.PreTransform.PresetOverrides"] = None
          add_fields: Optional[List[FieldSpec]] = None
+         utils: Optional[FieldUtils] = None
 
          @staticmethod
          def from_api_obj(
@@ -630,6 +631,7 @@ class SilverSpec(BaseModel):
                      obj.preset_overrides
                  ),
                  add_fields=add_fields,
+                 utils=FieldUtils.from_api_obj(obj.utils),
              )
 
          def to_api_obj(self) -> CoreV1DataSourceSpecSilverPreTransform:
@@ -645,6 +647,7 @@ class SilverSpec(BaseModel):
                  post_filter=self.post_filter,
                  preset_overrides=Helpers.maybe(to_api_obj, self.preset_overrides),
                  add_fields=add_fields,
+                 utils=Helpers.maybe(to_api_obj, self.utils),
              )
 
          class Transform(BaseModel):
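
Both conversion paths for the new `utils` field rely on the `Helpers.maybe` pattern already used for `preset_overrides`: apply a conversion only when the optional value is present. A minimal sketch of the assumed semantics, inferred from the call sites above:

```python
from typing import Callable, Optional, TypeVar

T = TypeVar("T")
U = TypeVar("U")

# Assumed behavior of Helpers.maybe: None passes through untouched,
# anything else is converted.
def maybe(fn: Callable[[T], U], value: Optional[T]) -> Optional[U]:
    return None if value is None else fn(value)
```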
@@ -6,14 +6,16 @@ from dasl_api import (
      DbuiV1ObservableEventsListItemsInnerNotable,
      DbuiV1ObservableEventsListItemsInner,
      DbuiV1TransformRequest,
-     DbuiV1TransformRequestInput,
+     DbuiV1TransformRequestIngestion,
+     DbuiV1TransformRequestIngestionInput,
+     DbuiV1TransformRequestIngestionAutoloaderInput,
+     DbuiV1TransformRequestIngestionAdditionalInputTablesInner,
      DbuiV1TableColumnDetails,
      DbuiV1TransformRequestTransformsInner,
      DbuiV1TransformRequestTransformsInnerPresetOverrides,
      DbuiV1TransformResponse,
      DbuiV1TransformResponseStagesInner,
      ContentV1DatasourcePresetAutoloaderCloudFiles,
-     DbuiV1TransformRequestAutoloaderInput,
  )
 
  from .datasource import DataSource, FieldSpec, FieldUtils
@@ -124,18 +126,16 @@ class Dbui(BaseModel):
 
  class TransformRequest(BaseModel):
      """
-     The transform request identifies the starting data (either with an
-     autoloader spec or an input block) and then specifies a chain of
-     transforms to be performed on the data. The response includes the data
-     at each intermediate stage (e.g. input/autoloaded data, pre-transform,
-     silver).
+     The transform request identifies the starting data through an ingestion
+     configuration and then specifies a chain of transforms to be performed
+     on the data. The response includes the data at each intermediate stage
+     (e.g. input/autoloaded data, pre-transform, silver).
 
      Attributes:
-         input (TransformRequest.Input):
-             The input block containing the columns metadata and data.
-         autoloader_input (Autoloader):
-             The autoloader input configuration.
-         use_preset (str):
+         ingestion (Optional[TransformRequest.Ingestion]):
+             Ingestion (bronze) layer configuration for acquiring and wrangling
+             data before silver and gold transformations.
+         use_preset (Optional[str]):
              Indicates which preset to use for the transforms.
          transforms (List[TransformRequest.Transform]):
              A list of transform configurations.
@@ -157,7 +157,7 @@ class TransformRequest(BaseModel):
 
          @staticmethod
          def from_api_obj(
-             obj: Optional[DbuiV1TransformRequestInput],
+             obj: Optional[DbuiV1TransformRequestIngestionInput],
          ) -> Optional["TransformRequest.Input"]:
              if obj is None:
                  return None
@@ -168,8 +168,8 @@ class TransformRequest(BaseModel):
                  data=obj.data,
              )
 
-         def to_api_obj(self) -> DbuiV1TransformRequestInput:
-             return DbuiV1TransformRequestInput(
+         def to_api_obj(self) -> DbuiV1TransformRequestIngestionInput:
+             return DbuiV1TransformRequestIngestionInput(
                  columns=[item.to_api_obj() for item in self.columns],
                  data=self.data,
              )
@@ -204,7 +204,7 @@ class TransformRequest(BaseModel):
              @staticmethod
              def from_api_obj(
                  obj: Optional[ContentV1DatasourcePresetAutoloaderCloudFiles],
-             ) -> "TransformRequest.Autoloader.CloudFiles":
+             ) -> Optional["TransformRequest.Autoloader.CloudFiles"]:
                  if obj is None:
                      return None
                  return TransformRequest.Autoloader.CloudFiles(
@@ -228,7 +228,7 @@ class TransformRequest(BaseModel):
 
          @staticmethod
          def from_api_obj(
-             obj: Optional[DbuiV1TransformRequestAutoloaderInput],
+             obj: Optional[DbuiV1TransformRequestIngestionAutoloaderInput],
          ) -> "Optional[TransformRequest.Autoloader]":
              if obj is None:
                  return None
@@ -244,15 +244,124 @@ class TransformRequest(BaseModel):
                  row_offset=obj.row_offset,
              )
 
-         def to_api_obj(self) -> DbuiV1TransformRequestAutoloaderInput:
-             return DbuiV1TransformRequestAutoloaderInput(
+         def to_api_obj(self) -> DbuiV1TransformRequestIngestionAutoloaderInput:
+             return DbuiV1TransformRequestIngestionAutoloaderInput(
                  format=self.format,
                  location=self.location,
-                 schemaFile=self.schema_file,
-                 schema=self.var_schema,
-                 cloudFiles=Helpers.maybe(lambda o: o.to_api_obj(), self.cloud_files),
-                 rowCount=self.row_count,
-                 rowOffset=self.row_offset,
+                 schema_file=self.schema_file,
+                 var_schema=self.var_schema,
+                 cloud_files=Helpers.maybe(lambda o: o.to_api_obj(), self.cloud_files),
+                 row_count=self.row_count,
+                 row_offset=self.row_offset,
+             )
+
+     class AdditionalInputTable(BaseModel):
+         """
+         Configuration for additional input tables used for lookup or enrichment.
+
+         Attributes:
+             name (str):
+                 The name of the table.
+             alias (str):
+                 Alias name for the table.
+             join_type (str):
+                 How to join to the preceding table.
+             join_expr (str):
+                 The join condition expression to join with the preceding table.
+         """
+
+         name: str
+         alias: str
+         join_type: str
+         join_expr: str
+
+         @staticmethod
+         def from_api_obj(
+             obj: Optional[DbuiV1TransformRequestIngestionAdditionalInputTablesInner],
+         ) -> Optional["TransformRequest.AdditionalInputTable"]:
+             if obj is None:
+                 return None
+             return TransformRequest.AdditionalInputTable(
+                 name=obj.name,
+                 alias=obj.alias,
+                 join_type=obj.join_type,
+                 join_expr=obj.join_expr,
+             )
+
+         def to_api_obj(
+             self,
+         ) -> DbuiV1TransformRequestIngestionAdditionalInputTablesInner:
+             return DbuiV1TransformRequestIngestionAdditionalInputTablesInner(
+                 name=self.name,
+                 alias=self.alias,
+                 join_type=self.join_type,
+                 join_expr=self.join_expr,
+             )
+
+     class Ingestion(BaseModel):
+         """
+         Ingestion (bronze) layer configuration for acquiring and wrangling data
+         before silver and gold transformations.
+
+         Attributes:
+             input (Optional[TransformRequest.Input]):
+                 Provides static data for adhoc transform processing.
+             autoloader_input (Optional[TransformRequest.Autoloader]):
+                 Configures ingestion from an external data source using Databricks Auto Loader.
+             load_as_single_variant (Optional[bool]):
+                 Whether to ingest the data as a single variant column called data.
+             additional_input_tables (Optional[List[TransformRequest.AdditionalInputTable]]):
+                 A list of existing tables that are joined with the input data.
+             pre_transform (Optional[List[List[str]]]):
+                 A set of SQL expressions to apply before writing the Auto Loader data to bronze.
+         """
+
+         input: Optional["TransformRequest.Input"] = None
+         autoloader_input: Optional["TransformRequest.Autoloader"] = None
+         load_as_single_variant: Optional[bool] = None
+         additional_input_tables: Optional[
+             List["TransformRequest.AdditionalInputTable"]
+         ] = None
+         pre_transform: Optional[List[List[str]]] = None
+
+         @staticmethod
+         def from_api_obj(
+             obj: Optional[DbuiV1TransformRequestIngestion],
+         ) -> Optional["TransformRequest.Ingestion"]:
+             if obj is None:
+                 return None
+
+             additional_input_tables = None
+             if obj.additional_input_tables is not None:
+                 additional_input_tables = [
+                     TransformRequest.AdditionalInputTable.from_api_obj(item)
+                     for item in obj.additional_input_tables
+                 ]
+
+             return TransformRequest.Ingestion(
+                 input=TransformRequest.Input.from_api_obj(obj.input),
+                 autoloader_input=TransformRequest.Autoloader.from_api_obj(
+                     obj.autoloader_input
+                 ),
+                 load_as_single_variant=obj.load_as_single_variant,
+                 additional_input_tables=additional_input_tables,
+                 pre_transform=obj.pre_transform,
+             )
+
+         def to_api_obj(self) -> DbuiV1TransformRequestIngestion:
+             to_api_obj = lambda o: o.to_api_obj()
+             additional_input_tables = None
+             if self.additional_input_tables is not None:
+                 additional_input_tables = [
+                     item.to_api_obj() for item in self.additional_input_tables
+                 ]
+
+             return DbuiV1TransformRequestIngestion(
+                 input=Helpers.maybe(to_api_obj, self.input),
+                 autoloader_input=Helpers.maybe(to_api_obj, self.autoloader_input),
+                 load_as_single_variant=self.load_as_single_variant,
+                 additional_input_tables=additional_input_tables,
+                 pre_transform=self.pre_transform,
              )
 
      class Transform(BaseModel):
@@ -350,18 +459,14 @@ class TransformRequest(BaseModel):
                  utils=Helpers.maybe(to_api_obj, self.utils),
              )
 
-     input: Optional["TransformRequest.Input"] = None
-     autoloader_input: Optional["TransformRequest.Autoloader"] = None
+     ingestion: Optional["TransformRequest.Ingestion"] = None
      use_preset: Optional[str] = None
      transforms: List["TransformRequest.Transform"]
 
      @staticmethod
      def from_api_obj(obj: DbuiV1TransformRequest) -> "TransformRequest":
          return TransformRequest(
-             input=TransformRequest.Input.from_api_obj(obj.input),
-             autoloader_input=TransformRequest.Autoloader.from_api_obj(
-                 obj.autoloader_input
-             ),
+             ingestion=TransformRequest.Ingestion.from_api_obj(obj.ingestion),
              use_preset=obj.use_preset,
              transforms=[
                  TransformRequest.Transform.from_api_obj(item) for item in obj.transforms
@@ -369,10 +474,10 @@ class TransformRequest(BaseModel):
          )
 
      def to_api_obj(self) -> DbuiV1TransformRequest:
-         to_api_obj = lambda o: o.to_api_obj()
          return DbuiV1TransformRequest(
-             input=Helpers.maybe(to_api_obj, self.input),
-             autoloader_input=Helpers.maybe(to_api_obj, self.autoloader_input),
+             ingestion=(
+                 self.ingestion.to_api_obj() if self.ingestion is not None else None
+             ),
              use_preset=self.use_preset,
              transforms=[item.to_api_obj() for item in self.transforms],
          )
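
Putting the dbui.py changes together: callers that previously set `input` or `autoloader_input` directly on `TransformRequest` now wrap them in the new `Ingestion` block. A hedged sketch of the new request shape; the import path follows the file layout above, the field values are illustrative, and the remaining `Autoloader` fields are assumed optional:

```python
from dasl_client.types.dbui import TransformRequest

req = TransformRequest(
    ingestion=TransformRequest.Ingestion(
        autoloader_input=TransformRequest.Autoloader(
            format="json",
            location="s3://example-bucket/raw-logs/",  # hypothetical location
        ),
        load_as_single_variant=False,
    ),
    transforms=[],
)

# Converts to the generated API model, including the nested ingestion block.
api_obj = req.to_api_obj()
```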
@@ -0,0 +1,144 @@
+ Metadata-Version: 2.4
+ Name: dasl_client
+ Version: 1.0.27
+ Summary: The DASL client library used for interacting with the DASL workspace
+ Author-email: Antimatter Team <support@antimatter.io>
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: dasl_api==0.1.26
+ Requires-Dist: databricks-sdk>=0.41.0
+ Requires-Dist: pydantic>=2
+ Requires-Dist: typing_extensions>=4.10.0
+ Requires-Dist: pyyaml==6.0.2
+ Dynamic: license-file
+
+ # DASL Client Library
+
+ The DASL (Databricks Antimatter Security Lakehouse) Client Library is a Python SDK for interacting
+ with DASL services, letting you manage datasources, rules, workspace configurations, and more
+ from Databricks notebooks.
+
+ ## Features
+
+ * **Simple Authentication**: Automatic workspace detection in Databricks notebooks
+ * **Datasource Management**: Create, update, list, and delete datasources
+ * **Rule Management**: Define and manage security detection rules to identify threats
+ * **Workspace Configuration**: Update and retrieve DASL's workspace-level settings
+
+ ## Installation
+
+ Install from PyPI:
+
+ ```bash
+ pip install dasl-client
+ ```
+
+ ## Quick Start
+
+ ### Databricks Notebook Environment (Recommended)
+
+ The DASL client works best in Databricks notebooks, where it authenticates automatically:
+
+ ```python
+ from dasl_client import Client
+
+ # Automatically detects the Databricks context and authenticates
+ client = Client.for_workspace()
+ print("Connected to DASL!")
+
+ # List existing datasources
+ print("Existing datasources:")
+ for datasource in client.list_datasources():
+     print(f" - {datasource.metadata.name}")
+
+ # List detection rules
+ print("Existing detection rules:")
+ for rule in client.list_rules():
+     print(f" - {rule.metadata.name}")
+ ```
+
+ ### Creating a Datasource
+
+ ```python
+ # Autoloader and GoldSpec are assumed to be exported by dasl_client
+ # alongside the other types used below.
+ from dasl_client import DataSource, Autoloader, Schedule, BronzeSpec, SilverSpec, GoldSpec
+
+ # Create a new datasource
+ datasource = DataSource(
+     source="aws",
+     source_type="cloudtrail",
+     autoloader=Autoloader(
+         enabled=True,
+         schedule=Schedule(
+             at_least_every="1h",
+             enabled=True
+         )
+     ),
+     bronze=BronzeSpec(
+         bronze_table="security_logs_bronze",
+         skip_bronze_loading=False
+     ),
+     silver=SilverSpec(
+         # Configure the silver layer here; see the API reference for more details
+     ),
+     gold=GoldSpec(
+         # Configure the gold layer here; see the API reference for more details
+     )
+ )
+
+ # Create the datasource
+ created_datasource = client.create_datasource(datasource)
+ print(f"Created datasource: {created_datasource.metadata.name}")
+ ```
+
+ ### Creating a Detection Rule
+
+ ```python
+ from datetime import datetime
+
+ from dasl_client.types import Rule, Schedule
+
+ # Create a new detection rule that flags blocked HTTP activity
+ rule = Rule(
+     schedule=Schedule(
+         at_least_every="2h",
+         enabled=True,
+     ),
+     input=Rule.Input(
+         stream=Rule.Input.Stream(
+             tables=[
+                 Rule.Input.Stream.Table(name="http_activity"),
+             ],
+             filter="disposition = 'Blocked'",
+             starting_timestamp=datetime(2025, 7, 8, 16, 47, 30),
+         ),
+     ),
+     output=Rule.Output(
+         summary="record was blocked",
+     ),
+ )
+
+ try:
+     created_rule = client.create_rule("Detect Blocked HTTP Activity", rule)
+     print(f"Successfully created rule: {created_rule.metadata.name}")
+ except Exception as e:
+     print(f"Error creating rule: {e}")
+ ```
+
+ ## Requirements
+
+ - Python 3.8+
+ - Access to a Databricks workspace with DASL enabled
+ - `databricks-sdk>=0.41.0`
+ - `pydantic>=2`
+
+ ## Documentation
+
+ For complete DASL Client documentation, examples, and API reference:
+
+ - [DASL Client Documentation](https://antimatter-dasl-client.readthedocs-hosted.com/)
+ - [API Reference](https://antimatter-dasl-client.readthedocs-hosted.com/en/latest/api-reference/)
+ - [Quickstart Guide](https://antimatter-dasl-client.readthedocs-hosted.com/en/latest/quickstart.html)
+
+ ## Support
+
+ - **Email**: support@antimatter.io
+ - **Documentation**: [DASL Documentation](https://docs.sl.antimatter.io)
@@ -1,6 +1,6 @@
  LICENSE
+ README.md
  pyproject.toml
- setup.py
  dasl_client/__init__.py
  dasl_client/client.py
  dasl_client/exec_rule.py
@@ -1,4 +1,4 @@
- dasl_api==0.1.25
+ dasl_api==0.1.26
  databricks-sdk>=0.41.0
  pydantic>=2
  typing_extensions>=4.10.0
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
  [project]
  name = "dasl_client"
- version = "1.0.26"
+ version = "1.0.27"
  description = "The DASL client library used for interacting with the DASL workspace"
  readme = "README.md"
  authors = [
@@ -13,7 +13,7 @@ authors = [
  requires-python = ">=3.8"
 
  dependencies = [
-     "dasl_api==0.1.25",
+     "dasl_api==0.1.26",
      "databricks-sdk>=0.41.0",
      "pydantic>=2",
      "typing_extensions>=4.10.0",
@@ -1,19 +0,0 @@
- Metadata-Version: 2.4
- Name: dasl_client
- Version: 1.0.26
- Summary: The DASL client library used for interacting with the DASL workspace
- Home-page: https://github.com/antimatter/asl
- Author: Antimatter Team
- Author-email: Antimatter Team <support@antimatter.io>
- Requires-Python: >=3.8
- Description-Content-Type: text/markdown
- License-File: LICENSE
- Requires-Dist: dasl_api==0.1.25
- Requires-Dist: databricks-sdk>=0.41.0
- Requires-Dist: pydantic>=2
- Requires-Dist: typing_extensions>=4.10.0
- Requires-Dist: pyyaml==6.0.2
- Dynamic: author
- Dynamic: home-page
- Dynamic: license-file
- Dynamic: requires-python
@@ -1,3 +0,0 @@
- {
-     "us-east-1": "https://api.sl.us-east-1.cloud.databricks.com"
- }
@@ -1,19 +0,0 @@
- Metadata-Version: 2.4
- Name: dasl_client
- Version: 1.0.26
- Summary: The DASL client library used for interacting with the DASL workspace
- Home-page: https://github.com/antimatter/asl
- Author: Antimatter Team
- Author-email: Antimatter Team <support@antimatter.io>
- Requires-Python: >=3.8
- Description-Content-Type: text/markdown
- License-File: LICENSE
- Requires-Dist: dasl_api==0.1.25
- Requires-Dist: databricks-sdk>=0.41.0
- Requires-Dist: pydantic>=2
- Requires-Dist: typing_extensions>=4.10.0
- Requires-Dist: pyyaml==6.0.2
- Dynamic: author
- Dynamic: home-page
- Dynamic: license-file
- Dynamic: requires-python
@@ -1,16 +0,0 @@
- # setup.py
-
- from setuptools import setup, find_packages
-
- setup(
-     name="dasl-client",
-     version="0.0.0",
-     author="Antimatter Team",
-     author_email="support@antimatter.io",
-     description="The DASL client library used for interacting with the DASL client.",
-     long_description="TODO: Link to docs page or README.md.",
-     long_description_content_type="text/markdown",
-     url="https://github.com/antimatter/asl",
-     packages=find_packages(),
-     python_requires=">=3.8",
- )