dasl-client 1.0.1__tar.gz → 1.0.31__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dasl_client-1.0.31/PKG-INFO +144 -0
- dasl_client-1.0.31/README.md +129 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/__init__.py +1 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/auth/auth.py +70 -59
- dasl_client-1.0.31/dasl_client/client.py +1061 -0
- dasl_client-1.0.31/dasl_client/conn/client_identifier.py +23 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/conn/conn.py +6 -4
- dasl_client-1.0.31/dasl_client/errors/errors.py +257 -0
- dasl_client-1.0.31/dasl_client/exec_rule.py +92 -0
- dasl_client-1.0.31/dasl_client/helpers.py +37 -0
- dasl_client-1.0.31/dasl_client/metadata.py +106 -0
- dasl_client-1.0.31/dasl_client/preset_development/__init__.py +4 -0
- dasl_client-1.0.31/dasl_client/preset_development/errors.py +224 -0
- dasl_client-1.0.31/dasl_client/preset_development/preview_engine.py +576 -0
- dasl_client-1.0.31/dasl_client/preset_development/preview_parameters.py +714 -0
- dasl_client-1.0.31/dasl_client/preset_development/stage.py +715 -0
- dasl_client-1.0.31/dasl_client/regions.json +6 -0
- dasl_client-1.0.31/dasl_client/regions.py +18 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/types/__init__.py +1 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/types/admin_config.py +10 -7
- dasl_client-1.0.31/dasl_client/types/content.py +245 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/types/datasource.py +247 -154
- dasl_client-1.0.31/dasl_client/types/dbui.py +544 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/types/rule.py +187 -106
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/types/types.py +72 -52
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/types/workspace_config.py +290 -136
- dasl_client-1.0.31/dasl_client.egg-info/PKG-INFO +144 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client.egg-info/SOURCES.txt +12 -5
- dasl_client-1.0.31/dasl_client.egg-info/requires.txt +5 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/pyproject.toml +15 -4
- dasl_client-1.0.1/PKG-INFO +0 -33
- dasl_client-1.0.1/README.md +0 -19
- dasl_client-1.0.1/dasl_client/client.py +0 -588
- dasl_client-1.0.1/dasl_client/conn/user_agent.py +0 -11
- dasl_client-1.0.1/dasl_client/errors/errors.py +0 -138
- dasl_client-1.0.1/dasl_client/helpers.py +0 -24
- dasl_client-1.0.1/dasl_client/types/dbui.py +0 -64
- dasl_client-1.0.1/dasl_client.egg-info/PKG-INFO +0 -33
- dasl_client-1.0.1/dasl_client.egg-info/requires.txt +0 -3
- dasl_client-1.0.1/setup.py +0 -16
- dasl_client-1.0.1/test/test_api_surface.py +0 -254
- dasl_client-1.0.1/test/test_marshaling.py +0 -736
- {dasl_client-1.0.1 → dasl_client-1.0.31}/LICENSE +0 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/auth/__init__.py +0 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/conn/__init__.py +0 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/errors/__init__.py +0 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/types/helpers.py +0 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client.egg-info/dependency_links.txt +0 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client.egg-info/top_level.txt +0 -0
- {dasl_client-1.0.1 → dasl_client-1.0.31}/setup.cfg +0 -0

dasl_client-1.0.31/PKG-INFO (new file)

@@ -0,0 +1,144 @@

Metadata-Version: 2.4
Name: dasl_client
Version: 1.0.31
Summary: The DASL client library used for interacting with the DASL workspace
Author-email: Antimatter Team <support@antimatter.io>
Requires-Python: >=3.8
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: dasl_api==0.1.28
Requires-Dist: databricks-sdk>=0.41.0
Requires-Dist: pydantic>=2
Requires-Dist: typing_extensions>=4.10.0
Requires-Dist: pyyaml==6.0.2
Dynamic: license-file

# DASL Client Library

The DASL (Databricks Antimatter Security Lakehouse) Client Library is a Python SDK for interacting with DASL services, letting you manage datasources, rules, workspace configurations, and more from Databricks notebooks.

## Features

* **Simple Authentication**: Automatic workspace detection in Databricks notebooks
* **Datasource Management**: Create, update, list, and delete datasources
* **Rule Management**: Define and manage security detection rules to identify threats
* **Workspace Configuration**: Update and retrieve DASL's workspace-level settings (see the sketch after this list)
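
The workspace-configuration feature has no example in this README, so here is a minimal sketch of the intended shape. The `get_workspace_config` and `put_workspace_config` names are illustrative assumptions, not confirmed methods; consult the API reference for the actual calls.

```python
from dasl_client import Client

client = Client.for_workspace()

# Fetch the current workspace-level settings
# (hypothetical accessor name, for illustration only)
config = client.get_workspace_config()

# Adjust fields on the returned config object, then write it back
# (hypothetical mutator name; see the API reference for the real call)
client.put_workspace_config(config)
```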

## Installation

Install from PyPI:

```bash
pip install dasl-client
```

## Quick Start

### Databricks Notebook Environment (Recommended)

The DASL client works best in Databricks notebooks, where authentication is automatic:

```python
from dasl_client import Client

# Automatically detects the Databricks context and authenticates
client = Client.for_workspace()
print("Connected to DASL!")

# List existing datasources
print("Existing datasources:")
for datasource in client.list_datasources():
    print(f"  - {datasource.metadata.name}")

# List detection rules
print("Existing detection rules:")
for rule in client.list_rules():
    print(f"  - {rule.metadata.name}")
```

### Creating a Datasource

```python
from dasl_client import Datasource, Autoloader, Schedule, BronzeSpec, SilverSpec, GoldSpec

# Define a new datasource
datasource = Datasource(
    source="aws",
    source_type="cloudtrail",
    autoloader=Autoloader(
        enabled=True,
        schedule=Schedule(
            at_least_every="1h",
            enabled=True,
        ),
    ),
    bronze=BronzeSpec(
        bronze_table="security_logs_bronze",
        skip_bronze_loading=False,
    ),
    silver=SilverSpec(
        # Configure the silver layer here; see the API reference for details
    ),
    gold=GoldSpec(
        # Configure the gold layer here; see the API reference for details
    ),
)

# Create the datasource
created_datasource = client.create_datasource(datasource)
print(f"Created datasource: {created_datasource.metadata.name}")
```
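
Datasources support a full lifecycle (create, update, list, delete). The sketch below shows update and delete; `get_datasource`, `update_datasource`, and `delete_datasource` are assumed names modeled on the documented `create_datasource` and `list_datasources`, so verify them against the API reference.

```python
# Fetch the datasource back by name (hypothetical getter)
existing = client.get_datasource(created_datasource.metadata.name)

# Tighten the autoloader schedule, then persist the change (hypothetical update call)
existing.autoloader.schedule.at_least_every = "30m"
client.update_datasource(existing.metadata.name, existing)

# Remove the datasource when it is no longer needed (hypothetical delete call)
client.delete_datasource(existing.metadata.name)
```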

### Creating a Detection Rule

```python
from datetime import datetime

from dasl_client.types import Rule, Schedule

# Create a new detection rule that flags blocked HTTP activity
rule = Rule(
    schedule=Schedule(
        at_least_every="2h",
        enabled=True,
    ),
    input=Rule.Input(
        stream=Rule.Input.Stream(
            tables=[
                Rule.Input.Stream.Table(name="http_activity"),
            ],
            filter="disposition = 'Blocked'",
            starting_timestamp=datetime(2025, 7, 8, 16, 47, 30),
        ),
    ),
    output=Rule.Output(
        summary="record was blocked",
    ),
)

try:
    created_rule = client.create_rule("Detect Blocked HTTP Activity", rule)
    print(f"Successfully created rule: {created_rule.metadata.name}")
except Exception as e:
    print(f"Error creating rule: {e}")
```
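
Rules can be managed after creation in the same way. In this sketch, `get_rule` and `update_rule` are assumed names following the pattern of `create_rule` and `list_rules`; confirm them in the API reference.

```python
# Pause a rule by disabling its schedule, then persist the change
# (hypothetical getter and update calls)
rule = client.get_rule("Detect Blocked HTTP Activity")
rule.schedule.enabled = False
client.update_rule("Detect Blocked HTTP Activity", rule)
```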

## Requirements

- Python 3.8+
- Access to a Databricks workspace with DASL enabled
- `databricks-sdk>=0.41.0`
- `pydantic>=2`

## Documentation

For complete DASL Client documentation, examples, and API reference:

- [DASL Client Documentation](https://antimatter-dasl-client.readthedocs-hosted.com/)
- [API Reference](https://antimatter-dasl-client.readthedocs-hosted.com/en/latest/api-reference/)
- [Quickstart Guide](https://antimatter-dasl-client.readthedocs-hosted.com/en/latest/quickstart.html)

## Support

- **Email**: support@antimatter.io
- **Documentation**: [DASL Documentation](https://docs.sl.antimatter.io)

dasl_client-1.0.31/README.md (new file)

@@ -0,0 +1,129 @@

The new README.md duplicates the README portion of the PKG-INFO above verbatim (from "# DASL Client Library" through the Support section), so its 129 added lines are not repeated here.

{dasl_client-1.0.1 → dasl_client-1.0.31}/dasl_client/auth/auth.py

@@ -1,4 +1,5 @@
 import abc
+import base64
 import time
 from datetime import datetime
 
@@ -11,7 +12,8 @@ from dasl_api import (
 from databricks.sdk.errors import ResourceDoesNotExist
 
 from dasl_client.conn.conn import get_base_conn
-from dasl_client.errors.errors import
+from dasl_client.errors.errors import error_handler
+
 from databricks.sdk import WorkspaceClient
 from typing import Optional
 
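
This hunk retires the old decorator import from `dasl_client.errors.errors` (the removed line is truncated in this view) in favor of an `error_handler` context manager. Purely as an illustration (the real implementation lives in `dasl_client/errors/errors.py` and is not shown in this diff), such a context manager could be shaped like this:

```python
from contextlib import contextmanager

@contextmanager
def error_handler(workspace_url: str, host: str):
    """Illustrative sketch: surface API failures with workspace context attached."""
    try:
        yield
    except Exception as exc:
        # A real implementation would map specific dasl_api exceptions onto
        # dasl_client error types; this sketch just adds context and re-raises.
        raise RuntimeError(
            f"DASL request to {host} (workspace {workspace_url}) failed: {exc}"
        ) from exc
```

Unlike the removed `@handle_errors` decorator, a context manager can wrap just the network calls inside each `refresh()` body, which is what the hunks below do.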

@@ -66,23 +68,26 @@ class ServiceAccountKeyAuth(Authorization):
         """
         return self._workspace
 
-    @handle_errors
     def refresh(self):
         """
         A helper function to refresh the bearer token used for authentication.
         :return:
         """
- [4 removed lines elided in this view]
+        workspace_url = f"https://{self._workspace}"
+        host = self._client.configuration.host
+
+        with error_handler(workspace_url=workspace_url, host=host):
+            req = WorkspaceV1AuthenticateRequest(
+                service_account_key=self._service_account_key
+            )
+            handler = api.WorkspaceV1Api(api_client=self._client)
 
- [6 removed lines elided in this view]
+            resp = handler.workspace_v1_authenticate(
+                workspace=self._workspace, workspace_v1_authenticate_request=req
+            )
+            self._client.set_default_header("Authorization", f"Bearer {resp.token}")
+            verification = api.DbuiV1Api(self._client).dbui_v1_verify_auth()
+            self.expiry = verification.expiry
 
 
 class DatabricksTokenAuth(Authorization):

@@ -115,23 +120,26 @@ class DatabricksTokenAuth(Authorization):
         """
         return self._workspace
 
-    @handle_errors
     def refresh(self):
         """
         A helper function to refresh the bearer token used for authentication.
         :return:
         """
- [4 removed lines elided in this view]
+        workspace_url = f"https://{self._workspace}"
+        host = self._client.configuration.host
+
+        with error_handler(workspace_url=workspace_url, host=host):
+            req = WorkspaceV1AuthenticateRequest(
+                databricks_api_token=self._databricks_token
+            )
+            handler = api.WorkspaceV1Api(api_client=self._client)
 
- [6 removed lines elided in this view]
+            resp = handler.workspace_v1_authenticate(
+                workspace=self._workspace, workspace_v1_authenticate_request=req
+            )
+            self._client.set_default_header("Authorization", f"Bearer {resp.token}")
+            verification = api.DbuiV1Api(self._client).dbui_v1_verify_auth()
+            self.expiry = verification.expiry
 
 
 class DatabricksSecretAuth(Authorization):

@@ -163,44 +171,47 @@ class DatabricksSecretAuth(Authorization):
         """
         return self._workspace
 
-    @handle_errors
     def refresh(self):
         """
         A helper function to refresh the bearer token used for authentication.
         :return:
         """
- [35 removed lines elided in this view]
+        workspace_url = f"https://{self._workspace}"
+        host = self._client.configuration.host
+
+        with error_handler(workspace_url=workspace_url, host=host):
+            # First we do a pre-authenticate call to refresh the secret. It doesn't really matter if we race
+            # here with others, as long as the secret ends up with a recent value in it. The secret can be used
+            # more than once, but it does expire
+            req = WorkspaceV1RequestSecretRequest(
+                principalName=self._principal,
+            )
+            handler = api.WorkspaceV1Api(api_client=self._client)
+            resp = handler.workspace_v1_request_secret(
+                workspace=self._workspace, workspace_v1_request_secret_request=req
+            )
+            secret_name = resp.secret_name
+            secret_value = ""
+            for tries in range(3):
+                try:
+                    secret_value = (
+                        WorkspaceClient().secrets.get_secret(secret_name, "token").value
+                    )
+                    break
+                except ResourceDoesNotExist:
+                    # Maybe there is a race here, let's retry
+                    time.sleep(0.5)
+            if len(secret_value) == 0:
+                raise RuntimeError(f"failed to complete secret auth")
+
+            req = WorkspaceV1AuthenticateRequest(
+                databricks_secret=base64.b64decode(secret_value).decode("utf-8"),
+            )
+            handler = api.WorkspaceV1Api(api_client=self._client)
+
+            resp = handler.workspace_v1_authenticate(
+                workspace=self._workspace, workspace_v1_authenticate_request=req
+            )
+            self._client.set_default_header("Authorization", f"Bearer {resp.token}")
+            verification = api.DbuiV1Api(self._client).dbui_v1_verify_auth()
+            self.expiry = verification.expiry