dasl-client 1.0.22__py3-none-any.whl → 1.0.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dasl-client might be problematic.

dasl_client/types/datasource.py CHANGED
@@ -32,6 +32,7 @@ from dasl_api import (
     CoreV1DataSourceFieldUtils,
     CoreV1DataSourceFieldUtilsUnreferencedColumns,
     CoreV1DataSourceFieldUtilsJsonExtractInner,
+    CoreV1DataSourcePrimaryKeySpec,
 )
 
 from .helpers import Helpers
@@ -354,6 +355,11 @@ class BronzeSpec(BaseModel):
             The name of the bronze table to create and hold the imported data.
         skip_bronze_loading (Optional[bool]):
             Indicates whether to skip the bronze loading step.
+        load_as_single_variant (Optional[bool]):
+            Indicates whether to ingest data into a single VARIANT-typed column called `data`.
+        pre_transform (Optional[List[List[str]]]):
+            A list of pre-transform steps to execute.
+            The outer list forms stages and the inner list contains SQL select expressions to be executed within each stage.
     """
 
     class Clustering(BaseModel):
@@ -391,6 +397,8 @@ class BronzeSpec(BaseModel):
     clustering: Optional["BronzeSpec.Clustering"] = None
     bronze_table: Optional[str] = None
     skip_bronze_loading: Optional[bool] = None
+    load_as_single_variant: Optional[bool] = None
+    pre_transform: Optional[List[List[str]]] = None
 
     @staticmethod
     def from_api_obj(obj: Optional[CoreV1DataSourceSpecBronze]) -> "BronzeSpec":
@@ -400,6 +408,8 @@ class BronzeSpec(BaseModel):
             clustering=BronzeSpec.Clustering.from_api_obj(obj.clustering),
             bronze_table=obj.bronze_table,
             skip_bronze_loading=obj.skip_bronze_loading,
+            load_as_single_variant=obj.load_as_single_variant,
+            pre_transform=obj.pre_transform,
         )
 
     def to_api_obj(self) -> CoreV1DataSourceSpecBronze:
@@ -407,6 +417,8 @@ class BronzeSpec(BaseModel):
             clustering=Helpers.maybe(lambda o: o.to_api_obj(), self.clustering),
             bronze_table=self.bronze_table,
             skip_bronze_loading=self.skip_bronze_loading,
+            load_as_single_variant=self.load_as_single_variant,
+            pre_transform=self.pre_transform,
         )
 
 
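Taken together, the three BronzeSpec hunks wire the new ingest options through the model fields and both converters. A minimal usage sketch, assuming BronzeSpec is imported from dasl_client.types.datasource; the table name and stage expressions (Databricks VARIANT path syntax) are illustrative, not part of the package:

```python
from dasl_client.types.datasource import BronzeSpec

# Hypothetical bronze config: land raw records in one VARIANT column `data`,
# then run two pre-transform stages of SQL select expressions.
bronze = BronzeSpec(
    bronze_table="http_logs_bronze",
    load_as_single_variant=True,
    pre_transform=[
        # stage 1: pull typed fields out of the VARIANT payload
        ["data:ts::timestamp AS event_time", "data:remote_addr::string AS src_ip"],
        # stage 2: project only what downstream layers need
        ["event_time", "src_ip"],
    ],
)
api_obj = bronze.to_api_obj()  # CoreV1DataSourceSpecBronze with the new fields set
```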
@@ -1193,12 +1205,16 @@ class DataSource(BaseModel):
             The schedule for data ingestion.
         custom (Optional[DataSource.CustomNotebook]):
             A custom notebook for the datasource.
+        primary_key (Optional[PrimaryKey]):
+            Primary key configuration of the datasource.
         use_preset (Optional[str]):
             The name of the preset to use for this data source.
         autoloader (Optional[DataSource.Autoloader]):
             Autoloader configuration.
         bronze (Optional[BronzeSpec]):
             Bronze table configuration.
+        compute_mode (Optional[str]):
+            The compute mode to use for this datasource's job.
         silver (Optional[SilverSpec]):
             Silver transformation configuration.
         gold (Optional[GoldSpec]):
@@ -1233,6 +1249,35 @@ class DataSource(BaseModel):
                 notebook=self.notebook,
             )
 
+    class PrimaryKey(BaseModel):
+        """
+        PrimaryKey configuration for DataSource.
+
+        Attributes:
+            time_column (str): column name used as the timestamp portion of the sortable synthetic key
+            additional_columns (List[str]): list of columns to compute the hash key over
+        """
+
+        time_column: str
+        additional_columns: List[str]
+
+        @staticmethod
+        def from_api_obj(
+            obj: Optional[CoreV1DataSourcePrimaryKeySpec],
+        ) -> Optional["DataSource.PrimaryKey"]:
+            if obj is None:
+                return None
+            return DataSource.PrimaryKey(
+                time_column=obj.time_column,
+                additional_columns=obj.additional_columns,
+            )
+
+        def to_api_obj(self) -> CoreV1DataSourcePrimaryKeySpec:
+            return CoreV1DataSourcePrimaryKeySpec(
+                timeColumn=self.time_column,
+                additionalColumns=self.additional_columns,
+            )
+
     class Autoloader(BaseModel):
         """
         Autoloader configuration for the DataSource.
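Note that to_api_obj passes camelCase keyword arguments (timeColumn, additionalColumns) while from_api_obj reads snake_case attributes, so the generated CoreV1DataSourcePrimaryKeySpec model presumably accepts both spellings via field aliases. A round-trip sketch with illustrative column names:

```python
from dasl_client.types.datasource import DataSource

key = DataSource.PrimaryKey(
    time_column="event_time",               # timestamp portion of the sortable synthetic key
    additional_columns=["host", "src_ip"],  # columns hashed into the key
)
spec = key.to_api_obj()                     # CoreV1DataSourcePrimaryKeySpec
assert DataSource.PrimaryKey.from_api_obj(spec) == key
```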
@@ -1310,9 +1355,11 @@ class DataSource(BaseModel):
     source_type: Optional[str] = None
     schedule: Optional[Schedule] = None
     custom: Optional["DataSource.CustomNotebook"] = None
+    primary_key: Optional["DataSource.PrimaryKey"] = None
     use_preset: Optional[str] = None
     use_preset_version: Optional[int] = None
     autoloader: Optional["DataSource.Autoloader"] = None
+    compute_mode: Optional[str] = None
     bronze: Optional[BronzeSpec] = None
     silver: Optional[SilverSpec] = None
     gold: Optional[GoldSpec] = None
@@ -1326,9 +1373,11 @@ class DataSource(BaseModel):
             source_type=obj.spec.source_type,
             schedule=Schedule.from_api_obj(obj.spec.schedule),
             custom=DataSource.CustomNotebook.from_api_obj(obj.spec.custom),
+            primary_key=DataSource.PrimaryKey.from_api_obj(obj.spec.primary_key),
             use_preset=obj.spec.use_preset,
             use_preset_version=obj.spec.use_preset_version,
             autoloader=DataSource.Autoloader.from_api_obj(obj.spec.autoloader),
+            compute_mode=obj.spec.compute_mode,
             bronze=BronzeSpec.from_api_obj(obj.spec.bronze),
             silver=SilverSpec.from_api_obj(obj.spec.silver),
             gold=GoldSpec.from_api_obj(obj.spec.gold),
@@ -1346,9 +1395,11 @@ class DataSource(BaseModel):
             source_type=self.source_type,
             schedule=Helpers.maybe(to_api_obj, self.schedule),
             custom=Helpers.maybe(to_api_obj, self.custom),
+            primary_key=Helpers.maybe(to_api_obj, self.primary_key),
             use_preset=self.use_preset,
             use_preset_version=self.use_preset_version,
             autoloader=Helpers.maybe(to_api_obj, self.autoloader),
+            compute_mode=self.compute_mode,
             bronze=Helpers.maybe(to_api_obj, self.bronze),
             silver=Helpers.maybe(to_api_obj, self.silver),
             gold=Helpers.maybe(to_api_obj, self.gold),
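With the three hunks above, primary_key and compute_mode flow through construction, from_api_obj, and to_api_obj. A hedged sketch of a DataSource using both, assuming the fields not shown in this diff default to None; the source type and compute mode values are illustrative, since the diff does not enumerate valid modes:

```python
ds = DataSource(
    source_type="http_logs",    # illustrative
    compute_mode="serverless",  # assumed value
    primary_key=DataSource.PrimaryKey(
        time_column="event_time",
        additional_columns=["host"],
    ),
    bronze=bronze,              # the BronzeSpec from the earlier sketch
)
```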
dasl_client/types/rule.py CHANGED
@@ -1,5 +1,6 @@
 from pydantic import BaseModel
 from typing import Dict, List, Optional, Union
+from datetime import datetime, timezone
 
 from dasl_api import (
     CoreV1Rule,
@@ -319,9 +320,25 @@ class Rule(BaseModel):
 
         Attributes:
             tables (Optional[List[Rule.Input.Stream.Table]]):
+                List of input tables and join rules.
             filter (Optional[str]):
+                A filter expression to be applied to the input stream.
+                Note that this cannot be used in conjunction with a
+                custom SQL expression (i.e. the sql field).
             sql (Optional[str]):
+                A custom SQL expression to apply to the input stream
+                before matching. Note that this cannot be used in
+                conjunction with a filter expression (i.e. the
+                filter member).
             custom (Optional[Rule.Input.CustomStream]):
+            starting_timestamp (Optional[datetime]):
+                Starting timestamp for streaming input data. If this
+                value is not specified, then the timestamp when this rule
+                was created will be used. This setting is used to determine
+                the starting point for streaming historical data, and only
+                applies on the first run of the rule. Once some data has
+                been streamed and a checkpoint has been created, this
+                setting no longer has any impact.
         """
 
         class Table(BaseModel):
@@ -427,6 +444,7 @@ class Rule(BaseModel):
         filter: Optional[str] = None
         sql: Optional[str] = None
         custom: Optional["Rule.Input.CustomStream"] = None
+        starting_timestamp: Optional[datetime] = None
 
         @staticmethod
         def from_api_obj(
@@ -440,22 +458,37 @@ class Rule(BaseModel):
                     Rule.Input.Stream.Table.from_api_obj(item)
                     for item in obj.tables
                 ]
+
+            starting_timestamp = obj.starting_timestamp
+            if starting_timestamp is not None and starting_timestamp.tzinfo is None:
+                starting_timestamp = starting_timestamp.replace(tzinfo=timezone.utc)
+
             return Rule.Input.Stream(
                 tables=tables,
                 filter=obj.filter,
                 sql=obj.sql,
                 custom=Rule.Input.CustomStream.from_api_obj(obj.custom),
+                starting_timestamp=starting_timestamp,
             )
 
         def to_api_obj(self) -> CoreV1RuleSpecInputStream:
             tables = None
             if self.tables is not None:
                 tables = [item.to_api_obj() for item in self.tables]
+
+            # tzinfo must be attached to the starting timestamp or else
+            # the serialization (without trailing time zone) will be
+            # rejected by the server.
+            starting_timestamp = self.starting_timestamp
+            if starting_timestamp is not None and starting_timestamp.tzinfo is None:
+                starting_timestamp = starting_timestamp.replace(tzinfo=timezone.utc)
+
             return CoreV1RuleSpecInputStream(
                 tables=tables,
                 filter=self.filter,
                 sql=self.sql,
                 custom=Helpers.maybe(lambda o: o.to_api_obj(), self.custom),
+                starting_timestamp=starting_timestamp,
             )
 
         class Batch(BaseModel):
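The comment in to_api_obj explains the why: a naive datetime serializes without a UTC offset, which the server rejects. The same normalization pattern in isolation, as a standalone illustration:

```python
from datetime import datetime, timezone

ts = datetime(2024, 5, 1, 12, 0, 0)       # naive: no tzinfo
if ts.tzinfo is None:
    ts = ts.replace(tzinfo=timezone.utc)  # attach UTC without shifting the wall-clock time
print(ts.isoformat())                     # 2024-05-01T12:00:00+00:00 (offset now present)
```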
dasl_client/types/workspace_config.py CHANGED
@@ -8,19 +8,21 @@ from dasl_api import (
     WorkspaceV1ExportConfigWebhookConfigDestination,
     WorkspaceV1WorkspaceConfig,
     WorkspaceV1WorkspaceConfigSpec,
+    WorkspaceV1WorkspaceConfigSpecDatasources,
     WorkspaceV1WorkspaceConfigSpecDefaultConfig,
     WorkspaceV1WorkspaceConfigSpecDetectionRuleMetadata,
     WorkspaceV1WorkspaceConfigSpecManagedRetentionInner,
     WorkspaceV1WorkspaceConfigSpecManagedRetentionInnerOverridesInner,
     WorkspaceV1WorkspaceConfigSpecObservables,
     WorkspaceV1WorkspaceConfigSpecObservablesKindsInner,
+    WorkspaceV1WorkspaceConfigSpecRules,
     WorkspaceV1WorkspaceConfigSpecSystemTablesConfig,
     WorkspaceV1DefaultConfig,
     WorkspaceV1DefaultConfigComputeGroupOverridesValue,
 )
 
 from .helpers import Helpers
-from .types import Metadata, ResourceStatus
+from .types import Metadata, ResourceStatus, Schedule
 
 
 class ExportConfig(BaseModel):
  class ExportConfig(BaseModel):
@@ -238,12 +240,96 @@ class WorkspaceConfigObservables(BaseModel):
238
240
  )
239
241
 
240
242
 
243
+ class DatasourcesConfig(BaseModel):
244
+ """
245
+ Configuration settings used by Datasources.
246
+
247
+ Attributes:
248
+ bronze_schema (Optional[str]):
249
+ Name of the bronze schema in the catalog.
250
+ silver_schema (Optional[str]):
251
+ Name of the silver schema in the catalog.
252
+ gold_schema (Optional[str]):
253
+ Name of the gold schema in the catalog.
254
+ catalog_name (Optional[str]):
255
+ The catalog name to use as the resource's default.
256
+ checkpoint_location (Optional[str]):
257
+ The base checkpoint location to use in Rule notebooks.
258
+ default_compute_mode (Optional[str]):
259
+ The default compute mode to use for datasource jobs.
260
+ """
261
+
262
+ catalog_name: Optional[str] = None
263
+ bronze_schema: Optional[str] = None
264
+ silver_schema: Optional[str] = None
265
+ gold_schema: Optional[str] = None
266
+ checkpoint_location: Optional[str] = None
267
+ default_compute_mode: Optional[str] = None
268
+
269
+ @staticmethod
270
+ def from_api_obj(
271
+ obj: Optional[WorkspaceV1WorkspaceConfigSpecDatasources],
272
+ ) -> Optional["DatasourcesConfig"]:
273
+ if obj is None:
274
+ return None
275
+
276
+ return DatasourcesConfig(
277
+ catalog_name=obj.catalog_name,
278
+ bronze_schema=obj.bronze_schema,
279
+ silver_schema=obj.silver_schema,
280
+ gold_schema=obj.gold_schema,
281
+ checkpoint_location=obj.checkpoint_location,
282
+ default_compute_mode=obj.default_compute_mode,
283
+ )
284
+
285
+ def to_api_obj(self) -> WorkspaceV1WorkspaceConfigSpecDatasources:
286
+ return WorkspaceV1WorkspaceConfigSpecDatasources(
287
+ catalog_name=self.catalog_name,
288
+ bronze_schema=self.bronze_schema,
289
+ silver_schema=self.silver_schema,
290
+ gold_schema=self.gold_schema,
291
+ checkpoint_location=self.checkpoint_location,
292
+ default_compute_mode=self.default_compute_mode,
293
+ )
294
+
295
+
296
+ class RulesConfig(BaseModel):
297
+ """
298
+ Configuration settings used by Rules.
299
+
300
+ Attributes:
301
+ checkpoint_location (Optional[str]):
302
+ The location to store checkpoints for streaming writes. If
303
+ not provided, the daslStoragePath will be used.
304
+ """
305
+
306
+ checkpoint_location: Optional[str] = None
307
+
308
+ @staticmethod
309
+ def from_api_obj(
310
+ obj: Optional[WorkspaceV1WorkspaceConfigSpecRules],
311
+ ) -> "RulesConfig":
312
+ if obj is None:
313
+ return None
314
+
315
+ return RulesConfig(
316
+ checkpoint_location=obj.checkpoint_location,
317
+ )
318
+
319
+ def to_api_obj(self) -> WorkspaceV1WorkspaceConfigSpecRules:
320
+ return WorkspaceV1WorkspaceConfigSpecRules(
321
+ checkpoint_location=self.checkpoint_location,
322
+ )
323
+
324
+
241
325
  class DefaultConfig(BaseModel):
242
326
  """
243
- Configuration of the schemas, notebook storage locations, checkpoint
244
- storage locations, and so forth, for each concrete resource type and
245
- a global fallback that applies to resources which do not have a
246
- specified DefaultConfig.
327
+ (DEPRECATED) Configuration of the schemas, notebook storage locations,
328
+ checkpoint storage locations, and so forth, for each concrete resource
329
+ type and a global fallback that applies to resources which do not have a
330
+ specified DefaultConfig. While it does still work, this field is
331
+ deprecated and should not be used; see DatasourcesConfig and RulesConfig
332
+ for alternatives.
247
333
 
248
334
  Attributes:
249
335
  datasources (Optional[DefaultConfig.Config]):
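A sketch of how the two new config blocks might be populated; the catalog, schema, and path names are illustrative, and valid compute modes are not enumerated in this diff:

```python
from dasl_client.types.workspace_config import DatasourcesConfig, RulesConfig

datasources = DatasourcesConfig(
    catalog_name="security",
    bronze_schema="bronze",
    silver_schema="silver",
    gold_schema="gold",
    default_compute_mode="classic",  # assumed value
)
rules = RulesConfig(
    # falls back to daslStoragePath when omitted
    checkpoint_location="/Volumes/security/dasl/checkpoints",
)
```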
@@ -369,8 +455,11 @@ class DefaultConfig(BaseModel):
 
     @staticmethod
     def from_api_obj(
-        obj: WorkspaceV1WorkspaceConfigSpecDefaultConfig,
-    ) -> "DefaultConfig":
+        obj: Optional[WorkspaceV1WorkspaceConfigSpecDefaultConfig],
+    ) -> Optional["DefaultConfig"]:
+        if obj is None:
+            return None
+
         return DefaultConfig(
             datasources=DefaultConfig.Config.from_api_obj(obj.datasources),
             transforms=DefaultConfig.Config.from_api_obj(obj.transforms),
@@ -578,9 +667,20 @@ class WorkspaceConfig(BaseModel):
             state.
         dasl_custom_presets_path (Optional[str]):
             An optional path to a directory containing user defined presets.
+        default_rule_schedule (Optional[Schedule]):
+            A default schedule for detections. If a detection is created without
+            a schedule, it will inherit the schedule provided here. Note that,
+            should this schedule be updated, it will affect all detections inheriting it.
         default_config (Optional[DefaultConfig]):
-            Configuration settings regarding storage of bronze, silver, and
-            gold tables and related assets for each resource type.
+            (DEPRECATED) Configuration settings regarding storage of bronze,
+            silver, and gold tables and related assets for each resource type.
+        default_custom_notebook_location (Optional[str]):
+            The storage location for custom user-provided notebooks. Also
+            used as the prefix for relative paths to custom notebooks.
+        datasources (Optional[DatasourcesConfig]):
+            Configuration items that apply specifically to datasources.
+        rules (Optional[RulesConfig]):
+            Configuration items that apply specifically to rules.
         managed_retention (Optional[List[ManagedRetention]]):
             Configuration of regular cleanup (i.e. pruning) jobs for various
             catalogs, schemas, and tables.
@@ -597,7 +697,11 @@ class WorkspaceConfig(BaseModel):
     observables: Optional[WorkspaceConfigObservables] = None
     dasl_storage_path: Optional[str] = None
     dasl_custom_presets_path: Optional[str] = None
+    default_rule_schedule: Optional[Schedule] = None
     default_config: Optional[DefaultConfig] = None
+    default_custom_notebook_location: Optional[str] = None
+    datasources: Optional[DatasourcesConfig] = None
+    rules: Optional[RulesConfig] = None
     managed_retention: Optional[List[ManagedRetention]] = None
     status: Optional[ResourceStatus] = None
@@ -627,7 +731,11 @@ class WorkspaceConfig(BaseModel):
             observables=WorkspaceConfigObservables.from_api_obj(spec.observables),
             dasl_storage_path=spec.dasl_storage_path,
             dasl_custom_presets_path=spec.dasl_custom_presets_path,
+            default_rule_schedule=spec.default_rule_schedule,
             default_config=DefaultConfig.from_api_obj(spec.default_config),
+            default_custom_notebook_location=spec.default_custom_notebook_location,
+            datasources=DatasourcesConfig.from_api_obj(spec.datasources),
+            rules=RulesConfig.from_api_obj(spec.rules),
             managed_retention=managed_retention,
             status=ResourceStatus.from_api_obj(obj.status),
         )
@@ -657,7 +765,11 @@ class WorkspaceConfig(BaseModel):
                 observables=Helpers.maybe(to_api_obj, self.observables),
                 dasl_storage_path=self.dasl_storage_path,
                 dasl_custom_presets_path=self.dasl_custom_presets_path,
+                default_rule_schedule=self.default_rule_schedule,
                 default_config=Helpers.maybe(to_api_obj, self.default_config),
+                default_custom_notebook_location=self.default_custom_notebook_location,
+                datasources=Helpers.maybe(to_api_obj, self.datasources),
+                rules=Helpers.maybe(to_api_obj, self.rules),
                 managed_retention=managed_retention,
             ),
             status=Helpers.maybe(to_api_obj, self.status),
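Since default_config is now deprecated, a new WorkspaceConfig would set the dedicated blocks instead. A hedged sketch reusing the datasources and rules objects from the previous example, and assuming the remaining fields default to None:

```python
from dasl_client.types.workspace_config import WorkspaceConfig

config = WorkspaceConfig(
    dasl_storage_path="/Volumes/security/dasl",  # illustrative path
    datasources=datasources,
    rules=rules,
    # default_config omitted: it still works but is deprecated
)
api_obj = config.to_api_obj()
```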
dasl_client-1.0.24.dist-info/METADATA ADDED
@@ -0,0 +1,18 @@
+Metadata-Version: 2.4
+Name: dasl_client
+Version: 1.0.24
+Summary: The DASL client library used for interacting with the DASL workspace
+Home-page: https://github.com/antimatter/asl
+Author: Antimatter Team
+Author-email: Antimatter Team <support@antimatter.io>
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: dasl_api==0.1.24
+Requires-Dist: databricks-sdk>=0.41.0
+Requires-Dist: pydantic>=2
+Requires-Dist: typing_extensions>=4.10.0
+Dynamic: author
+Dynamic: home-page
+Dynamic: license-file
+Dynamic: requires-python
dasl_client-1.0.24.dist-info/RECORD ADDED
@@ -0,0 +1,32 @@
+dasl_client/__init__.py,sha256=MuaH74tnEHwfymHtlK6GqeMHRRWaSDfS6hSzbMrd7iQ,150
+dasl_client/client.py,sha256=08QU-XfEUmpH-LmkI4CdZkZi8JIhRvReCwRAAiAfm8E,36541
+dasl_client/exec_rule.py,sha256=kn-Yo-9L0fjxbulyAghiIKO1SYcqv2XHZn45F8FvUzE,3599
+dasl_client/helpers.py,sha256=aXqI3uM4k9T3qTXSXVHZKtwxSlzTZ-vtvUfWnqizmiE,1091
+dasl_client/regions.json,sha256=PyMfnvPlpTSAP-a97FFP-yc-BrJdQWg_wVd8Sre12Z4,54
+dasl_client/regions.py,sha256=dHYW_IuxFQXkwdbXCeQWMRaYYxyF_j6eEglmfIXUIZo,442
+dasl_client/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+dasl_client/auth/auth.py,sha256=yTeijYYpfJVJ_wYyq0U6kAntg4xz5MzIR37_CpVR57k,7277
+dasl_client/conn/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+dasl_client/conn/client_identifier.py,sha256=kPrX0wPa6y7ifhKSb6dQriDSlIgPOUhBq7OoC73E7NU,657
+dasl_client/conn/conn.py,sha256=7o-2qeoVhzULQGW5l6OLYE5dZ60_8OU080ebV_-AW9Q,1433
+dasl_client/errors/__init__.py,sha256=lpH2HGF5kCRTk6MxpPEyY9ulTvsLBFKb4NnLuFFLZZA,40
+dasl_client/errors/errors.py,sha256=u-B8dR8zlxdNVeEdHi6UozX178jwJJ5ZJOGl9YjONRc,4008
+dasl_client/preset_development/__init__.py,sha256=9yC4gmQfombvYLThzo0pSfT5JMolfNVWFVQIuIg_XUA,131
+dasl_client/preset_development/errors.py,sha256=9tLYugMWH2oGxpibKPEVOnRWUuvo-ZS_wVEQuSY6YX4,7013
+dasl_client/preset_development/preview_engine.py,sha256=Y3NHYWpK0e8AWdwgTeDA2S5Su5CTPZSwFyNnkXXjAes,18031
+dasl_client/preset_development/preview_parameters.py,sha256=aZrpCkB2JrqZCp7Lqb2Tz5DMJOFc0qDMdmDxAI5tv98,20183
+dasl_client/preset_development/stage.py,sha256=z2DJmvdnxa-i-TS1yIDdS0rB-QrxQaxspTBm8LfmlZE,26577
+dasl_client/types/__init__.py,sha256=GsXC3eWuv21VTLPLPH9pzM95JByaKnKrPjJkh2rlZfQ,170
+dasl_client/types/admin_config.py,sha256=Kmx3Kuai9_LWMeO2NpWasRUgLihYSEXGtuYVfG0FkjU,2200
+dasl_client/types/content.py,sha256=HegSq2i7w78CH4kq4XwDyeaB8LuZWWwFZkg0tYTY8d0,7349
+dasl_client/types/datasource.py,sha256=GiTxkwsofPBZIVtiInmtVnnPnmxstlUSQtXP61FcgpE,55094
+dasl_client/types/dbui.py,sha256=k2WXNjfrEjXa-5iBlZ17pvFAs_jgbd-ir5NJl5sXvpA,16097
+dasl_client/types/helpers.py,sha256=gLGTvrssAKrdkQT9h80twEosld2egwhvj-zAudxWFPs,109
+dasl_client/types/rule.py,sha256=L19QLg07UWRayw9HYuVsSVCUw_LaGnty_9WqPZ6nHdE,27755
+dasl_client/types/types.py,sha256=DeUOfdYGOhUGEy7yKOfo0OYTXYRrs57yYgNLUbu7Tlc,8806
+dasl_client/types/workspace_config.py,sha256=w0f0paZ42yURiIFREJ6fG7zKaoIT7BU-Zw3nZMuFNMw,29426
+dasl_client-1.0.24.dist-info/licenses/LICENSE,sha256=M35UepUPyKmFkvENlkweeaMElheQqNoM5Emh8ADO-rs,4
+dasl_client-1.0.24.dist-info/METADATA,sha256=J2cWOcCXDSjKOo35RUKkLM2qfuwiuKb3_oPO8hj5mY0,562
+dasl_client-1.0.24.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+dasl_client-1.0.24.dist-info/top_level.txt,sha256=kIv8ox_2oJPjGB8_yuey5vvuPCyfY8kywG138f9oSOY,12
+dasl_client-1.0.24.dist-info/RECORD,,
dasl_client-1.0.24.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
+Generator: setuptools (80.9.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
dasl_client-1.0.22.dist-info/METADATA DELETED
@@ -1,34 +0,0 @@
-Metadata-Version: 2.1
-Name: dasl_client
-Version: 1.0.22
-Summary: The DASL client library used for interacting with the DASL workspace
-Home-page: https://github.com/antimatter/asl
-Author: Antimatter Team
-Author-email: Antimatter Team <support@antimatter.io>
-Requires-Python: >=3.8
-Description-Content-Type: text/markdown
-License-File: LICENSE
-Requires-Dist: dasl-api ==0.1.20
-Requires-Dist: databricks-sdk >=0.41.0
-Requires-Dist: pydantic >=2
-Requires-Dist: typing-extensions >=4.10.0
-
-# DASL Client Library
-
-This client library is used for interacting with the DASL services in python.
-
-## Requirements
-Python:
-- wheel
-- setuptools
-- asl_api
-
-Other:
-- Earthly
-
-## Build
-
-To build manually here:
-```bash
-python setup.py sdist bdist_wheel
-```
dasl_client-1.0.22.dist-info/RECORD DELETED
@@ -1,36 +0,0 @@
-dasl_client/__init__.py,sha256=E6gOgO8qg96Y38JKA-4LyNBvc2ytQPEfhdniYsCWBxA,127
-dasl_client/client.py,sha256=yNXomAWzvMWdbKuomVsTh_VSx9WwZiXVJ3Mq8bbpqew,27821
-dasl_client/helpers.py,sha256=L7ycxrqyG28glRRGZgsrVBdCJzXYCW7DB0hAvupGMuA,1118
-dasl_client/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dasl_client/auth/auth.py,sha256=yTeijYYpfJVJ_wYyq0U6kAntg4xz5MzIR37_CpVR57k,7277
-dasl_client/conn/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dasl_client/conn/client_identifier.py,sha256=kPrX0wPa6y7ifhKSb6dQriDSlIgPOUhBq7OoC73E7NU,657
-dasl_client/conn/conn.py,sha256=7o-2qeoVhzULQGW5l6OLYE5dZ60_8OU080ebV_-AW9Q,1433
-dasl_client/errors/__init__.py,sha256=lpH2HGF5kCRTk6MxpPEyY9ulTvsLBFKb4NnLuFFLZZA,40
-dasl_client/errors/errors.py,sha256=u-B8dR8zlxdNVeEdHi6UozX178jwJJ5ZJOGl9YjONRc,4008
-dasl_client/preset_development/__init__.py,sha256=9yC4gmQfombvYLThzo0pSfT5JMolfNVWFVQIuIg_XUA,131
-dasl_client/preset_development/errors.py,sha256=jsqBFMZtl7uHi6O9bBHnOt0UQ4WM9KN9x0uYtf5c268,5482
-dasl_client/preset_development/preview_engine.py,sha256=D8e3Ohds0KtyPec-iWJknh9GvlPbwHufOFF1gtj62kE,15735
-dasl_client/preset_development/preview_parameters.py,sha256=YjSJ00mEUcqF5KkJEPW6Wif8I4iaMIMxJeUSuyIS4x0,14640
-dasl_client/preset_development/stage.py,sha256=2FPOZvb_bCVpjrY5TsYB05BD4KYbrhgfAe9uZCQFkOk,23397
-dasl_client/types/__init__.py,sha256=GsXC3eWuv21VTLPLPH9pzM95JByaKnKrPjJkh2rlZfQ,170
-dasl_client/types/admin_config.py,sha256=Kmx3Kuai9_LWMeO2NpWasRUgLihYSEXGtuYVfG0FkjU,2200
-dasl_client/types/content.py,sha256=HegSq2i7w78CH4kq4XwDyeaB8LuZWWwFZkg0tYTY8d0,7349
-dasl_client/types/datasource.py,sha256=1r55rYHeN8bmTU190dWcpEWPJ64pJoAwOJy_pVcMP8o,52848
-dasl_client/types/dbui.py,sha256=k2WXNjfrEjXa-5iBlZ17pvFAs_jgbd-ir5NJl5sXvpA,16097
-dasl_client/types/helpers.py,sha256=gLGTvrssAKrdkQT9h80twEosld2egwhvj-zAudxWFPs,109
-dasl_client/types/rule.py,sha256=BqhWhT8Eh95UXNytd0PxVcjqYuWQcdN1tfKjUB4Tk74,25781
-dasl_client/types/types.py,sha256=DeUOfdYGOhUGEy7yKOfo0OYTXYRrs57yYgNLUbu7Tlc,8806
-dasl_client/types/workspace_config.py,sha256=RThg_THS_4leITWdzBPTWdR2ytq5Uk36m6nIOUMzFCM,24878
-test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-test/conftest.py,sha256=ZfNULJxVQ609GHxw9UsreTcbQMl3gbcTP_DKT1oySwQ,440
-test/constants.py,sha256=ed3xiemWDJVBlHDwn-iQToCbcaXD3AN-5r8HkURCqBs,438
-test/test_api_changes.py,sha256=RzLauhCkwLmf_gK5yZZ7R7TI9803XCGr-YCyv_jSc94,3827
-test/test_api_surface.py,sha256=nOxoxg9mVSpHLtEDiK98qbAarXsUzC3zTIUZ4e4KLAI,10940
-test/test_databricks_secret_auth.py,sha256=P1seBBHOLcCzJPLdRZlJZxeG62GUFKFbjsY8c7gTT_8,3613
-test/test_marshaling.py,sha256=DLy5C1lBAon9oD55tzrh98cbcii6OmpTPP4CBm4cvu0,37816
-dasl_client-1.0.22.dist-info/LICENSE,sha256=M35UepUPyKmFkvENlkweeaMElheQqNoM5Emh8ADO-rs,4
-dasl_client-1.0.22.dist-info/METADATA,sha256=pmJrFcebmhZ5GXiENQrjQoR_aVYmLIoZNBO5xItlaDs,741
-dasl_client-1.0.22.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-dasl_client-1.0.22.dist-info/top_level.txt,sha256=943P5S_qILHKZYxAvxPUeqOzM2yV18d5SBVKxzPw2OE,17
-dasl_client-1.0.22.dist-info/RECORD,,
test/__init__.py DELETED
File without changes
test/conftest.py DELETED
@@ -1,18 +0,0 @@
-import pytest
-
-from dasl_client import Client
-
-from .constants import *
-
-
-@pytest.fixture(scope="session")
-def api_client():
-    client = Client.new_workspace(
-        admin_email="test@antimatter.io",
-        app_client_id=app_client_id,
-        service_principal_id=databricks_client_id,
-        service_principal_secret=databricks_client_secret,
-        workspace_url=databricks_host,
-        dasl_host=dasl_host,
-    )
-    yield client
test/constants.py DELETED
@@ -1,10 +0,0 @@
-import os
-from urllib.parse import urlparse
-
-dasl_host = os.environ["DASL_API_URL"]
-databricks_host = os.environ["DASL_DATABRICKS_HOST"]
-databricks_client_id = os.environ["DASL_DATABRICKS_CLIENT_ID"]
-databricks_client_secret = os.environ["DASL_DATABRICKS_CLIENT_SECRET"]
-workspace = urlparse(databricks_host).hostname
-app_client_id = "22853b93-68ba-4ae2-8e41-976417f501dd"
-alternate_app_client_id = "335ac0d3-e0ea-4732-ba93-0277423b5029"