duckrun 0.2.5.dev4-py3-none-any.whl → 0.2.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
duckrun/core.py CHANGED
@@ -751,7 +751,6 @@ class WorkspaceConnection:
  lakehouses = response.json().get("value", [])
  lakehouse_names = [lh.get("displayName", "") for lh in lakehouses]

- print(f"Found {len(lakehouse_names)} lakehouses: {lakehouse_names}")
  return lakehouse_names

  except Exception as e:
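
The removed line was a debug `print` in `WorkspaceConnection.list_lakehouses`, so the method now returns its result quietly. For context, a minimal sketch of the post-change method reconstructed from the hunk; the REST endpoint, the `self.workspace_id`/`self.token` attributes, and the `except` body are assumptions, only the response parsing appears in the diff:

```python
import requests

class WorkspaceConnection:
    # ... __init__, auth, and other methods elided ...

    def list_lakehouses(self):
        """Return the display names of all lakehouses in the workspace."""
        try:
            # Hypothetical endpoint and auth, not taken from the diff
            response = requests.get(
                f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/lakehouses",
                headers={"Authorization": f"Bearer {self.token}"},
            )
            # These two lines are verbatim from the hunk above
            lakehouses = response.json().get("value", [])
            lakehouse_names = [lh.get("displayName", "") for lh in lakehouses]
            # 0.2.7 drops the debug print here, so callers get a clean return
            return lakehouse_names
        except Exception as e:
            # Assumed error handling; the except body is not shown in the hunk
            print(f"Error listing lakehouses: {e}")
            return []
```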
duckrun-0.2.7.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: duckrun
- Version: 0.2.5.dev4
+ Version: 0.2.7
  Summary: Lakehouse task runner powered by DuckDB for Microsoft Fabric
  Author: mim
  License: MIT
@@ -26,11 +26,10 @@ A helper package for stuff that made my life easier when working with Fabric Pyt
 
  **Requirements:**
  - Lakehouse must have a schema (e.g., `dbo`, `sales`, `analytics`)
- - Workspace and lakehouse names cannot contain spaces
+ - **Workspace names with spaces are fully supported!** ✅
 
- **Delta Lake Version:** This package uses an older version of deltalake to maintain row size control capabilities, which is crucial for Power BI performance optimization. The newer Rust-based deltalake versions don't yet support the row group size parameters that are essential for optimal DirectLake performance.
 
- **Why no spaces?** Duckrun uses simple name-based paths instead of GUIDs. This keeps the code clean and readable, which is perfect for data engineering workspaces where naming conventions are already well-established. Just use underscores or hyphens instead: `my_workspace` or `my-lakehouse`.
+ **Delta Lake Version:** This package uses an older version of deltalake to maintain row size control capabilities, which is crucial for Power BI performance optimization. The newer Rust-based deltalake versions don't yet support the row group size parameters that are essential for optimal DirectLake performance.
 
  ## What It Does
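
Stepping out of the diff for a moment: the retained **Delta Lake Version** paragraph in the hunk above is about row-group sizing. In the older, pyarrow-based `deltalake` writer this control is exposed as keyword arguments on `write_deltalake`; a minimal sketch, where the output path and tuning values are illustrative rather than duckrun's actual defaults:

```python
import pyarrow as pa
from deltalake import write_deltalake

# Toy table; in duckrun this data would come from a DuckDB query result
tbl = pa.table({"id": list(range(1_000_000)),
                "amount": [1.0] * 1_000_000})

write_deltalake(
    "/tmp/demo_table",              # illustrative; duckrun targets OneLake paths
    tbl,
    mode="overwrite",
    max_rows_per_file=1_000_000,    # cap each parquet file at ~1M rows
    max_rows_per_group=1_000_000,   # row-group ceiling inside each file
    min_rows_per_group=500_000,     # avoid tiny row groups that hurt DirectLake scans
)
```

Large, uniform row groups are what DirectLake scans efficiently; the Rust-based writer (as of the versions this README refers to) does not accept these parameters, hence the older pin.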
 
@@ -49,23 +48,48 @@ pip install duckrun[local]
 
  ## Quick Start
 
+ ### Simple Example for New Users
+
+ ```python
+ import duckrun
+
+ # Connect to a workspace and manage lakehouses
+ con = duckrun.connect('My Workspace')
+ con.list_lakehouses() # See what lakehouses exist
+ con.create_lakehouse_if_not_exists('data') # Create if needed
+
+ # Connect to a specific lakehouse and query data
+ con = duckrun.connect("My Workspace/data.lakehouse/dbo")
+ con.sql("SELECT * FROM my_table LIMIT 10").show()
+ ```
+
+ ### Full Feature Overview
+
  ```python
  import duckrun
 
- # Connect to your Fabric lakehouse with a specific schema
- con = duckrun.connect("my_workspace/my_lakehouse.lakehouse/dbo")
+ # 1. Workspace Management (list and create lakehouses)
+ ws = duckrun.connect("My Workspace")
+ lakehouses = ws.list_lakehouses() # Returns list of lakehouse names
+ ws.create_lakehouse_if_not_exists("New_Lakehouse")
+
+ # 2. Connect to lakehouse with a specific schema
+ con = duckrun.connect("My Workspace/MyLakehouse.lakehouse/dbo")
+
+ # Workspace names with spaces are supported!
+ con = duckrun.connect("Data Analytics/SalesData.lakehouse/analytics")
 
  # Schema defaults to 'dbo' if not specified (scans all schemas)
  # ⚠️ WARNING: Scanning all schemas can be slow for large lakehouses!
- con = duckrun.connect("my_workspace/my_lakehouse.lakehouse")
+ con = duckrun.connect("My Workspace/My_Lakehouse.lakehouse")
 
- # Explore data
+ # 3. Explore data
  con.sql("SELECT * FROM my_table LIMIT 10").show()
 
- # Write to Delta tables (Spark-style API)
+ # 4. Write to Delta tables (Spark-style API)
  con.sql("SELECT * FROM source").write.mode("overwrite").saveAsTable("target")
 
- # Upload/download files to/from OneLake Files
+ # 5. Upload/download files to/from OneLake Files
  con.copy("./local_folder", "target_folder") # Upload files
  con.download("target_folder", "./downloaded") # Download files
  ```
@@ -75,15 +99,23 @@ That's it! No `sql_folder` needed for data exploration.
  ## Connection Format
 
  ```python
- # With schema (recommended for better performance)
- con = duckrun.connect("workspace/lakehouse.lakehouse/schema")
+ # Workspace management (list and create lakehouses)
+ ws = duckrun.connect("My Workspace")
+ ws.list_lakehouses() # Returns: ['lakehouse1', 'lakehouse2', ...]
+ ws.create_lakehouse_if_not_exists("New Lakehouse")
+
+ # Lakehouse connection with schema (recommended for best performance)
+ con = duckrun.connect("My Workspace/My Lakehouse.lakehouse/dbo")
+
+ # Supports workspace names with spaces!
+ con = duckrun.connect("Data Analytics/Sales Data.lakehouse/analytics")
 
  # Without schema (defaults to 'dbo', scans all schemas)
  # ⚠️ This can be slow for large lakehouses!
- con = duckrun.connect("workspace/lakehouse.lakehouse")
+ con = duckrun.connect("My Workspace/My Lakehouse.lakehouse")
 
- # With options
- con = duckrun.connect("workspace/lakehouse.lakehouse/dbo", sql_folder="./sql")
+ # With SQL folder for pipeline orchestration
+ con = duckrun.connect("My Workspace/My Lakehouse.lakehouse/dbo", sql_folder="./sql")
  ```
 
  ### Multi-Schema Support
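
To make the `workspace/lakehouse.lakehouse/schema` grammar from the Connection Format hunk above concrete, here is a hypothetical parser, illustrative only and not duckrun's actual implementation (which lives in `duckrun/core.py`):

```python
def parse_connection(path: str):
    """Split 'workspace[/lakehouse.lakehouse[/schema]]' into its parts.

    Hypothetical helper for illustration: workspace names may contain
    spaces, and the schema segment is optional (duckrun then defaults
    to 'dbo' and scans all schemas).
    """
    parts = path.split("/")
    workspace = parts[0]
    lakehouse = parts[1].removesuffix(".lakehouse") if len(parts) > 1 else None
    schema = parts[2] if len(parts) > 2 else None
    return workspace, lakehouse, schema

assert parse_connection("Data Analytics/Sales Data.lakehouse/analytics") == \
       ("Data Analytics", "Sales Data", "analytics")
assert parse_connection("My Workspace") == ("My Workspace", None, None)
```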
duckrun-0.2.7.dist-info/RECORD ADDED
@@ -0,0 +1,12 @@
+ duckrun/__init__.py,sha256=XA85pL2vK1AkmBic8e7WxeqNvcd6SjFX4zsQpImDO6E,230
+ duckrun/core.py,sha256=Y4-5H83Xw0mZa12QM5pcC7qOPidrDFASLcGIoUW3zwY,39394
+ duckrun/files.py,sha256=piWRU5w9jHrW-wuV4Gf-SKY_jhFv9eflxgWO8AZCQTI,10495
+ duckrun/lakehouse.py,sha256=j--Z3zo8AOWt1GF9VzRosmmTAy6ey2D0LVubti58twU,14109
+ duckrun/runner.py,sha256=XsQqWlesFD2cuhH2gsQj3Astg0XN7xhW15WPmr8D65I,13797
+ duckrun/stats.py,sha256=2FTqoQNVjD84-H1HjStHxZkOpAGKXS79M55B00pOlok,9804
+ duckrun/writer.py,sha256=eWrGtDQTbXi8H3sSt2WucYTdEQUjK97KmQxzCbqAuMs,6221
+ duckrun-0.2.7.dist-info/licenses/LICENSE,sha256=-DeQQwdbCbkB4507ZF3QbocysB-EIjDtaLexvqRkGZc,1083
+ duckrun-0.2.7.dist-info/METADATA,sha256=fIwgvoj3Hw4ByOcwCmG87zpLF0qnlzK8GAotup5km40,19272
+ duckrun-0.2.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ duckrun-0.2.7.dist-info/top_level.txt,sha256=BknMEwebbUHrVAp3SC92ps8MPhK7XSYsaogTvi_DmEU,8
+ duckrun-0.2.7.dist-info/RECORD,,
duckrun-0.2.5.dev4.dist-info/RECORD DELETED
@@ -1,12 +0,0 @@
- duckrun/__init__.py,sha256=XA85pL2vK1AkmBic8e7WxeqNvcd6SjFX4zsQpImDO6E,230
- duckrun/core.py,sha256=6Us3dRuQFCziK12r0j2CuwgcDQeV78iEeLcA40IwIiA,39476
- duckrun/files.py,sha256=piWRU5w9jHrW-wuV4Gf-SKY_jhFv9eflxgWO8AZCQTI,10495
- duckrun/lakehouse.py,sha256=j--Z3zo8AOWt1GF9VzRosmmTAy6ey2D0LVubti58twU,14109
- duckrun/runner.py,sha256=XsQqWlesFD2cuhH2gsQj3Astg0XN7xhW15WPmr8D65I,13797
- duckrun/stats.py,sha256=2FTqoQNVjD84-H1HjStHxZkOpAGKXS79M55B00pOlok,9804
- duckrun/writer.py,sha256=eWrGtDQTbXi8H3sSt2WucYTdEQUjK97KmQxzCbqAuMs,6221
- duckrun-0.2.5.dev4.dist-info/licenses/LICENSE,sha256=-DeQQwdbCbkB4507ZF3QbocysB-EIjDtaLexvqRkGZc,1083
- duckrun-0.2.5.dev4.dist-info/METADATA,sha256=HE5rFI5A227bw-prSq3cHv9wR9-etSYJD31lIrKjpzs,18344
- duckrun-0.2.5.dev4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- duckrun-0.2.5.dev4.dist-info/top_level.txt,sha256=BknMEwebbUHrVAp3SC92ps8MPhK7XSYsaogTvi_DmEU,8
- duckrun-0.2.5.dev4.dist-info/RECORD,,
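
The `sha256=...` values in the RECORD entries above follow the wheel spec: the urlsafe-base64 encoding of each file's SHA-256 digest with padding stripped, followed by the file size in bytes. A small sketch for verifying a wheel's entries locally (the file path passed in is illustrative):

```python
import base64
import hashlib
from pathlib import Path

def record_entry(path: str) -> str:
    """Build a RECORD-style line 'path,sha256=<digest>,<size>' for a file."""
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest())
    return f"{path},sha256={digest.rstrip(b'=').decode()},{len(data)}"

# Compare against the corresponding line in RECORD above
print(record_entry("duckrun/core.py"))
```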