duckrun 0.2.4__py3-none-any.whl → 0.2.5.dev2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
duckrun/__init__.py CHANGED
@@ -4,7 +4,7 @@ from duckrun.core import Duckrun
4
4
 
5
5
  __version__ = "0.1.0"
6
6
 
7
- # Expose connect at module level for: import duckrun as dr
7
+ # Expose unified connect method at module level
8
8
  connect = Duckrun.connect
9
9
 
10
10
  __all__ = ["Duckrun", "connect"]
duckrun/core.py CHANGED
@@ -49,17 +49,37 @@ class Duckrun:
49
49
  ]
50
50
  """
51
51
 
52
- def __init__(self, workspace: str, lakehouse_name: str, schema: str = "dbo",
52
+ def __init__(self, workspace_id: str, lakehouse_id: str, schema: str = "dbo",
53
53
  sql_folder: Optional[str] = None, compaction_threshold: int = 10,
54
54
  scan_all_schemas: bool = False, storage_account: str = "onelake"):
55
- self.workspace = workspace
56
- self.lakehouse_name = lakehouse_name
55
+ # Store GUIDs for internal use
56
+ self.workspace_id = workspace_id
57
+ self.lakehouse_id = lakehouse_id
57
58
  self.schema = schema
58
59
  self.sql_folder = sql_folder.strip() if sql_folder else None
59
60
  self.compaction_threshold = compaction_threshold
60
61
  self.scan_all_schemas = scan_all_schemas
61
62
  self.storage_account = storage_account
62
- self.table_base_url = f'abfss://{workspace}@{storage_account}.dfs.fabric.microsoft.com/{lakehouse_name}.Lakehouse/Tables/'
63
+
64
+ # Construct proper ABFSS URLs
65
+ import re
66
+ guid_pattern = re.compile(r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$', re.IGNORECASE)
67
+ # If lakehouse_id is a GUID, use as-is
68
+ if guid_pattern.match(lakehouse_id):
69
+ lakehouse_url_part = lakehouse_id
70
+ else:
71
+ # If workspace name has no spaces, always append .lakehouse unless already present
72
+ if " " not in workspace_id and not lakehouse_id.endswith('.lakehouse'):
73
+ lakehouse_url_part = f'{lakehouse_id}.lakehouse'
74
+ else:
75
+ lakehouse_url_part = lakehouse_id
76
+ self.table_base_url = f'abfss://{workspace_id}@{storage_account}.dfs.fabric.microsoft.com/{lakehouse_url_part}/Tables/'
77
+ self.files_base_url = f'abfss://{workspace_id}@{storage_account}.dfs.fabric.microsoft.com/{lakehouse_url_part}/Files/'
78
+
79
+ # Keep legacy properties for backward compatibility
80
+ self.workspace = workspace_id
81
+ self.lakehouse_name = lakehouse_id
82
+
63
83
  self.con = duckdb.connect()
64
84
  self.con.sql("SET preserve_insertion_order = false")
65
85
  self._attach_lakehouse()
@@ -68,57 +88,218 @@ class Duckrun:
68
88
  def connect(cls, connection_string: str, sql_folder: Optional[str] = None,
69
89
  compaction_threshold: int = 100, storage_account: str = "onelake"):
70
90
  """
71
- Create and connect to lakehouse.
91
+ Create and connect to lakehouse or workspace.
72
92
 
73
- Uses compact format: connect("ws/lh.lakehouse/schema") or connect("ws/lh.lakehouse")
93
+ Smart detection based on connection string format:
94
+ - "workspace" → workspace management only
95
+ - "ws/lh.lakehouse/schema" → full lakehouse connection
96
+ - "ws/lh.lakehouse" → lakehouse connection (defaults to dbo schema)
74
97
 
75
98
  Args:
76
- connection_string: OneLake path "ws/lh.lakehouse/schema" or "ws/lh.lakehouse"
77
- sql_folder: Optional path or URL to SQL files folder
99
+ connection_string: OneLake path or workspace name
100
+ sql_folder: Optional path or URL to SQL files folder
78
101
  compaction_threshold: File count threshold for compaction
79
102
  storage_account: Storage account name (default: "onelake")
80
103
 
81
104
  Examples:
82
- dr = Duckrun.connect("ws/lh.lakehouse/schema", sql_folder="./sql")
83
- dr = Duckrun.connect("ws/lh.lakehouse/schema") # no SQL folder
84
- dr = Duckrun.connect("ws/lh.lakehouse") # defaults to dbo schema
85
- dr = Duckrun.connect("ws/lh.lakehouse", storage_account="xxx-onelake") # custom storage
105
+ # Workspace management only (supports spaces in names)
106
+ ws = Duckrun.connect("My Workspace Name")
107
+ ws.list_lakehouses()
108
+ ws.create_lakehouse_if_not_exists("New Lakehouse")
109
+
110
+ # Full lakehouse connections (supports spaces in names)
111
+ dr = Duckrun.connect("My Workspace/My Lakehouse.lakehouse/schema", sql_folder="./sql")
112
+ dr = Duckrun.connect("Data Workspace/Sales Data.lakehouse/analytics") # spaces supported
113
+ dr = Duckrun.connect("My Workspace/My Lakehouse.lakehouse") # defaults to dbo schema
114
+ dr = Duckrun.connect("workspace/lakehouse.lakehouse", storage_account="xxx-onelake") # custom storage
115
+
116
+ Note:
117
+ Internally resolves friendly names (with spaces) to GUIDs and constructs proper ABFSS URLs:
118
+ "My Workspace/My Lakehouse.lakehouse/schema" becomes
119
+ "abfss://workspace_guid@onelake.dfs.fabric.microsoft.com/lakehouse_guid/Tables/schema"
86
120
  """
121
+
122
+ # Check if it's a workspace-only connection (no "/" means workspace name only)
123
+ if "/" not in connection_string:
124
+ print(f"Connecting to workspace '{connection_string}' for management operations...")
125
+ return WorkspaceConnection(connection_string)
126
+
87
127
  print("Connecting to Lakehouse...")
88
128
 
89
129
  scan_all_schemas = False
90
130
 
91
- # Only support compact format: "ws/lh.lakehouse/schema" or "ws/lh.lakehouse"
92
- if not connection_string or "/" not in connection_string:
93
- raise ValueError(
94
- "Invalid connection string format. "
95
- "Expected format: 'workspace/lakehouse.lakehouse/schema' or 'workspace/lakehouse.lakehouse'"
96
- )
97
-
131
+ # Parse lakehouse connection string: "ws/lh.lakehouse/schema" or "ws/lh.lakehouse"
132
+ # Support workspace and lakehouse names with spaces
98
133
  parts = connection_string.split("/")
99
134
  if len(parts) == 2:
100
- workspace, lakehouse_name = parts
135
+ workspace_name, lakehouse_name = parts
101
136
  scan_all_schemas = True
102
137
  schema = "dbo"
103
138
  elif len(parts) == 3:
104
- workspace, lakehouse_name, schema = parts
139
+ workspace_name, lakehouse_name, schema = parts
105
140
  else:
106
141
  raise ValueError(
107
142
  f"Invalid connection string format: '{connection_string}'. "
108
- "Expected format: 'workspace/lakehouse.lakehouse' or 'workspace/lakehouse.lakehouse/schema'"
143
+ "Expected formats:\n"
144
+ " 'workspace name' (workspace management only)\n"
145
+ " 'workspace name/lakehouse name.lakehouse' (lakehouse with dbo schema)\n"
146
+ " 'workspace name/lakehouse name.lakehouse/schema' (lakehouse with specific schema)"
109
147
  )
110
148
 
111
149
  if lakehouse_name.endswith(".lakehouse"):
112
150
  lakehouse_name = lakehouse_name[:-10]
113
151
 
114
- if not workspace or not lakehouse_name:
152
+ if not workspace_name or not lakehouse_name:
153
+ raise ValueError(
154
+ "Missing required parameters. Use one of these formats:\n"
155
+ " connect('workspace name') # workspace management\n"
156
+ " connect('workspace name/lakehouse name.lakehouse/schema') # full lakehouse\n"
157
+ " connect('workspace name/lakehouse name.lakehouse') # defaults to dbo"
158
+ )
159
+
160
+ # Resolve friendly names to GUIDs and construct proper ABFSS path
161
+ workspace_id, lakehouse_id = cls._resolve_names_to_guids(workspace_name, lakehouse_name)
162
+
163
+ return cls(workspace_id, lakehouse_id, schema, sql_folder, compaction_threshold, scan_all_schemas, storage_account)
164
+
165
+ @classmethod
166
+ def _resolve_names_to_guids(cls, workspace_name: str, lakehouse_name: str) -> tuple[str, str]:
167
+ """
168
+ Resolve friendly workspace and lakehouse names to their GUIDs.
169
+
170
+ Optimization: If names don't contain spaces, use them directly (no API calls needed).
171
+ Only resolve to GUIDs when names contain spaces or are already GUIDs.
172
+
173
+ Args:
174
+ workspace_name: Display name of the workspace (can contain spaces)
175
+ lakehouse_name: Display name of the lakehouse (can contain spaces)
176
+
177
+ Returns:
178
+ Tuple of (workspace_id, lakehouse_id) - either resolved GUIDs or original names
179
+ """
180
+
181
+ # Check if names are already GUIDs first
182
+ import re
183
+ guid_pattern = re.compile(r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$', re.IGNORECASE)
184
+
185
+ if guid_pattern.match(workspace_name) and guid_pattern.match(lakehouse_name):
186
+ print(f"✅ Names are already GUIDs: workspace={workspace_name}, lakehouse={lakehouse_name}")
187
+ return workspace_name, lakehouse_name
188
+
189
+ # Optimization: If workspace name has no spaces, use both names directly (old behavior)
190
+ # Note: Lakehouse names cannot contain spaces in Microsoft Fabric, only workspace names can
191
+ if " " not in workspace_name:
192
+ print(f"✅ Using names directly (workspace has no spaces): workspace={workspace_name}, lakehouse={lakehouse_name}")
193
+ return workspace_name, lakehouse_name
194
+
195
+ # Workspace name contains spaces - need to resolve both to GUIDs for proper ABFSS URLs
196
+ print(f"🔍 Resolving '{workspace_name}' workspace and '{lakehouse_name}' lakehouse to GUIDs (workspace has spaces)...")
197
+
198
+ try:
199
+ # Get authentication token (try notebook environment first, then azure-identity)
200
+ try:
201
+ import notebookutils # type: ignore
202
+ token = notebookutils.credentials.getToken("pbi")
203
+ current_workspace_id = notebookutils.runtime.context.get("workspaceId")
204
+ except ImportError:
205
+ current_workspace_id = None
206
+ # Fallback to azure-identity for external environments
207
+ from azure.identity import AzureCliCredential, InteractiveBrowserCredential, ChainedTokenCredential
208
+ credential = ChainedTokenCredential(AzureCliCredential(), InteractiveBrowserCredential())
209
+ token_obj = credential.get_token("https://api.fabric.microsoft.com/.default")
210
+ token = token_obj.token
211
+
212
+ # Resolve workspace name to ID
213
+ if current_workspace_id:
214
+ # In notebook environment, we could use current workspace ID
215
+ # but we should validate it matches the requested workspace name
216
+ workspace_id = cls._resolve_workspace_id_by_name(token, workspace_name)
217
+ if not workspace_id:
218
+ # Fallback to current workspace if name resolution fails
219
+ print(f"⚠️ Could not validate workspace name '{workspace_name}', using current workspace")
220
+ workspace_id = current_workspace_id
221
+ else:
222
+ # External environment - must resolve by name
223
+ workspace_id = cls._resolve_workspace_id_by_name(token, workspace_name)
224
+ if not workspace_id:
225
+ raise ValueError(f"Workspace '{workspace_name}' not found")
226
+
227
+ # Resolve lakehouse name to ID (required for ABFSS URLs with spaces)
228
+ lakehouse_id = cls._resolve_lakehouse_id_by_name(token, workspace_id, lakehouse_name)
229
+ if not lakehouse_id:
230
+ raise ValueError(f"Lakehouse '{lakehouse_name}' not found in workspace '{workspace_name}'")
231
+
232
+ print(f"✅ Resolved: {workspace_name} → {workspace_id}, {lakehouse_name} → {lakehouse_id}")
233
+ return workspace_id, lakehouse_id
234
+
235
+ except Exception as e:
236
+ print(f"❌ Failed to resolve names to GUIDs: {e}")
237
+ print(f"❌ Cannot use friendly names with spaces '{workspace_name}'/'{lakehouse_name}' in ABFSS URLs without GUID resolution")
238
+ print("❌ Microsoft Fabric requires actual workspace and lakehouse GUIDs for ABFSS access when names contain spaces")
115
239
  raise ValueError(
116
- "Missing required parameters. Use compact format:\n"
117
- " connect('workspace/lakehouse.lakehouse/schema', 'sql_folder')\n"
118
- " connect('workspace/lakehouse.lakehouse') # defaults to dbo"
240
+ f"Unable to resolve workspace '{workspace_name}' and lakehouse '{lakehouse_name}' to GUIDs. "
241
+ f"ABFSS URLs require actual GUIDs when names contain spaces. "
242
+ f"Please ensure you have proper authentication and the workspace/lakehouse names are correct."
119
243
  )
244
+
245
+ @classmethod
246
+ def _resolve_workspace_id_by_name(cls, token: str, workspace_name: str) -> Optional[str]:
247
+ """Get workspace ID from display name"""
248
+ try:
249
+ import requests
250
+ url = "https://api.fabric.microsoft.com/v1/workspaces"
251
+ headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
252
+
253
+ response = requests.get(url, headers=headers)
254
+ response.raise_for_status()
255
+
256
+ workspaces = response.json().get("value", [])
257
+ for workspace in workspaces:
258
+ if workspace.get("displayName") == workspace_name:
259
+ return workspace.get("id")
260
+
261
+ return None
262
+ except Exception:
263
+ return None
264
+
265
+ @classmethod
266
+ def _resolve_lakehouse_id_by_name(cls, token: str, workspace_id: str, lakehouse_name: str) -> Optional[str]:
267
+ """Get lakehouse ID from display name within a workspace"""
268
+ try:
269
+ import requests
270
+ url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/lakehouses"
271
+ headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
272
+
273
+ response = requests.get(url, headers=headers)
274
+ response.raise_for_status()
275
+
276
+ lakehouses = response.json().get("value", [])
277
+ for lakehouse in lakehouses:
278
+ if lakehouse.get("displayName") == lakehouse_name:
279
+ return lakehouse.get("id")
280
+
281
+ return None
282
+ except Exception:
283
+ return None
284
+
285
+ @classmethod
286
+ def connect_workspace(cls, workspace_name: str):
287
+ """
288
+ Connect to a workspace without a specific lakehouse.
289
+ Used for lakehouse management operations.
120
290
 
121
- return cls(workspace, lakehouse_name, schema, sql_folder, compaction_threshold, scan_all_schemas, storage_account)
291
+ Args:
292
+ workspace_name: Name of the workspace
293
+
294
+ Returns:
295
+ WorkspaceConnection object with lakehouse management methods
296
+
297
+ Example:
298
+ con = duckrun.connect_workspace("MyWorkspace")
299
+ con.list_lakehouses()
300
+ con.create_lakehouse_if_not_exists("newlakehouse")
301
+ """
302
+ return WorkspaceConnection(workspace_name)
122
303
 
123
304
  def _get_storage_token(self):
124
305
  return os.environ.get("AZURE_STORAGE_TOKEN", "PLACEHOLDER_TOKEN_TOKEN_NOT_AVAILABLE")
@@ -155,7 +336,19 @@ class Duckrun:
155
336
  url = f"abfss://{self.workspace}@{self.storage_account}.dfs.fabric.microsoft.com/"
156
337
  store = AzureStore.from_url(url, bearer_token=token)
157
338
 
158
- base_path = f"{self.lakehouse_name}.Lakehouse/Tables/"
339
+ # Use the same lakehouse URL part logic as in __init__ to ensure .lakehouse suffix is added when needed
340
+ import re
341
+ guid_pattern = re.compile(r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$', re.IGNORECASE)
342
+ if guid_pattern.match(self.lakehouse_id):
343
+ lakehouse_url_part = self.lakehouse_id
344
+ else:
345
+ # If workspace name has no spaces, always append .lakehouse unless already present
346
+ if " " not in self.workspace_id and not self.lakehouse_id.endswith('.lakehouse'):
347
+ lakehouse_url_part = f'{self.lakehouse_id}.lakehouse'
348
+ else:
349
+ lakehouse_url_part = self.lakehouse_id
350
+
351
+ base_path = f"{lakehouse_url_part}/Tables/"
159
352
  tables_found = []
160
353
 
161
354
  if self.scan_all_schemas:
@@ -198,9 +391,9 @@ class Duckrun:
198
391
 
199
392
  if not tables:
200
393
  if self.scan_all_schemas:
201
- print(f"No Delta tables found in {self.lakehouse_name}.Lakehouse/Tables/")
394
+ print(f"No Delta tables found in {self.lakehouse_name}/Tables/")
202
395
  else:
203
- print(f"No Delta tables found in {self.lakehouse_name}.Lakehouse/Tables/{self.schema}/")
396
+ print(f"No Delta tables found in {self.lakehouse_name}/Tables/{self.schema}/")
204
397
  return
205
398
 
206
399
  # Group tables by schema for display
@@ -358,8 +551,268 @@ class Duckrun:
358
551
  """
359
552
  return _get_stats(self, source)
360
553
 
554
+ def list_lakehouses(self) -> List[str]:
555
+ """
556
+ List all lakehouses in the current workspace.
557
+
558
+ Returns:
559
+ List of lakehouse names
560
+ """
561
+ try:
562
+ # Try to get token from notebook environment first
563
+ try:
564
+ import notebookutils # type: ignore
565
+ token = notebookutils.credentials.getToken("pbi")
566
+ workspace_id = notebookutils.runtime.context.get("workspaceId")
567
+ except ImportError:
568
+ # Fallback to azure-identity
569
+ print("Getting authentication token...")
570
+ from azure.identity import AzureCliCredential, InteractiveBrowserCredential, ChainedTokenCredential
571
+ credential = ChainedTokenCredential(AzureCliCredential(), InteractiveBrowserCredential())
572
+ token_obj = credential.get_token("https://api.fabric.microsoft.com/.default")
573
+ token = token_obj.token
574
+
575
+ # Get workspace ID by name
576
+ workspace_id = self._get_workspace_id_by_name(token, self.workspace)
577
+ if not workspace_id:
578
+ print(f"Workspace '{self.workspace}' not found")
579
+ return []
580
+
581
+ # List lakehouses
582
+ url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/lakehouses"
583
+ headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
584
+
585
+ response = requests.get(url, headers=headers)
586
+ response.raise_for_status()
587
+
588
+ lakehouses = response.json().get("value", [])
589
+ lakehouse_names = [lh.get("displayName", "") for lh in lakehouses]
590
+
591
+ print(f"Found {len(lakehouse_names)} lakehouses: {lakehouse_names}")
592
+ return lakehouse_names
593
+
594
+ except Exception as e:
595
+ print(f"Error listing lakehouses: {e}")
596
+ return []
597
+
598
+ def create_lakehouse_if_not_exists(self, lakehouse_name: str) -> bool:
599
+ """
600
+ Create a lakehouse if it doesn't already exist.
601
+
602
+ Args:
603
+ lakehouse_name: Name of the lakehouse to create
604
+
605
+ Returns:
606
+ True if lakehouse exists or was created successfully, False otherwise
607
+ """
608
+ try:
609
+ # Try to get token from notebook environment first
610
+ try:
611
+ import notebookutils # type: ignore
612
+ token = notebookutils.credentials.getToken("pbi")
613
+ workspace_id = notebookutils.runtime.context.get("workspaceId")
614
+ except ImportError:
615
+ # Fallback to azure-identity
616
+ print("Getting authentication token...")
617
+ from azure.identity import AzureCliCredential, InteractiveBrowserCredential, ChainedTokenCredential
618
+ credential = ChainedTokenCredential(AzureCliCredential(), InteractiveBrowserCredential())
619
+ token_obj = credential.get_token("https://api.fabric.microsoft.com/.default")
620
+ token = token_obj.token
621
+
622
+ # Get workspace ID by name
623
+ workspace_id = self._get_workspace_id_by_name(token, self.workspace)
624
+ if not workspace_id:
625
+ print(f"Workspace '{self.workspace}' not found")
626
+ return False
627
+
628
+ # Check if lakehouse already exists
629
+ url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/lakehouses"
630
+ headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
631
+
632
+ response = requests.get(url, headers=headers)
633
+ response.raise_for_status()
634
+
635
+ lakehouses = response.json().get("value", [])
636
+ existing_names = [lh.get("displayName", "") for lh in lakehouses]
637
+
638
+ if lakehouse_name in existing_names:
639
+ print(f"Lakehouse '{lakehouse_name}' already exists")
640
+ return True
641
+
642
+ # Create lakehouse
643
+ print(f"Creating lakehouse '{lakehouse_name}'...")
644
+ payload = {
645
+ "displayName": lakehouse_name,
646
+ "description": f"Lakehouse {lakehouse_name} created via duckrun"
647
+ }
648
+
649
+ response = requests.post(url, headers=headers, json=payload)
650
+ response.raise_for_status()
651
+
652
+ print(f"✅ Lakehouse '{lakehouse_name}' created successfully")
653
+ return True
654
+
655
+ except Exception as e:
656
+ print(f"❌ Error creating lakehouse '{lakehouse_name}': {e}")
657
+ return False
658
+
659
+ def _get_workspace_id_by_name(self, token: str, workspace_name: str) -> Optional[str]:
660
+ """Helper method to get workspace ID from name"""
661
+ try:
662
+ url = "https://api.fabric.microsoft.com/v1/workspaces"
663
+ headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
664
+
665
+ response = requests.get(url, headers=headers)
666
+ response.raise_for_status()
667
+
668
+ workspaces = response.json().get("value", [])
669
+ for workspace in workspaces:
670
+ if workspace.get("displayName") == workspace_name:
671
+ return workspace.get("id")
672
+
673
+ return None
674
+
675
+ except Exception:
676
+ return None
677
+
361
678
  def close(self):
362
679
  """Close DuckDB connection"""
363
680
  if self.con:
364
681
  self.con.close()
365
- print("Connection closed")
682
+ print("Connection closed")
683
+
684
+
685
+ class WorkspaceConnection:
686
+ """
687
+ Simple workspace connection for lakehouse management operations.
688
+ """
689
+
690
+ def __init__(self, workspace_name: str):
691
+ self.workspace_name = workspace_name
692
+
693
+ def list_lakehouses(self) -> List[str]:
694
+ """
695
+ List all lakehouses in the workspace.
696
+
697
+ Returns:
698
+ List of lakehouse names
699
+ """
700
+ try:
701
+ # Try to get token from notebook environment first
702
+ try:
703
+ import notebookutils # type: ignore
704
+ token = notebookutils.credentials.getToken("pbi")
705
+ workspace_id = notebookutils.runtime.context.get("workspaceId")
706
+ except ImportError:
707
+ # Fallback to azure-identity
708
+ print("Getting authentication token...")
709
+ from azure.identity import AzureCliCredential, InteractiveBrowserCredential, ChainedTokenCredential
710
+ credential = ChainedTokenCredential(AzureCliCredential(), InteractiveBrowserCredential())
711
+ token_obj = credential.get_token("https://api.fabric.microsoft.com/.default")
712
+ token = token_obj.token
713
+
714
+ # Get workspace ID by name
715
+ workspace_id = self._get_workspace_id_by_name(token, self.workspace_name)
716
+ if not workspace_id:
717
+ print(f"Workspace '{self.workspace_name}' not found")
718
+ return []
719
+
720
+ # List lakehouses
721
+ url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/lakehouses"
722
+ headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
723
+
724
+ response = requests.get(url, headers=headers)
725
+ response.raise_for_status()
726
+
727
+ lakehouses = response.json().get("value", [])
728
+ lakehouse_names = [lh.get("displayName", "") for lh in lakehouses]
729
+
730
+ print(f"Found {len(lakehouse_names)} lakehouses: {lakehouse_names}")
731
+ return lakehouse_names
732
+
733
+ except Exception as e:
734
+ print(f"Error listing lakehouses: {e}")
735
+ return []
736
+
737
+ def create_lakehouse_if_not_exists(self, lakehouse_name: str) -> bool:
738
+ """
739
+ Create a lakehouse if it doesn't already exist.
740
+
741
+ Args:
742
+ lakehouse_name: Name of the lakehouse to create
743
+
744
+ Returns:
745
+ True if lakehouse exists or was created successfully, False otherwise
746
+ """
747
+ try:
748
+ # Try to get token from notebook environment first
749
+ try:
750
+ import notebookutils # type: ignore
751
+ token = notebookutils.credentials.getToken("pbi")
752
+ workspace_id = notebookutils.runtime.context.get("workspaceId")
753
+ except ImportError:
754
+ # Fallback to azure-identity
755
+ print("Getting authentication token...")
756
+ from azure.identity import AzureCliCredential, InteractiveBrowserCredential, ChainedTokenCredential
757
+ credential = ChainedTokenCredential(AzureCliCredential(), InteractiveBrowserCredential())
758
+ token_obj = credential.get_token("https://api.fabric.microsoft.com/.default")
759
+ token = token_obj.token
760
+
761
+ # Get workspace ID by name
762
+ workspace_id = self._get_workspace_id_by_name(token, self.workspace_name)
763
+ if not workspace_id:
764
+ print(f"Workspace '{self.workspace_name}' not found")
765
+ return False
766
+
767
+ # Check if lakehouse already exists
768
+ url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}/lakehouses"
769
+ headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
770
+
771
+ response = requests.get(url, headers=headers)
772
+ response.raise_for_status()
773
+
774
+ lakehouses = response.json().get("value", [])
775
+ existing_names = [lh.get("displayName", "") for lh in lakehouses]
776
+
777
+ if lakehouse_name in existing_names:
778
+ print(f"Lakehouse '{lakehouse_name}' already exists")
779
+ return True
780
+
781
+ # Create lakehouse
782
+ print(f"Creating lakehouse '{lakehouse_name}'...")
783
+ payload = {
784
+ "displayName": lakehouse_name,
785
+ "description": f"Lakehouse {lakehouse_name} created via duckrun",
786
+ "creationPayload": {
787
+ "enableSchemas": True
788
+ }
789
+ }
790
+
791
+ response = requests.post(url, headers=headers, json=payload)
792
+ response.raise_for_status()
793
+
794
+ print(f"✅ Lakehouse '{lakehouse_name}' created successfully")
795
+ return True
796
+
797
+ except Exception as e:
798
+ print(f"❌ Error creating lakehouse '{lakehouse_name}': {e}")
799
+ return False
800
+
801
+ def _get_workspace_id_by_name(self, token: str, workspace_name: str) -> Optional[str]:
802
+ """Helper method to get workspace ID from name"""
803
+ try:
804
+ url = "https://api.fabric.microsoft.com/v1/workspaces"
805
+ headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
806
+
807
+ response = requests.get(url, headers=headers)
808
+ response.raise_for_status()
809
+
810
+ workspaces = response.json().get("value", [])
811
+ for workspace in workspaces:
812
+ if workspace.get("displayName") == workspace_name:
813
+ return workspace.get("id")
814
+
815
+ return None
816
+
817
+ except Exception:
818
+ return None
duckrun/files.py CHANGED
@@ -51,8 +51,8 @@ def copy(duckrun_instance, local_folder: str, remote_folder: str,
51
51
  token = token_obj.token
52
52
  os.environ["AZURE_STORAGE_TOKEN"] = token
53
53
 
54
- # Setup OneLake Files URL (not Tables)
55
- files_base_url = f'abfss://{duckrun_instance.workspace}@{duckrun_instance.storage_account}.dfs.fabric.microsoft.com/{duckrun_instance.lakehouse_name}.Lakehouse/Files/'
54
+ # Setup OneLake Files URL (use correct format without .Lakehouse suffix)
55
+ files_base_url = duckrun_instance.files_base_url
56
56
  store = AzureStore.from_url(files_base_url, bearer_token=token)
57
57
 
58
58
  # Collect files to upload
@@ -160,8 +160,8 @@ def download(duckrun_instance, remote_folder: str = "", local_folder: str = "./d
160
160
  token = token_obj.token
161
161
  os.environ["AZURE_STORAGE_TOKEN"] = token
162
162
 
163
- # Setup OneLake Files URL (not Tables)
164
- files_base_url = f'abfss://{duckrun_instance.workspace}@{duckrun_instance.storage_account}.dfs.fabric.microsoft.com/{duckrun_instance.lakehouse_name}.Lakehouse/Files/'
163
+ # Setup OneLake Files URL (use correct format without .Lakehouse suffix)
164
+ files_base_url = duckrun_instance.files_base_url
165
165
  store = AzureStore.from_url(files_base_url, bearer_token=token)
166
166
 
167
167
  # Create local directory
duckrun/lakehouse.py ADDED
@@ -0,0 +1,402 @@
1
+ import requests
2
+ import time
3
+ from typing import Optional
4
+
5
class FabricLakehouseManager:
    """
    Manage Microsoft Fabric Lakehouses using REST API only.
    Works on any machine with Python and internet access.
    """

    # Seconds allowed for any single HTTP round-trip. Without an explicit
    # timeout, `requests` can block forever on a stalled connection.
    REQUEST_TIMEOUT = 30

    def __init__(self, access_token: str):
        """
        Initialize with Azure AD access token.

        Args:
            access_token: Bearer token for Fabric API authentication
        """
        self.base_url = "https://api.fabric.microsoft.com/v1"
        self.headers = {
            "Authorization": f"Bearer {access_token}",
            "Content-Type": "application/json"
        }

    def _list_items(self, url: str) -> list:
        """
        Fetch every page of a Fabric list endpoint.

        The Fabric REST API paginates list responses: when more results
        exist the payload carries a 'continuationUri' which must be
        followed, otherwise items beyond the first page are never seen.

        Args:
            url: Initial list endpoint URL.

        Returns:
            Concatenated 'value' arrays from every page.

        Raises:
            requests.HTTPError: If any page request fails.
        """
        items = []
        next_url = url
        while next_url:
            response = requests.get(next_url, headers=self.headers,
                                    timeout=self.REQUEST_TIMEOUT)
            response.raise_for_status()
            payload = response.json()
            items.extend(payload.get("value", []))
            # None/absent continuationUri terminates the loop.
            next_url = payload.get("continuationUri")
        return items

    def get_workspace_id(self, workspace_name: str) -> Optional[str]:
        """
        Get workspace ID from workspace name.

        Args:
            workspace_name: Name of the workspace

        Returns:
            Workspace ID if found, None otherwise
        """
        if not workspace_name:
            return None

        try:
            # Paginated listing: a name match may live on any page.
            for workspace in self._list_items(f"{self.base_url}/workspaces"):
                if workspace.get("displayName") == workspace_name:
                    return workspace.get("id")

            print(f"Workspace '{workspace_name}' not found")
            return None

        except Exception as e:
            print(f"Error getting workspace ID: {e}")
            return None

    def get_lakehouse(self, lakehouse_name: str, workspace_id: str) -> Optional[dict]:
        """
        Get lakehouse details if it exists.

        Args:
            lakehouse_name: Name of the lakehouse
            workspace_id: ID of the workspace

        Returns:
            Lakehouse details if found, None otherwise
        """
        try:
            url = f"{self.base_url}/workspaces/{workspace_id}/lakehouses"
            for lakehouse in self._list_items(url):
                if lakehouse.get("displayName") == lakehouse_name:
                    return lakehouse

            return None

        except Exception as e:
            print(f"Error getting lakehouse: {e}")
            return None

    def create_lakehouse(self, lakehouse_name: str, workspace_id: str,
                         enable_schemas: bool = True) -> Optional[dict]:
        """
        Create a new lakehouse.

        Args:
            lakehouse_name: Name of the lakehouse
            workspace_id: ID of the workspace
            enable_schemas: Whether to enable schemas

        Returns:
            Created lakehouse details if successful, None otherwise
        """
        try:
            url = f"{self.base_url}/workspaces/{workspace_id}/lakehouses"
            payload = {
                "displayName": lakehouse_name,
                "description": f"Lakehouse {lakehouse_name}"
            }

            if enable_schemas:
                payload["creationPayload"] = {
                    "enableSchemas": True
                }

            response = requests.post(url, headers=self.headers, json=payload,
                                     timeout=self.REQUEST_TIMEOUT)
            response.raise_for_status()

            # Wait a bit for the lakehouse to be fully provisioned
            time.sleep(2)

            return response.json()

        except Exception as e:
            print(f"Error creating lakehouse: {e}")
            # requests exceptions carry the failed response; surface its body.
            if hasattr(e, 'response') and e.response is not None:
                print(f"Response: {e.response.text}")
            return None

    def create_lakehouse_if_not_exists(self, lakehouse_name: str,
                                       workspace_name: Optional[str] = None,
                                       workspace_id: Optional[str] = None) -> int:
        """
        Create a lakehouse if it doesn't exist.

        Args:
            lakehouse_name: Name of the lakehouse
            workspace_name: Optional workspace name
            workspace_id: Optional workspace ID (takes precedence over workspace_name)

        Returns:
            1 if successful (lakehouse exists or was created)
            0 if failed
        """
        # Resolve workspace ID
        if workspace_id is None and workspace_name:
            workspace_id = self.get_workspace_id(workspace_name)
            if workspace_id is None:
                print(f"Workspace '{workspace_name}' not found - returning 0")
                return 0
        elif workspace_id is None:
            print("No workspace specified - returning 0")
            return 0

        print(f"Attempting to get lakehouse '{lakehouse_name}' in workspace '{workspace_id}'")

        # Check if lakehouse exists
        lakehouse = self.get_lakehouse(lakehouse_name, workspace_id)

        if lakehouse:
            print(f"Lakehouse '{lakehouse_name}' found - returning 1")
            return 1

        # Create lakehouse if it doesn't exist
        print("Lakehouse not found, attempting to create...")
        created = self.create_lakehouse(lakehouse_name, workspace_id)

        if created:
            # Verify creation by listing again — the POST result alone is
            # not proof the item is visible yet.
            lakehouse = self.get_lakehouse(lakehouse_name, workspace_id)
            if lakehouse:
                print(f"Lakehouse '{lakehouse_name}' created successfully - returning 1")
                return 1

        print(f"Failed to create lakehouse '{lakehouse_name}' - returning 0")
        return 0
167
+
168
+
169
+ # Example usage with Azure Identity:
170
def main():
    """
    Example of how to use the FabricLakehouseManager with azure-identity.
    """
    from azure.identity import AzureCliCredential, InteractiveBrowserCredential, ChainedTokenCredential

    print("Authenticating with Azure (trying CLI, will fallback to browser if needed)...")

    # Credential chain: Azure CLI first, interactive browser as fallback.
    chain = ChainedTokenCredential(AzureCliCredential(), InteractiveBrowserCredential())

    # The Fabric API scope is required here — a storage-scoped token
    # is not accepted by the management endpoints.
    fabric_token = chain.get_token("https://api.fabric.microsoft.com/.default")

    print("✓ Authentication successful!")

    # Drive the manager with the freshly acquired Fabric token.
    manager = FabricLakehouseManager(fabric_token.token)

    outcome = manager.create_lakehouse_if_not_exists(
        lakehouse_name="MyLakehouse",
        workspace_name="MyWorkspace"
    )

    if outcome == 1:
        print("✓ Lakehouse operation successful!")
    else:
        print("✗ Lakehouse operation failed!")

    return outcome
205
+
206
+
207
def get_fabric_token():
    """
    Helper function to get Fabric API token.
    Returns the token string.
    """
    from azure.identity import AzureCliCredential, InteractiveBrowserCredential, ChainedTokenCredential

    # Try the Azure CLI credential first; fall back to the browser flow.
    chain = ChainedTokenCredential(
        AzureCliCredential(),
        InteractiveBrowserCredential()
    )

    # Token scoped to the Fabric API (not storage).
    return chain.get_token("https://api.fabric.microsoft.com/.default").token
222
+
223
+
224
def create_lakehouse_in_notebook(lakehouse_name: str, workspace_name: Optional[str] = None) -> int:
    """
    Create a lakehouse in a Fabric notebook environment.
    This function uses the notebook's built-in authentication.

    Args:
        lakehouse_name: Name of the lakehouse to create
        workspace_name: Optional workspace name (uses current workspace if None)

    Returns:
        1 if successful (lakehouse exists or was created)
        0 if failed
    """
    try:
        # Try to import fabric notebook utilities (only available in Fabric notebooks)
        import notebookutils  # type: ignore

        # Get authentication token from notebook environment
        token = notebookutils.credentials.getToken("https://api.fabric.microsoft.com/.default")

        # Initialize manager with notebook token
        manager = FabricLakehouseManager(token)

        # Get current workspace ID if no workspace specified
        workspace_id = None
        if workspace_name:
            workspace_id = manager.get_workspace_id(workspace_name)
        else:
            # In Fabric notebooks, we can get the current workspace from context
            try:
                workspace_id = notebookutils.runtime.context.get("workspaceId")
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
                # are not swallowed.
                print("Could not get current workspace ID from notebook context")
                return 0

        if not workspace_id:
            print("Could not resolve workspace ID")
            return 0

        # Create lakehouse if not exists
        return manager.create_lakehouse_if_not_exists(
            lakehouse_name=lakehouse_name,
            workspace_id=workspace_id
        )

    except ImportError:
        print("notebookutils not available - not running in Fabric notebook environment")
        print("Use FabricLakehouseManager class directly with proper authentication")
        return 0
    except Exception as e:
        print(f"Error creating lakehouse in notebook: {e}")
        return 0
276
+
277
+
278
def create_lakehouse_simple(lakehouse_name: str, access_token: str, workspace_id: str) -> dict:
    """
    Simple function to create a lakehouse with minimal dependencies.
    Perfect for Fabric notebook environments.

    Args:
        lakehouse_name: Name of the lakehouse to create
        access_token: Bearer token for authentication
        workspace_id: ID of the target workspace

    Returns:
        Dictionary with creation result:
        {"success": bool, "message": str, "lakehouse": dict | None, "created": bool}
    """
    # Local imports keep the function copy-pasteable into a bare notebook cell.
    import requests
    import time

    base_url = "https://api.fabric.microsoft.com/v1"
    headers = {
        "Authorization": f"Bearer {access_token}",
        "Content-Type": "application/json"
    }
    # Explicit timeout so a stalled connection cannot hang the notebook forever.
    request_timeout = 30

    try:
        # First check if lakehouse already exists
        list_url = f"{base_url}/workspaces/{workspace_id}/lakehouses"
        response = requests.get(list_url, headers=headers, timeout=request_timeout)
        response.raise_for_status()

        lakehouses = response.json().get("value", [])
        for lakehouse in lakehouses:
            if lakehouse.get("displayName") == lakehouse_name:
                return {
                    "success": True,
                    "message": f"Lakehouse '{lakehouse_name}' already exists",
                    "lakehouse": lakehouse,
                    "created": False
                }

        # Create new lakehouse
        create_url = f"{base_url}/workspaces/{workspace_id}/lakehouses"
        payload = {
            "displayName": lakehouse_name,
            "description": f"Lakehouse {lakehouse_name} created via API"
        }

        response = requests.post(create_url, headers=headers, json=payload,
                                 timeout=request_timeout)
        response.raise_for_status()

        # Wait for provisioning
        time.sleep(3)

        created_lakehouse = response.json()
        return {
            "success": True,
            "message": f"Lakehouse '{lakehouse_name}' created successfully",
            "lakehouse": created_lakehouse,
            "created": True
        }

    except requests.exceptions.RequestException as e:
        # HTTP-layer failure: include the server's response body when present.
        error_msg = f"HTTP error creating lakehouse: {e}"
        if hasattr(e, 'response') and e.response is not None:
            error_msg += f" Response: {e.response.text}"

        return {
            "success": False,
            "message": error_msg,
            "lakehouse": None,
            "created": False
        }
    except Exception as e:
        return {
            "success": False,
            "message": f"Unexpected error: {e}",
            "lakehouse": None,
            "created": False
        }
355
+
356
+
357
+ if __name__ == "__main__":
358
+ # Uncomment to run the example
359
+ # main()
360
+ pass
361
+
362
+
363
+ # Usage Examples:
364
+ """
365
+ # Example 1: In a Fabric Notebook (simplest approach)
366
+ from duckrun.lakehouse import create_lakehouse_in_notebook
367
+
368
+ result = create_lakehouse_in_notebook("MyNewLakehouse")
369
+ if result == 1:
370
+ print("Lakehouse created or already exists!")
371
+
372
+ # Example 2: In a Fabric Notebook with explicit token
373
+ import notebookutils
374
+ from duckrun.lakehouse import create_lakehouse_simple
375
+
376
+ token = notebookutils.credentials.getToken("https://api.fabric.microsoft.com/.default")
377
+ workspace_id = notebookutils.runtime.context.get("workspaceId")
378
+
379
+ result = create_lakehouse_simple("MyLakehouse", token, workspace_id)
380
+ print(f"Result: {result['message']}")
381
+
382
+ # Example 3: Outside Fabric (requires azure-identity package)
383
+ from duckrun.lakehouse import FabricLakehouseManager, get_fabric_token
384
+
385
+ token = get_fabric_token()
386
+ manager = FabricLakehouseManager(token)
387
+ result = manager.create_lakehouse_if_not_exists("MyLakehouse", workspace_name="MyWorkspace")
388
+
389
+ # Example 4: With explicit workspace and lakehouse details
390
+ from duckrun.lakehouse import FabricLakehouseManager
391
+
392
+ # Get your token however you prefer
393
+ token = "your_bearer_token_here"
394
+ manager = FabricLakehouseManager(token)
395
+
396
+ # Create lakehouse in specific workspace
397
+ workspace_id = manager.get_workspace_id("Production Workspace")
398
+ lakehouse = manager.create_lakehouse("DataLake2024", workspace_id, enable_schemas=True)
399
+
400
+ if lakehouse:
401
+ print(f"Created lakehouse with ID: {lakehouse['id']}")
402
+ """
duckrun/runner.py CHANGED
@@ -235,11 +235,28 @@ def _read_sql_file(duckrun_instance, table_name: str, params: Optional[Dict] = N
235
235
  print(f"SQL file is empty: {table_name}.sql")
236
236
  return None
237
237
 
238
+ import re
239
+ # Determine if lakehouse_name is a GUID
240
+ guid_pattern = re.compile(r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$', re.IGNORECASE)
241
+ lakehouse_is_guid = bool(guid_pattern.match(duckrun_instance.lakehouse_name))
242
+
243
+ # Smart substitution for ${lh}.Lakehouse
244
+ # If template contains ${lh}.Lakehouse, replace with correct value
245
+ if '${lh}.Lakehouse' in content:
246
+ if lakehouse_is_guid:
247
+ # If GUID, use just the GUID
248
+ content = content.replace('${lh}.Lakehouse', duckrun_instance.lakehouse_name)
249
+ else:
250
+ # If not GUID, use legacy format
251
+ content = content.replace('${lh}.Lakehouse', f'{duckrun_instance.lakehouse_name}.Lakehouse')
252
+
238
253
  full_params = {
239
254
  'ws': duckrun_instance.workspace,
240
255
  'lh': duckrun_instance.lakehouse_name,
241
256
  'schema': duckrun_instance.schema,
242
- 'storage_account': duckrun_instance.storage_account
257
+ 'storage_account': duckrun_instance.storage_account,
258
+ 'tables_url': duckrun_instance.table_base_url,
259
+ 'files_url': duckrun_instance.files_base_url
243
260
  }
244
261
  if params:
245
262
  full_params.update(params)
@@ -247,6 +264,10 @@ def _read_sql_file(duckrun_instance, table_name: str, params: Optional[Dict] = N
247
264
  try:
248
265
  template = Template(content)
249
266
  content = template.substitute(full_params)
267
+ # After substitution, remove .Lakehouse if it follows a GUID in any ABFSS URL
268
+ import re
269
+ # Pattern: GUID.Lakehouse or GUID.lakehouse (in URLs)
270
+ content = re.sub(r'([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})\.(Lakehouse|lakehouse)', r'\1', content)
250
271
  except KeyError as e:
251
272
  print(f"Missing parameter in SQL file: ${e}")
252
273
  return None
duckrun/stats.py CHANGED
@@ -142,8 +142,8 @@ def get_stats(duckrun_instance, source: str):
142
142
  print(f"Processing {len(list_tables)} tables: {list_tables}")
143
143
 
144
144
  for idx, tbl in enumerate(list_tables):
145
- # Construct lakehouse path using ABFSS URL
146
- table_path = f"abfss://{duckrun_instance.workspace}@{duckrun_instance.storage_account}.dfs.fabric.microsoft.com/{duckrun_instance.lakehouse_name}.Lakehouse/Tables/{schema_name}/{tbl}"
145
+ # Construct lakehouse path using correct ABFSS URL format (no .Lakehouse suffix)
146
+ table_path = f"{duckrun_instance.table_base_url}{schema_name}/{tbl}"
147
147
 
148
148
  try:
149
149
  dt = DeltaTable(table_path)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: duckrun
3
- Version: 0.2.4
3
+ Version: 0.2.5.dev2
4
4
  Summary: Lakehouse task runner powered by DuckDB for Microsoft Fabric
5
5
  Author: mim
6
6
  License: MIT
@@ -0,0 +1,12 @@
1
+ duckrun/__init__.py,sha256=XA85pL2vK1AkmBic8e7WxeqNvcd6SjFX4zsQpImDO6E,230
2
+ duckrun/core.py,sha256=UgBE90zTFvnieTrUEb4tDA2cWSwFfh24M_e46FTmFvg,38345
3
+ duckrun/files.py,sha256=piWRU5w9jHrW-wuV4Gf-SKY_jhFv9eflxgWO8AZCQTI,10495
4
+ duckrun/lakehouse.py,sha256=j--Z3zo8AOWt1GF9VzRosmmTAy6ey2D0LVubti58twU,14109
5
+ duckrun/runner.py,sha256=lfwNoU1CZXh6bPTHvGWVaUWjzG5crvT7Pzq4onMEVjw,12576
6
+ duckrun/stats.py,sha256=2FTqoQNVjD84-H1HjStHxZkOpAGKXS79M55B00pOlok,9804
7
+ duckrun/writer.py,sha256=eWrGtDQTbXi8H3sSt2WucYTdEQUjK97KmQxzCbqAuMs,6221
8
+ duckrun-0.2.5.dev2.dist-info/licenses/LICENSE,sha256=-DeQQwdbCbkB4507ZF3QbocysB-EIjDtaLexvqRkGZc,1083
9
+ duckrun-0.2.5.dev2.dist-info/METADATA,sha256=wiB12-pG_jlyUbghVNMyE3KHcwt8GFJd04VNZ2C4VwE,18344
10
+ duckrun-0.2.5.dev2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
11
+ duckrun-0.2.5.dev2.dist-info/top_level.txt,sha256=BknMEwebbUHrVAp3SC92ps8MPhK7XSYsaogTvi_DmEU,8
12
+ duckrun-0.2.5.dev2.dist-info/RECORD,,
@@ -1,11 +0,0 @@
1
- duckrun/__init__.py,sha256=L0jRtD9Ld8Ti4e6GRvPDdHvkQCFAPHM43GSP7ARh6EM,241
2
- duckrun/core.py,sha256=m_9DuSZNZ5DOETnkjNGn8HJBYheCgs_7NewcbM9VECI,16500
3
- duckrun/files.py,sha256=xba0juMEQPgaznDudmXcwaGH0wv-6aCoHmV_cNF6Y7I,10665
4
- duckrun/runner.py,sha256=X5g-57OCHQZ7USKpcBbhYGUcZwLQny2x147DLKrV32c,11417
5
- duckrun/stats.py,sha256=B9UfGOndRNfcB2AhOVjuSqgfmF2x-uRmdmBn3usx_jQ,9881
6
- duckrun/writer.py,sha256=eWrGtDQTbXi8H3sSt2WucYTdEQUjK97KmQxzCbqAuMs,6221
7
- duckrun-0.2.4.dist-info/licenses/LICENSE,sha256=-DeQQwdbCbkB4507ZF3QbocysB-EIjDtaLexvqRkGZc,1083
8
- duckrun-0.2.4.dist-info/METADATA,sha256=2t7-pNzcPCeseXTjp6Bc18_V41MpjDarG0z-2IzY-Lk,18339
9
- duckrun-0.2.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
10
- duckrun-0.2.4.dist-info/top_level.txt,sha256=BknMEwebbUHrVAp3SC92ps8MPhK7XSYsaogTvi_DmEU,8
11
- duckrun-0.2.4.dist-info/RECORD,,