duckrun 0.2.9.dev3.tar.gz → 0.2.9.dev5.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/PKG-INFO +1 -1
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun/__init__.py +1 -1
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun/semantic_model.py +71 -39
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun.egg-info/PKG-INFO +1 -1
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/pyproject.toml +1 -1
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/LICENSE +0 -0
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/README.md +0 -0
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun/auth.py +0 -0
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun/core.py +0 -0
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun/files.py +0 -0
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun/lakehouse.py +0 -0
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun/runner.py +0 -0
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun/stats.py +0 -0
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun/writer.py +0 -0
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun.egg-info/SOURCES.txt +0 -0
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun.egg-info/dependency_links.txt +0 -0
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun.egg-info/requires.txt +0 -0
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/duckrun.egg-info/top_level.txt +0 -0
- {duckrun-0.2.9.dev3 → duckrun-0.2.9.dev5}/setup.cfg +0 -0
@@ -129,9 +129,12 @@ def check_dataset_exists(dataset_name, workspace_id, client):
     return False
 
 
-def refresh_dataset(dataset_name, workspace_id, client):
-    """Refresh a dataset and monitor progress"""
-
+def refresh_dataset(dataset_name, workspace_id, client, dataset_id=None):
+    """Refresh a dataset and monitor progress using Power BI API"""
+
+    # If dataset_id not provided, look it up by name
+    if not dataset_id:
+        dataset_id = get_dataset_id(dataset_name, workspace_id, client)
 
     payload = {
         "type": "full",
@@ -141,39 +144,46 @@ def refresh_dataset(dataset_name, workspace_id, client):
         "objects": []
     }
 
-
-
-
-
+    # Use Power BI API for refresh (not Fabric API)
+    powerbi_url = f"https://api.powerbi.com/v1.0/myorg/datasets/{dataset_id}/refreshes"
+    headers = client._get_headers()
+
+    response = requests.post(powerbi_url, headers=headers, json=payload)
 
     if response.status_code in [200, 202]:
         print(f"✓ Refresh initiated")
 
-        refresh_id
-        if
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        # For 202, get the refresh_id from the Location header
+        if response.status_code == 202:
+            location = response.headers.get('Location')
+            if location:
+                refresh_id = location.split('/')[-1]
+                print(" Monitoring refresh progress...")
+                max_attempts = 60
+                for attempt in range(max_attempts):
+                    time.sleep(5)
+
+                    # Check refresh status using Power BI API
+                    status_url = f"https://api.powerbi.com/v1.0/myorg/datasets/{dataset_id}/refreshes/{refresh_id}"
+                    status_response = requests.get(status_url, headers=headers)
+                    status_response.raise_for_status()
+                    status = status_response.json().get('status')
+
+                    if status == 'Completed':
+                        print(f"✓ Refresh completed successfully")
+                        return
+                    elif status == 'Failed':
+                        error = status_response.json().get('serviceExceptionJson', '')
+                        raise Exception(f"Refresh failed: {error}")
+                    elif status == 'Cancelled':
+                        raise Exception("Refresh was cancelled")
+
+                    if attempt % 6 == 0:
+                        print(f" Status: {status}...")
 
-
-
-
-        raise Exception(f"Refresh timed out")
+                raise Exception(f"Refresh timed out")
+    else:
+        response.raise_for_status()
 
 
 def download_bim_from_github(url):
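For reference, the new refresh path is the standard Power BI "enhanced refresh" pattern: POST the refresh request, read the refresh id from the `Location` header of the 202 response, then poll the refresh execution-details endpoint. A minimal standalone sketch of that pattern (token acquisition is assumed to be handled elsewhere; `trigger_and_poll_refresh` and `token` are illustrative names, not duckrun API):

    import time
    import requests

    def trigger_and_poll_refresh(dataset_id: str, token: str) -> None:
        headers = {"Authorization": f"Bearer {token}"}
        base = f"https://api.powerbi.com/v1.0/myorg/datasets/{dataset_id}/refreshes"

        # Kick off a full refresh; the service replies 202 Accepted with the
        # refresh request's URL in the Location header.
        resp = requests.post(base, headers=headers, json={"type": "full", "objects": []})
        resp.raise_for_status()
        refresh_id = resp.headers["Location"].rstrip("/").split("/")[-1]

        # Poll until the refresh reaches a terminal state
        # ('Unknown' means still in progress).
        while True:
            status = requests.get(f"{base}/{refresh_id}", headers=headers).json()["status"]
            if status in ("Completed", "Failed", "Cancelled"):
                print(f"Refresh finished: {status}")
                return
            time.sleep(5)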
@@ -246,7 +256,7 @@ def update_bim_for_directlake(bim_content, workspace_id, lakehouse_id, schema_na
 
 
 def create_dataset_from_bim(dataset_name, bim_content, workspace_id, client):
-    """Create semantic model from BIM using Fabric REST API"""
+    """Create semantic model from BIM using Fabric REST API and return the dataset ID"""
     # Convert to base64
     bim_json = json.dumps(bim_content, indent=2)
     bim_base64 = base64.b64encode(bim_json.encode('utf-8')).decode('utf-8')
@@ -280,7 +290,7 @@ def create_dataset_from_bim(dataset_name, bim_content, workspace_id, client):
 
     print(f"✓ Semantic model created")
 
-    # Handle long-running operation
+    # Handle long-running operation and return the dataset ID
     if response.status_code == 202:
         operation_id = response.headers.get('x-ms-operation-id')
         print(f" Waiting for operation to complete...")
@@ -288,17 +298,38 @@ def create_dataset_from_bim(dataset_name, bim_content, workspace_id, client):
         max_attempts = 30
         for attempt in range(max_attempts):
             time.sleep(2)
+
+            # Get operation result (not just status)
+            result_response = client.get(f"/v1/operations/{operation_id}/result")
+
+            # Check if operation is complete by getting the status
             status_response = client.get(f"/v1/operations/{operation_id}")
             status = status_response.json().get('status')
 
             if status == 'Succeeded':
                 print(f"✓ Operation completed")
-
+                # Return the created dataset ID from the result
+                result_data = result_response.json()
+                dataset_id = result_data.get('id')
+                if dataset_id:
+                    return dataset_id
+                else:
+                    # Fallback: search for the dataset by name
+                    return get_dataset_id(dataset_name, workspace_id, client)
             elif status == 'Failed':
                 error = status_response.json().get('error', {})
                 raise Exception(f"Operation failed: {error.get('message')}")
             elif attempt == max_attempts - 1:
                 raise Exception(f"Operation timed out")
+
+    # For non-async responses (status 200/201)
+    result_data = response.json()
+    dataset_id = result_data.get('id')
+    if dataset_id:
+        return dataset_id
+    else:
+        # Fallback: search for the dataset by name
+        return get_dataset_id(dataset_name, workspace_id, client)
 
 
 def deploy_semantic_model(workspace_name_or_id, lakehouse_name_or_id, schema_name, dataset_name,
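The polling above follows Fabric's long-running-operation convention: a 202 response carries an `x-ms-operation-id` header, `GET /v1/operations/{id}` reports the state, and `GET /v1/operations/{id}/result` returns the created item once the operation has succeeded. A hedged sketch of that convention using a raw bearer token (`wait_for_operation` is an illustrative name; duckrun routes these calls through its `client` wrapper instead):

    import time
    import requests

    FABRIC_API = "https://api.fabric.microsoft.com/v1"

    def wait_for_operation(operation_id: str, token: str) -> dict:
        headers = {"Authorization": f"Bearer {token}"}
        while True:
            state = requests.get(f"{FABRIC_API}/operations/{operation_id}",
                                 headers=headers).json()
            if state.get("status") == "Succeeded":
                # The result payload (e.g. the created item's id) lives on a
                # separate endpoint, available once the operation completes.
                result = requests.get(f"{FABRIC_API}/operations/{operation_id}/result",
                                      headers=headers)
                return result.json()
            if state.get("status") == "Failed":
                raise RuntimeError(f"Operation failed: {state.get('error')}")
            time.sleep(2)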
@@ -365,17 +396,18 @@ def deploy_semantic_model(workspace_name_or_id, lakehouse_name_or_id, schema_nam
     modified_bim['name'] = dataset_name
     modified_bim['id'] = dataset_name
 
-    # Step 5: Deploy
+    # Step 5: Deploy and get the dataset ID
     print("\n[Step 5/6] Deploying semantic model...")
-    create_dataset_from_bim(dataset_name, modified_bim, workspace_id, client)
+    dataset_id = create_dataset_from_bim(dataset_name, modified_bim, workspace_id, client)
+    print(f" Dataset ID: {dataset_id}")
 
     if wait_seconds > 0:
-        print(f" Waiting {wait_seconds} seconds
+        print(f" Waiting {wait_seconds} seconds before refresh...")
         time.sleep(wait_seconds)
 
-    # Step 6: Refresh
+    # Step 6: Refresh using the dataset ID returned from creation
     print("\n[Step 6/6] Refreshing semantic model...")
-    refresh_dataset(dataset_name, workspace_id, client)
+    refresh_dataset(dataset_name, workspace_id, client, dataset_id=dataset_id)
 
     print("\n" + "=" * 70)
     print("🎉 Deployment Completed!")
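Taken together, the deploy-flow changes thread the id returned by `create_dataset_from_bim` into `refresh_dataset`, so the refresh no longer depends on a by-name lookup racing the just-created dataset. A hypothetical direct call of the two changed functions (`client`, `bim`, and `workspace_id` are placeholders for values duckrun normally constructs internally):

    # Hypothetical usage; `client` is duckrun's authenticated Fabric API wrapper,
    # `bim` a parsed BIM dict, `workspace_id` a workspace GUID; all assumed here.
    dataset_id = create_dataset_from_bim("Sales Model", bim, workspace_id, client)
    refresh_dataset("Sales Model", workspace_id, client, dataset_id=dataset_id)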