duckrun-0.2.9.dev2.tar.gz → duckrun-0.2.9.dev4.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/PKG-INFO +1 -1
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun/__init__.py +1 -1
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun/core.py +2 -2
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun/semantic_model.py +35 -10
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun.egg-info/PKG-INFO +1 -1
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/pyproject.toml +1 -1
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/LICENSE +0 -0
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/README.md +0 -0
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun/auth.py +0 -0
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun/files.py +0 -0
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun/lakehouse.py +0 -0
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun/runner.py +0 -0
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun/stats.py +0 -0
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun/writer.py +0 -0
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun.egg-info/SOURCES.txt +0 -0
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun.egg-info/dependency_links.txt +0 -0
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun.egg-info/requires.txt +0 -0
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/duckrun.egg-info/top_level.txt +0 -0
- {duckrun-0.2.9.dev2 → duckrun-0.2.9.dev4}/setup.cfg +0 -0
duckrun/core.py

@@ -727,8 +727,8 @@ class Duckrun:
 
         # Call the deployment function (DirectLake only)
         return deploy_semantic_model(
-
-
+            workspace_name_or_id=self.workspace,
+            lakehouse_name_or_id=self.lakehouse_name,
             schema_name=self.schema,
             dataset_name=dataset_name,
             bim_url=bim_url,
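The core.py change renames the two keyword arguments to `workspace_name_or_id` and `lakehouse_name_or_id`, signaling that either a display name or a GUID is accepted. (The two replaced argument lines were not preserved in this extraction.) As a rough illustration of what a `*_name_or_id` contract usually implies downstream — this helper is hypothetical, not duckrun's code:

```python
import re

# Hypothetical sketch of the usual "*_name_or_id" contract: a value that
# parses as a GUID is used directly; anything else is treated as a display
# name that must be resolved via a lookup supplied by the caller.
GUID_RE = re.compile(
    r"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}"
    r"-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$"
)

def resolve_id(name_or_id: str, lookup_by_name) -> str:
    """Return an ID, resolving a display name through lookup_by_name if needed."""
    if GUID_RE.match(name_or_id):
        return name_or_id
    return lookup_by_name(name_or_id)
```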
duckrun/semantic_model.py

@@ -129,9 +129,12 @@ def check_dataset_exists(dataset_name, workspace_id, client):
     return False
 
 
-def refresh_dataset(dataset_name, workspace_id, client):
+def refresh_dataset(dataset_name, workspace_id, client, dataset_id=None):
     """Refresh a dataset and monitor progress"""
-
+
+    # If dataset_id not provided, look it up by name
+    if not dataset_id:
+        dataset_id = get_dataset_id(dataset_name, workspace_id, client)
 
     payload = {
         "type": "full",
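The new optional `dataset_id` parameter keeps the old name-based behavior as the default while letting callers that already hold the ID skip one API round-trip (a lookup that is also ambiguous when two datasets share a display name). A self-contained sketch of the pattern, with a stub standing in for duckrun's real `get_dataset_id`:

```python
from typing import Optional

def get_dataset_id_stub(dataset_name: str, workspace_id: str) -> str:
    # Stand-in for the real name -> ID resolution (one extra API call).
    print(f"  looking up '{dataset_name}' in workspace {workspace_id}...")
    return "00000000-0000-0000-0000-000000000000"

def refresh_dataset_sketch(dataset_name: str, workspace_id: str,
                           dataset_id: Optional[str] = None) -> str:
    # dev4 behavior: only resolve by name when the caller did not pass an ID.
    if not dataset_id:
        dataset_id = get_dataset_id_stub(dataset_name, workspace_id)
    # ... build the refresh payload and POST it, as in the hunk above ...
    return dataset_id
```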
@@ -246,7 +249,7 @@ def update_bim_for_directlake(bim_content, workspace_id, lakehouse_id, schema_na
 
 
 def create_dataset_from_bim(dataset_name, bim_content, workspace_id, client):
-    """Create semantic model from BIM using Fabric REST API"""
+    """Create semantic model from BIM using Fabric REST API and return the dataset ID"""
     # Convert to base64
     bim_json = json.dumps(bim_content, indent=2)
     bim_base64 = base64.b64encode(bim_json.encode('utf-8')).decode('utf-8')
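For context on the unchanged lines in this hunk: the BIM document is serialized to JSON and base64-encoded so it can be embedded in a JSON request body. A runnable sketch of that round-trip; the `parts`/`InlineBase64` payload shape shown is the common Fabric item-definition convention and is an assumption here, not code from this diff:

```python
import base64
import json

# The BIM model is serialized, then base64-encoded for transport inside JSON.
bim_content = {"name": "demo", "compatibilityLevel": 1604, "model": {}}
bim_json = json.dumps(bim_content, indent=2)
bim_base64 = base64.b64encode(bim_json.encode("utf-8")).decode("utf-8")

# Assumed request shape (typical Fabric item-definition payload):
request_body = {
    "displayName": "demo",
    "definition": {
        "parts": [
            {"path": "model.bim", "payload": bim_base64,
             "payloadType": "InlineBase64"}
        ]
    },
}

# Decoding recovers the original document exactly.
assert json.loads(base64.b64decode(bim_base64)) == bim_content
```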
@@ -280,7 +283,7 @@ def create_dataset_from_bim(dataset_name, bim_content, workspace_id, client):
 
     print(f"✓ Semantic model created")
 
-    # Handle long-running operation
+    # Handle long-running operation and return the dataset ID
     if response.status_code == 202:
         operation_id = response.headers.get('x-ms-operation-id')
         print(f"  Waiting for operation to complete...")
@@ -288,17 +291,38 @@ def create_dataset_from_bim(dataset_name, bim_content, workspace_id, client):
         max_attempts = 30
         for attempt in range(max_attempts):
             time.sleep(2)
+
+            # Get operation result (not just status)
+            result_response = client.get(f"/v1/operations/{operation_id}/result")
+
+            # Check if operation is complete by getting the status
             status_response = client.get(f"/v1/operations/{operation_id}")
             status = status_response.json().get('status')
 
             if status == 'Succeeded':
                 print(f"✓ Operation completed")
-
+                # Return the created dataset ID from the result
+                result_data = result_response.json()
+                dataset_id = result_data.get('id')
+                if dataset_id:
+                    return dataset_id
+                else:
+                    # Fallback: search for the dataset by name
+                    return get_dataset_id(dataset_name, workspace_id, client)
             elif status == 'Failed':
                 error = status_response.json().get('error', {})
                 raise Exception(f"Operation failed: {error.get('message')}")
             elif attempt == max_attempts - 1:
                 raise Exception(f"Operation timed out")
+
+    # For non-async responses (status 200/201)
+    result_data = response.json()
+    dataset_id = result_data.get('id')
+    if dataset_id:
+        return dataset_id
+    else:
+        # Fallback: search for the dataset by name
+        return get_dataset_id(dataset_name, workspace_id, client)
 
 
 def deploy_semantic_model(workspace_name_or_id, lakehouse_name_or_id, schema_name, dataset_name,
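This hunk implements the standard Fabric long-running-operation flow: a 202 response carries an `x-ms-operation-id` header, status is polled at `/v1/operations/{id}`, and the created item is read from `/v1/operations/{id}/result`. One quirk worth noting: the diff fetches `/result` on every loop iteration, even before the operation has succeeded. A minimal sketch of the same loop that defers the result fetch until success (the `client.get(path)` interface is assumed from how it is used in the diff):

```python
import time

def wait_for_operation(client, operation_id: str,
                       max_attempts: int = 30, delay: float = 2.0) -> dict:
    """Poll a Fabric long-running operation and return its result payload."""
    for attempt in range(max_attempts):
        time.sleep(delay)
        status_response = client.get(f"/v1/operations/{operation_id}")
        status = status_response.json().get("status")
        if status == "Succeeded":
            # Fetch the result only once the operation reports success.
            return client.get(f"/v1/operations/{operation_id}/result").json()
        if status == "Failed":
            error = status_response.json().get("error", {})
            raise Exception(f"Operation failed: {error.get('message')}")
    raise Exception("Operation timed out")

# Usage (sketch): dataset_id = wait_for_operation(client, op_id).get("id")
```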
@@ -365,17 +389,18 @@ def deploy_semantic_model(workspace_name_or_id, lakehouse_name_or_id, schema_nam
     modified_bim['name'] = dataset_name
     modified_bim['id'] = dataset_name
 
-    # Step 5: Deploy
+    # Step 5: Deploy and get the dataset ID
     print("\n[Step 5/6] Deploying semantic model...")
-    create_dataset_from_bim(dataset_name, modified_bim, workspace_id, client)
+    dataset_id = create_dataset_from_bim(dataset_name, modified_bim, workspace_id, client)
+    print(f"  Dataset ID: {dataset_id}")
 
     if wait_seconds > 0:
-        print(f"  Waiting {wait_seconds} seconds
+        print(f"  Waiting {wait_seconds} seconds before refresh...")
         time.sleep(wait_seconds)
 
-    # Step 6: Refresh
+    # Step 6: Refresh using the dataset ID returned from creation
     print("\n[Step 6/6] Refreshing semantic model...")
-    refresh_dataset(dataset_name, workspace_id, client)
+    refresh_dataset(dataset_name, workspace_id, client, dataset_id=dataset_id)
 
     print("\n" + "=" * 70)
     print("🎉 Deployment Completed!")
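Taken together, the dev4 changes thread the dataset ID through the whole deployment: creation returns it, step 5 prints it, and step 6 passes it to the refresh instead of re-resolving the dataset by display name. The net flow, with stubs in place of the real Fabric calls:

```python
from typing import Optional

def create_dataset_stub(dataset_name: str) -> str:
    # Stand-in for create_dataset_from_bim, which now returns the new ID.
    return "11111111-1111-1111-1111-111111111111"

def refresh_stub(dataset_name: str, dataset_id: Optional[str] = None) -> None:
    # dev4 path: the ID arrives from creation, so no name lookup is needed.
    assert dataset_id is not None
    print(f"refreshing {dataset_name} ({dataset_id})")

dataset_id = create_dataset_stub("Sales Model")      # Step 5: deploy
print(f"  Dataset ID: {dataset_id}")
refresh_stub("Sales Model", dataset_id=dataset_id)   # Step 6: refresh
```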