superset-showtime 0.4.9__tar.gz → 0.5.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of superset-showtime might be problematic.
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/.claude/settings.local.json +2 -1
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/PKG-INFO +1 -1
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/showtime/__init__.py +1 -1
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/showtime/cli.py +125 -1
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/showtime/core/aws.py +62 -14
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/showtime/core/pull_request.py +80 -28
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/showtime/core/show.py +9 -9
- superset_showtime-0.5.1/tests/unit/test_sha_specific_logic.py +146 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/.gitignore +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/.pre-commit-config.yaml +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/CLAUDE.md +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/Makefile +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/README.md +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/dev-setup.sh +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/pypi-push.sh +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/pyproject.toml +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/requirements-dev.txt +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/requirements.txt +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/showtime/__main__.py +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/showtime/core/__init__.py +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/showtime/core/emojis.py +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/showtime/core/github.py +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/showtime/core/github_messages.py +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/showtime/core/label_colors.py +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/showtime/data/ecs-task-definition.json +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/tests/__init__.py +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/tests/unit/__init__.py +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/tests/unit/test_label_transitions.py +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/tests/unit/test_pull_request.py +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/tests/unit/test_show.py +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/uv.lock +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/workflows-reference/showtime-cleanup.yml +0 -0
- {superset_showtime-0.4.9 → superset_showtime-0.5.1}/workflows-reference/showtime-trigger.yml +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: superset-showtime
-Version: 0.4.9
+Version: 0.5.1
 Summary: 🎪 Apache Superset ephemeral environment management with circus tent emoji state tracking
 Project-URL: Homepage, https://github.com/apache/superset-showtime
 Project-URL: Documentation, https://superset-showtime.readthedocs.io/
showtime/cli.py
@@ -319,8 +319,10 @@ def list(
     # Create table with full terminal width
     table = Table(title="🎪 Environment List", expand=True)
     table.add_column("PR", style="cyan", min_width=6)
+    table.add_column("Type", style="white", min_width=8)
     table.add_column("Status", style="white", min_width=12)
     table.add_column("SHA", style="green", min_width=11)
+    table.add_column("Created", style="dim white", min_width=12)
     table.add_column("Superset URL", style="blue", min_width=25)
     table.add_column("AWS Logs", style="dim blue", min_width=15)
     table.add_column("TTL", style="yellow", min_width=6)
@@ -328,9 +330,23 @@ def list(

     status_emoji = STATUS_DISPLAY

-
+    # Sort by PR number, then by show type (active first, then building, then orphaned)
+    type_priority = {"active": 1, "building": 2, "orphaned": 3}
+    sorted_envs = sorted(filtered_envs, key=lambda e: (e["pr_number"], type_priority.get(e["show"].get("show_type", "orphaned"), 3)))
+
+    for env in sorted_envs:
         show_data = env["show"]
         pr_number = env["pr_number"]
+
+        # Show type with appropriate styling (using single-width chars for alignment)
+        show_type = show_data.get("show_type", "orphaned")
+        if show_type == "active":
+            type_display = "* active"
+        elif show_type == "building":
+            type_display = "# building"
+        else:
+            type_display = "! orphaned"
+
         # Make Superset URL clickable and show full URL
         if show_data["ip"]:
             full_url = f"http://{show_data['ip']}:8080"
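For illustration, the sort key above orders rows like this (hypothetical environment dictionaries, not data from the package):

type_priority = {"active": 1, "building": 2, "orphaned": 3}
envs = [
    {"pr_number": 42, "show": {"show_type": "orphaned"}},
    {"pr_number": 7, "show": {"show_type": "building"}},
    {"pr_number": 7, "show": {"show_type": "active"}},
]
ordered = sorted(envs, key=lambda e: (e["pr_number"], type_priority.get(e["show"].get("show_type", "orphaned"), 3)))
# ordered: PR 7 active, PR 7 building, PR 42 orphaned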
@@ -348,10 +364,22 @@ def list(
         pr_url = f"https://github.com/apache/superset/pull/{pr_number}"
         clickable_pr = f"[link={pr_url}]{pr_number}[/link]"

+        # Format creation time for display
+        created_display = show_data.get("created_at", "-")
+        if created_display and created_display != "-":
+            # Convert 2025-08-25T05-18 to more readable format
+            try:
+                parts = created_display.replace("T", " ").replace("-", ":")
+                created_display = parts[-8:]  # Show just HH:MM:SS
+            except:
+                pass  # Keep original if parsing fails
+
         table.add_row(
             clickable_pr,
+            type_display,
             f"{status_emoji.get(show_data['status'], '❓')} {show_data['status']}",
             show_data["sha"],
+            created_display,
             superset_url,
             aws_logs_link,
             show_data["ttl"],
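A quick worked example of the timestamp conversion above, using the created_at format named in the diff's own comment (2025-08-25T05-18):

created = "2025-08-25T05-18"
parts = created.replace("T", " ").replace("-", ":")  # "2025:08:25 05:18"
print(parts[-8:])  # "25 05:18" - the trailing eight characters of the normalized stamp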
@@ -615,6 +643,102 @@ def setup_labels(
         p(f"🎪 [bold red]Error setting up labels:[/bold red] {e}")


+@app.command()
+def aws_cleanup(
+    dry_run: bool = typer.Option(False, "--dry-run", help="Show what would be cleaned"),
+    force: bool = typer.Option(False, "--force", help="Delete all showtime AWS resources"),
+) -> None:
+    """🧹 Clean up orphaned AWS resources without GitHub labels"""
+    try:
+        from .core.aws import AWSInterface
+
+        aws = AWSInterface()
+
+        p("🔍 [bold blue]Scanning for orphaned AWS resources...[/bold blue]")
+
+        # 1. Get all GitHub PRs with circus labels
+        github_services = set()
+        try:
+            all_pr_numbers = PullRequest.find_all_with_environments()
+            p(f"📋 Found {len(all_pr_numbers)} PRs with circus labels:")
+
+            for pr_number in all_pr_numbers:
+                pr = PullRequest.from_id(pr_number)
+                p(f" 🎪 PR #{pr_number}: {len(pr.shows)} shows, {len(pr.circus_labels)} circus labels")
+
+                for show in pr.shows:
+                    service_name = show.ecs_service_name
+                    github_services.add(service_name)
+                    p(f" 📝 Expected service: {service_name}")
+
+                # Show labels for debugging
+                if not pr.shows:
+                    p(f" ⚠️ No shows found, labels: {pr.circus_labels[:3]}...")  # First 3 labels
+
+        except Exception as e:
+            p(f"⚠️ GitHub scan failed: {e}")
+            github_services = set()
+
+        # 2. Get all AWS ECS services matching showtime pattern
+        p("\n☁️ [bold blue]Scanning AWS ECS services...[/bold blue]")
+        try:
+            aws_services = aws.find_showtime_services()
+            p(f"🔍 Found {len(aws_services)} AWS services with pr-* pattern")
+
+            for service in aws_services:
+                p(f" ☁️ AWS: {service}")
+        except Exception as e:
+            p(f"❌ AWS scan failed: {e}")
+            return
+
+        # 3. Find orphaned services
+        orphaned = [service for service in aws_services if service not in github_services]
+
+        if not orphaned:
+            p("\n✅ [bold green]No orphaned AWS resources found![/bold green]")
+            return
+
+        p(f"\n🚨 [bold red]Found {len(orphaned)} orphaned AWS resources:[/bold red]")
+        for service in orphaned:
+            p(f" 💰 {service} (consuming resources)")
+
+        if dry_run:
+            p(f"\n🎪 [bold yellow]DRY RUN[/bold yellow] - Would delete {len(orphaned)} services")
+            return
+
+        if not force:
+            confirm = typer.confirm(f"Delete {len(orphaned)} orphaned AWS services?")
+            if not confirm:
+                p("🎪 Cancelled")
+                return
+
+        # 4. Delete orphaned resources
+        deleted_count = 0
+        for service in orphaned:
+            p(f"🗑️ Deleting {service}...")
+            try:
+                # Extract PR number for delete_environment call
+                pr_match = service.replace("pr-", "").replace("-service", "")
+                parts = pr_match.split("-")
+                if len(parts) >= 2:
+                    pr_number = int(parts[0])
+                    success = aws.delete_environment(service, pr_number)
+                    if success:
+                        p(f"✅ Deleted {service}")
+                        deleted_count += 1
+                    else:
+                        p(f"❌ Failed to delete {service}")
+                else:
+                    p(f"❌ Invalid service name format: {service}")
+            except Exception as e:
+                p(f"❌ Error deleting {service}: {e}")
+
+        p(f"\n🎪 ✅ Cleanup complete: deleted {deleted_count}/{len(orphaned)} services")
+
+    except Exception as e:
+        p(f"❌ AWS cleanup failed: {e}")
+
+
 @app.command()
 def cleanup(
     dry_run: bool = typer.Option(False, "--dry-run", help="Show what would be cleaned"),
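For reference, the service-name parsing in the deletion loop above behaves like this on a sample service name (the pr-34831-ac533ec-service example comes from a comment later in this diff):

service = "pr-34831-ac533ec-service"
pr_match = service.replace("pr-", "").replace("-service", "")  # "34831-ac533ec"
parts = pr_match.split("-")  # ["34831", "ac533ec"]
pr_number = int(parts[0])  # 34831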
showtime/core/aws.py
@@ -175,36 +175,62 @@ class AWSInterface:

     def delete_environment(self, service_name: str, pr_number: int) -> bool:
         """
-        Delete ephemeral environment
+        Delete ephemeral environment with proper verification

         Steps:
-        1. Check if ECS service exists
-        2. Delete ECS service with
+        1. Check if ECS service exists
+        2. Delete ECS service with force and wait for completion
         3. Delete ECR image tag
+        4. Verify deletion completed
         """
         try:
-
-
-
-
-
-
+            ecs_service_name = f"{service_name}-service" if not service_name.endswith("-service") else service_name
+            print(f"🗑️ Deleting ECS service: {ecs_service_name}")
+
+            # Step 1: Check if service exists
+            if not self._service_exists(ecs_service_name):
+                print(f"✅ Service {ecs_service_name} already deleted")
+                return True
+
+            # Step 2: Delete ECS service (force delete) and wait
+            print(f"☁️ Initiating ECS service deletion...")
+            delete_response = self.ecs_client.delete_service(
+                cluster=self.cluster,
+                service=ecs_service_name,
+                force=True
+            )
+            print(f"🔄 Delete initiated: {delete_response.get('service', {}).get('status', 'unknown')}")
+
+            # Step 3: Wait for deletion to complete (crucial!)
+            print(f"⏳ Waiting for service deletion to complete...")
+            deletion_success = self._wait_for_service_deletion(ecs_service_name, timeout_minutes=10)
+
+            if not deletion_success:
+                print(f"⚠️ Service deletion timeout - service may still exist")
+                return False

-            # Step
-
-
-
+            # Step 4: Delete ECR image tag
+            print(f"🐳 Cleaning up Docker image...")
+            # Fix SHA extraction: pr-34831-ac533ec-service → ac533ec
+            # Remove "pr-" prefix and "-service" suffix, then get SHA (last part)
+            base_name = service_name.replace("pr-", "").replace("-service", "")
+            parts = base_name.split("-")
+            sha = parts[-1] if len(parts) > 1 else base_name  # Last part is SHA
+            image_tag = f"pr-{pr_number}-{sha}-ci"

             try:
                 self.ecr_client.batch_delete_image(
                     repositoryName=self.repository, imageIds=[{"imageTag": image_tag}]
                 )
+                print(f"✅ Deleted ECR image: {image_tag}")
             except self.ecr_client.exceptions.ImageNotFoundException:
-
+                print(f"ℹ️ ECR image {image_tag} already deleted")

+            print(f"✅ Environment {service_name} fully deleted")
             return True

         except Exception as e:
+            print(f"❌ AWS deletion failed: {e}")
             raise AWSError(
                 message=str(e), operation="delete_environment", resource=service_name
             ) from e
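The method above calls self._service_exists and self._wait_for_service_deletion, whose bodies are not part of this diff. A minimal sketch of what such a wait helper could look like, assuming boto3's standard services_inactive waiter (hypothetical, not the package's actual implementation):

def _wait_for_service_deletion(self, service_name: str, timeout_minutes: int = 10) -> bool:
    """Poll until the ECS service reports INACTIVE (or disappears); False on timeout."""
    try:
        waiter = self.ecs_client.get_waiter("services_inactive")
        waiter.wait(
            cluster=self.cluster,
            services=[service_name],
            # 15-second polls; 4 attempts per minute of the requested timeout
            WaiterConfig={"Delay": 15, "MaxAttempts": timeout_minutes * 4},
        )
        return True
    except Exception:
        return False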
@@ -670,6 +696,28 @@ class AWSInterface:
             print(f"❌ Failed to find expired services: {e}")
             return []

+    def find_showtime_services(self) -> List[str]:
+        """Find all ECS services managed by showtime (pr-* pattern)"""
+        try:
+            # List all services in cluster
+            response = self.ecs_client.list_services(cluster=self.cluster)
+
+            if not response.get("serviceArns"):
+                return []
+
+            # Extract service names and filter for showtime pattern
+            showtime_services = []
+            for service_arn in response["serviceArns"]:
+                service_name = service_arn.split("/")[-1]  # Extract name from ARN
+                if service_name.startswith("pr-") and "-service" in service_name:
+                    showtime_services.append(service_name)
+
+            return sorted(showtime_services)
+
+        except Exception as e:
+            print(f"❌ Failed to find showtime services: {e}")
+            return []
+
     def _find_pr_services(self, pr_number: int) -> List[Dict[str, Any]]:
         """Find all ECS services for a specific PR"""
         try:
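Note that ecs_client.list_services returns paginated results, so a cluster with many services may need more than the first page. A paginator-based variant, sketched here under the assumption that boto3's standard list_services paginator is available (this is not code from the package):

def find_showtime_services_paginated(self) -> List[str]:
    """Same pr-*-service filter as above, but walks every page of list_services results."""
    showtime_services: List[str] = []
    paginator = self.ecs_client.get_paginator("list_services")
    for page in paginator.paginate(cluster=self.cluster):
        for service_arn in page.get("serviceArns", []):
            service_name = service_arn.split("/")[-1]
            if service_name.startswith("pr-") and "-service" in service_name:
                showtime_services.append(service_name)
    return sorted(showtime_services)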
showtime/core/pull_request.py
@@ -356,55 +356,93 @@ class PullRequest:
         all_environments = []
         for pr_number in pr_numbers:
             pr = cls.from_id(pr_number)
-
-
-
-
+            # Show ALL environments, not just current_show
+            for show in pr.shows:
+                # Determine show type based on pointer presence
+                show_type = "orphaned"  # Default
+
+                # Check for active pointer
+                if any(label == f"🎪 🎯 {show.sha}" for label in pr.labels):
+                    show_type = "active"
+                # Check for building pointer
+                elif any(label == f"🎪 🏗️ {show.sha}" for label in pr.labels):
+                    show_type = "building"
+                # No pointer = orphaned
+
+                environment_data = {
+                    "pr_number": pr_number,
+                    "status": "active",  # Keep for compatibility
+                    "show": {
+                        "sha": show.sha,
+                        "status": show.status,
+                        "ip": show.ip,
+                        "ttl": show.ttl,
+                        "requested_by": show.requested_by,
+                        "created_at": show.created_at,
+                        "aws_service_name": show.aws_service_name,
+                        "show_type": show_type,  # New field for display
+                    },
+                }
+                all_environments.append(environment_data)

         return all_environments

     def _determine_action(self, target_sha: str) -> str:
-        """Determine what sync action is needed"""
+        """Determine what sync action is needed based on target SHA state"""
+        target_sha_short = target_sha[:7]  # Ensure we're working with short SHA
+
+        # Get the specific show for the target SHA
+        target_show = self.get_show_by_sha(target_sha_short)
+
         # Check for explicit trigger labels
         trigger_labels = [label for label in self.labels if "showtime-trigger-" in label]

         if trigger_labels:
             for trigger in trigger_labels:
                 if "showtime-trigger-start" in trigger:
-                    if
-                        return "create_environment"  #
-                    elif
-                        return "
-                    elif
-                        return "
+                    if not target_show or target_show.status == "failed":
+                        return "create_environment"  # New SHA or failed SHA
+                    elif target_show.status in ["building", "built", "deploying"]:
+                        return "no_action"  # Target SHA already in progress
+                    elif target_show.status == "running":
+                        return "create_environment"  # Force rebuild with trigger
                     else:
-                        return "create_environment"
+                        return "create_environment"  # Default for unknown states
                 elif "showtime-trigger-stop" in trigger:
                     return "destroy_environment"

-        # No explicit triggers - check
-        if
-
-
-
+        # No explicit triggers - check target SHA state
+        if not target_show:
+            # Target SHA doesn't exist - create it
+            return "create_environment"
+        elif target_show.status == "failed":
+            # Target SHA failed - rebuild it
             return "create_environment"
+        elif target_show.status in ["building", "built", "deploying"]:
+            # Target SHA in progress - wait
+            return "no_action"
+        elif target_show.status == "running":
+            # Target SHA already running - no action needed
+            return "no_action"

         return "no_action"

     def _atomic_claim(self, target_sha: str, action: str, dry_run: bool = False) -> bool:
-        """Atomically claim this PR for the current job"""
-
+        """Atomically claim this PR for the current job based on target SHA state"""
+        target_sha_short = target_sha[:7]
+        target_show = self.get_show_by_sha(target_sha_short)
+
+        # 1. Validate current state allows this action for target SHA
         if action in ["create_environment", "rolling_update", "auto_sync"]:
-            if
+            if target_show and target_show.status in [
                 "building",
                 "built",
                 "deploying",
             ]:
-                return False  #
+                return False  # Target SHA already in progress

-            #
-
-            return True  # Allow rolling updates on running environments
+            # Allow actions on failed, running, or non-existent target SHAs
+            return True

         if dry_run:
             print(f"🎪 [DRY-RUN] Would atomically claim PR for {action}")
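The new SHA-specific logic leans on get_show_by_sha, which is not shown in this diff. Based on how it is used here and in the new tests, a plausible sketch would be the following (hypothetical; the real method lives elsewhere in pull_request.py and may differ):

def get_show_by_sha(self, sha: str) -> Optional["Show"]:
    """Return the Show tracked for the given short SHA, or None if no labels exist for it."""
    short_sha = sha[:7]
    for show in self.shows:
        if show.sha == short_sha:
            return show
    return None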
@@ -412,17 +450,31 @@ class PullRequest:

         # 2. Remove trigger labels (atomic operation)
         trigger_labels = [label for label in self.labels if "showtime-trigger-" in label]
-
-
+        if trigger_labels:
+            print(f"🏷️ Removing trigger labels: {trigger_labels}")
+            for trigger_label in trigger_labels:
+                get_github().remove_label(self.pr_number, trigger_label)
+        else:
+            print("🏷️ No trigger labels to remove")

         # 3. Set building state immediately (claim the PR)
         if action in ["create_environment", "rolling_update", "auto_sync"]:
             building_show = self._create_new_show(target_sha)
             building_show.status = "building"
+
             # Update labels to reflect building state
+            print(f"🏷️ Removing existing circus labels...")
             get_github().remove_circus_labels(self.pr_number)
-
-
+
+            new_labels = building_show.to_circus_labels()
+            print(f"🏷️ Creating new labels: {new_labels}")
+            for label in new_labels:
+                try:
+                    get_github().add_label(self.pr_number, label)
+                    print(f" ✅ Added: {label}")
+                except Exception as e:
+                    print(f" ❌ Failed to add {label}: {e}")
+                    raise

         return True

showtime/core/show.py
@@ -175,7 +175,7 @@ class Show:
             "--platform",
             "linux/amd64",
             "--target",
-            "
+            "dev",
             "--build-arg",
             "INCLUDE_CHROMIUM=false",
             "--build-arg",
@@ -202,16 +202,14 @@ class Show:
         ])
         print("🐳 Local environment: Using cache-from only (no export)")

-        # Add --load only when
-        # Intel Mac/Linux can load linux/amd64, Apple Silicon cannot
-        native_x86 = platform.machine() in ("x86_64", "AMD64")
+        # Add --load only when explicitly requested for local testing
         force_load = os.getenv("DOCKER_LOAD", "false").lower() == "true"

-        if
+        if force_load:
             cmd.append("--load")
-            print("🐳 Will load image to local Docker daemon (
+            print("🐳 Will load image to local Docker daemon (DOCKER_LOAD=true)")
         else:
-            print("🐳
+            print("🐳 Push-only build (no local load) - faster for CI/deployment")

         # Add build context path last
         cmd.append(".")
@@ -270,8 +268,10 @@ class Show:
             elif emoji == "🤡":  # User (clown!)
                 show_data["requested_by"] = value

-        #
-
+        # Return Show if we found any status labels for this SHA
+        # For list purposes, we want to show ALL environments, even orphaned ones
+        has_status = any(label.startswith(f"🎪 {sha} 🚦 ") for label in labels)
+        if has_status:
             return cls(**show_data)  # type: ignore[arg-type]

         return None
tests/unit/test_sha_specific_logic.py (new file)
@@ -0,0 +1,146 @@
+"""
+TDD tests for SHA-specific build logic
+
+Tests the correct behavior when multiple environments exist per PR.
+The system should make decisions based on the target SHA's state,
+not the overall PR state.
+"""
+
+from showtime.core.pull_request import PullRequest
+
+
+def test_target_sha_does_not_exist_should_build():
+    """When target SHA doesn't exist, should create environment"""
+    # PR with existing environments for different SHAs
+    pr = PullRequest(1234, [
+        "🎪 abc123f 🚦 running",  # Different SHA running
+        "🎪 🎯 abc123f",  # Active pointer to different SHA
+        "🎪 def456a 🚦 failed",  # Different SHA failed
+        "🎪 xyz789b 🚦 building",  # Different SHA building
+    ])
+
+    # Target a completely new SHA
+    action = pr._determine_action("new567c")
+
+    # Should create environment for new SHA
+    assert action == "create_environment"
+
+
+def test_target_sha_failed_should_rebuild():
+    """When target SHA is in failed state, should rebuild"""
+    pr = PullRequest(1234, [
+        "🎪 abc123f 🚦 running",  # Other SHA running
+        "🎪 🎯 abc123f",  # Active pointer
+        "🎪 def456a 🚦 failed",  # Target SHA failed
+        "🎪 🎯 def456a",  # Target has pointer (failed but pointed to)
+    ])
+
+    # Target the failed SHA
+    action = pr._determine_action("def456a")
+
+    # Should rebuild failed environment
+    assert action == "create_environment"
+
+
+def test_target_sha_building_should_wait():
+    """When target SHA is already building, should not start another build"""
+    pr = PullRequest(1234, [
+        "🎪 abc123f 🚦 running",  # Other SHA running
+        "🎪 🎯 abc123f",  # Active pointer
+        "🎪 def456a 🚦 building",  # Target SHA building
+        "🎪 🏗️ def456a",  # Building pointer
+    ])
+
+    # Target the building SHA
+    action = pr._determine_action("def456a")
+
+    # Should not start duplicate build
+    assert action == "no_action"
+
+
+def test_target_sha_running_should_not_rebuild():
+    """When target SHA is already running, should not rebuild"""
+    pr = PullRequest(1234, [
+        "🎪 abc123f 🚦 running",  # Target SHA running
+        "🎪 🎯 abc123f",  # Active pointer
+        "🎪 def456a 🚦 building",  # Other SHA building
+    ])
+
+    # Target the running SHA (same as current)
+    action = pr._determine_action("abc123f")
+
+    # Should not rebuild running environment
+    assert action == "no_action"
+
+
+def test_target_sha_running_with_trigger_should_rebuild():
+    """When target SHA is running but has start trigger, should rebuild"""
+    pr = PullRequest(1234, [
+        "🎪 ⚡ showtime-trigger-start",  # Explicit start trigger
+        "🎪 abc123f 🚦 running",  # Target SHA running
+        "🎪 🎯 abc123f",  # Active pointer
+    ])
+
+    # Target the running SHA with explicit trigger
+    action = pr._determine_action("abc123f")
+
+    # Should rebuild due to explicit trigger (force rebuild)
+    assert action == "create_environment"
+
+
+def test_atomic_claim_sha_specific_validation():
+    """Atomic claim should validate based on target SHA state, not any environment"""
+    pr = PullRequest(1234, [
+        "🎪 abc123f 🚦 running",  # Other SHA running
+        "🎪 🎯 abc123f",  # Active pointer
+        "🎪 def456a 🚦 building",  # Other SHA building
+    ])
+
+    # Should allow claim for new SHA even though other SHAs are active
+    can_claim_new = pr._atomic_claim("new567c", "create_environment", dry_run=True)
+    assert can_claim_new is True
+
+    # Should block claim for SHA that's already building
+    can_claim_building = pr._atomic_claim("def456a", "create_environment", dry_run=True)
+    assert can_claim_building is False
+
+    # Should allow claim for running SHA with rolling update
+    can_claim_rolling = pr._atomic_claim("abc123f", "rolling_update", dry_run=True)
+    assert can_claim_rolling is True
+
+
+def test_multiple_environments_pointer_management():
+    """Test proper pointer management with multiple environments"""
+    # Scenario: Multiple environments exist, need to identify which is which
+    pr = PullRequest(1234, [
+        "🎪 abc123f 🚦 running",  # Old active
+        "🎪 🎯 abc123f",  # Active pointer (should be only one)
+        "🎪 def456a 🚦 running",  # Orphaned (no pointer)
+        "🎪 xyz789b 🚦 failed",  # Failed (no pointer)
+    ])
+
+    # Should have 3 total shows
+    assert len(pr.shows) == 3
+
+    # Should have 1 active show (with pointer)
+    assert pr.current_show is not None
+    assert pr.current_show.sha == "abc123f"
+
+    # Should have no building show
+    assert pr.building_show is None
+
+    # Other shows should be findable but not pointed to
+    def456a_show = pr.get_show_by_sha("def456a")
+    assert def456a_show is not None
+    assert def456a_show.status == "running"
+
+    xyz789b_show = pr.get_show_by_sha("xyz789b")
+    assert xyz789b_show is not None
+    assert xyz789b_show.status == "failed"
+
+
+def test_rolling_update_should_clean_old_pointers():
+    """Rolling update should remove old active pointer and add new one"""
+    # This test defines the expected behavior for pointer management
+    # Implementation should ensure only 1 active pointer exists at a time
+    pass  # Implementation test - will write after fixing the logic
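These tests drive the decision logic purely from label lists, with no GitHub or AWS access. A minimal standalone check in the same spirit, assuming the PullRequest(pr_number, labels) constructor used in the test file:

from showtime.core.pull_request import PullRequest

pr = PullRequest(1234, ["🎪 abc123f 🚦 running", "🎪 🎯 abc123f"])
assert pr._determine_action("abc123f") == "no_action"  # target SHA already running
assert pr._determine_action("new567c") == "create_environment"  # unknown SHA gets built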