zenml-nightly 0.75.0.dev20250312__py3-none-any.whl → 0.75.0.dev20250313__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (160) hide show
  1. zenml/VERSION +1 -1
  2. zenml/__init__.py +2 -0
  3. zenml/analytics/context.py +7 -0
  4. zenml/artifacts/utils.py +0 -2
  5. zenml/cli/login.py +6 -0
  6. zenml/cli/model.py +7 -15
  7. zenml/cli/secret.py +47 -44
  8. zenml/cli/service_connectors.py +0 -1
  9. zenml/cli/stack.py +0 -1
  10. zenml/cli/tag.py +3 -5
  11. zenml/cli/utils.py +25 -23
  12. zenml/cli/workspace.py +79 -5
  13. zenml/client.py +615 -348
  14. zenml/config/global_config.py +16 -3
  15. zenml/config/pipeline_configurations.py +3 -2
  16. zenml/config/pipeline_run_configuration.py +2 -1
  17. zenml/config/secret_reference_mixin.py +1 -1
  18. zenml/constants.py +1 -3
  19. zenml/enums.py +0 -7
  20. zenml/event_hub/event_hub.py +3 -1
  21. zenml/exceptions.py +0 -24
  22. zenml/integrations/aws/orchestrators/sagemaker_orchestrator.py +5 -3
  23. zenml/integrations/bitbucket/plugins/event_sources/bitbucket_webhook_event_source.py +1 -4
  24. zenml/integrations/github/plugins/event_sources/github_webhook_event_source.py +1 -4
  25. zenml/integrations/mlflow/steps/mlflow_registry.py +1 -1
  26. zenml/integrations/seldon/model_deployers/seldon_model_deployer.py +1 -1
  27. zenml/integrations/wandb/flavors/wandb_experiment_tracker_flavor.py +3 -3
  28. zenml/model/model.py +8 -8
  29. zenml/models/__init__.py +18 -1
  30. zenml/models/v2/base/base.py +0 -5
  31. zenml/models/v2/base/filter.py +1 -1
  32. zenml/models/v2/base/scoped.py +104 -121
  33. zenml/models/v2/core/api_key.py +1 -1
  34. zenml/models/v2/core/artifact.py +31 -18
  35. zenml/models/v2/core/artifact_version.py +42 -25
  36. zenml/models/v2/core/component.py +22 -33
  37. zenml/models/v2/core/device.py +3 -2
  38. zenml/models/v2/core/event_source.py +2 -2
  39. zenml/models/v2/core/flavor.py +19 -47
  40. zenml/models/v2/core/logs.py +1 -2
  41. zenml/models/v2/core/model.py +7 -4
  42. zenml/models/v2/core/model_version.py +36 -27
  43. zenml/models/v2/core/pipeline.py +1 -1
  44. zenml/models/v2/core/pipeline_run.py +5 -13
  45. zenml/models/v2/core/run_template.py +1 -2
  46. zenml/models/v2/core/schedule.py +0 -9
  47. zenml/models/v2/core/secret.py +93 -127
  48. zenml/models/v2/core/server_settings.py +2 -2
  49. zenml/models/v2/core/service.py +43 -12
  50. zenml/models/v2/core/service_connector.py +14 -16
  51. zenml/models/v2/core/stack.py +24 -26
  52. zenml/models/v2/core/step_run.py +3 -15
  53. zenml/models/v2/core/tag.py +41 -15
  54. zenml/models/v2/core/user.py +19 -2
  55. zenml/models/v2/misc/statistics.py +45 -0
  56. zenml/models/v2/misc/tag.py +27 -0
  57. zenml/orchestrators/cache_utils.py +1 -1
  58. zenml/orchestrators/input_utils.py +1 -0
  59. zenml/orchestrators/step_launcher.py +0 -1
  60. zenml/orchestrators/step_run_utils.py +0 -2
  61. zenml/orchestrators/step_runner.py +10 -1
  62. zenml/pipelines/build_utils.py +0 -2
  63. zenml/pipelines/pipeline_decorator.py +3 -2
  64. zenml/pipelines/pipeline_definition.py +4 -5
  65. zenml/pipelines/run_utils.py +3 -3
  66. zenml/service_connectors/service_connector.py +0 -7
  67. zenml/service_connectors/service_connector_utils.py +0 -1
  68. zenml/stack/authentication_mixin.py +1 -1
  69. zenml/stack/flavor.py +3 -14
  70. zenml/stack/stack_component.py +1 -5
  71. zenml/steps/step_context.py +19 -0
  72. zenml/utils/string_utils.py +1 -1
  73. zenml/utils/tag_utils.py +642 -0
  74. zenml/zen_server/cloud_utils.py +21 -0
  75. zenml/zen_server/exceptions.py +0 -6
  76. zenml/zen_server/rbac/endpoint_utils.py +134 -46
  77. zenml/zen_server/rbac/models.py +65 -3
  78. zenml/zen_server/rbac/rbac_interface.py +9 -0
  79. zenml/zen_server/rbac/rbac_sql_zen_store.py +15 -7
  80. zenml/zen_server/rbac/utils.py +156 -29
  81. zenml/zen_server/rbac/zenml_cloud_rbac.py +43 -11
  82. zenml/zen_server/routers/actions_endpoints.py +3 -5
  83. zenml/zen_server/routers/artifact_endpoint.py +0 -5
  84. zenml/zen_server/routers/artifact_version_endpoints.py +15 -9
  85. zenml/zen_server/routers/auth_endpoints.py +22 -7
  86. zenml/zen_server/routers/code_repositories_endpoints.py +56 -3
  87. zenml/zen_server/routers/devices_endpoints.py +0 -4
  88. zenml/zen_server/routers/event_source_endpoints.py +0 -5
  89. zenml/zen_server/routers/flavors_endpoints.py +0 -5
  90. zenml/zen_server/routers/logs_endpoints.py +0 -1
  91. zenml/zen_server/routers/model_versions_endpoints.py +102 -23
  92. zenml/zen_server/routers/models_endpoints.py +51 -68
  93. zenml/zen_server/routers/pipeline_builds_endpoints.py +58 -4
  94. zenml/zen_server/routers/pipeline_deployments_endpoints.py +58 -4
  95. zenml/zen_server/routers/pipelines_endpoints.py +73 -4
  96. zenml/zen_server/routers/plugin_endpoints.py +0 -1
  97. zenml/zen_server/routers/run_metadata_endpoints.py +99 -0
  98. zenml/zen_server/routers/run_templates_endpoints.py +66 -3
  99. zenml/zen_server/routers/runs_endpoints.py +60 -8
  100. zenml/zen_server/routers/schedule_endpoints.py +69 -6
  101. zenml/zen_server/routers/secrets_endpoints.py +40 -4
  102. zenml/zen_server/routers/server_endpoints.py +53 -1
  103. zenml/zen_server/routers/service_accounts_endpoints.py +14 -15
  104. zenml/zen_server/routers/service_connectors_endpoints.py +96 -14
  105. zenml/zen_server/routers/service_endpoints.py +20 -7
  106. zenml/zen_server/routers/stack_components_endpoints.py +68 -7
  107. zenml/zen_server/routers/stacks_endpoints.py +98 -7
  108. zenml/zen_server/routers/steps_endpoints.py +17 -11
  109. zenml/zen_server/routers/tag_resource_endpoints.py +115 -0
  110. zenml/zen_server/routers/tags_endpoints.py +6 -17
  111. zenml/zen_server/routers/triggers_endpoints.py +5 -8
  112. zenml/zen_server/routers/users_endpoints.py +47 -12
  113. zenml/zen_server/routers/workspaces_endpoints.py +56 -1285
  114. zenml/zen_server/template_execution/utils.py +5 -4
  115. zenml/zen_server/utils.py +21 -0
  116. zenml/zen_server/zen_server_api.py +4 -0
  117. zenml/zen_stores/base_zen_store.py +29 -44
  118. zenml/zen_stores/migrations/versions/1cb6477f72d6_move_artifact_save_type.py +20 -10
  119. zenml/zen_stores/migrations/versions/1f9d1cd00b90_add_unique_name_constraints.py +231 -0
  120. zenml/zen_stores/migrations/versions/288f4fb6e112_make_tags_user_scoped.py +74 -0
  121. zenml/zen_stores/migrations/versions/2e695a26fe7a_add_user_default_workspace.py +45 -0
  122. zenml/zen_stores/migrations/versions/3b1776345020_remove_workspace_from_globals.py +81 -0
  123. zenml/zen_stores/migrations/versions/41b28cae31ce_make_artifacts_workspace_scoped.py +136 -0
  124. zenml/zen_stores/migrations/versions/9e7bf0970266_adding_exclusive_attribute_to_tags.py +47 -0
  125. zenml/zen_stores/migrations/versions/b557b2871693_update_step_run_input_types.py +8 -4
  126. zenml/zen_stores/migrations/versions/cc269488e5a9_separate_run_metadata.py +12 -6
  127. zenml/zen_stores/migrations/versions/f1d723fd723b_add_secret_private_attr.py +61 -0
  128. zenml/zen_stores/migrations/versions/f76a368a25a5_add_stack_description.py +35 -0
  129. zenml/zen_stores/rest_zen_store.py +172 -171
  130. zenml/zen_stores/schemas/action_schemas.py +8 -1
  131. zenml/zen_stores/schemas/api_key_schemas.py +8 -1
  132. zenml/zen_stores/schemas/artifact_schemas.py +28 -1
  133. zenml/zen_stores/schemas/code_repository_schemas.py +8 -1
  134. zenml/zen_stores/schemas/component_schemas.py +9 -14
  135. zenml/zen_stores/schemas/event_source_schemas.py +8 -1
  136. zenml/zen_stores/schemas/flavor_schemas.py +14 -20
  137. zenml/zen_stores/schemas/model_schemas.py +3 -0
  138. zenml/zen_stores/schemas/pipeline_deployment_schemas.py +3 -1
  139. zenml/zen_stores/schemas/pipeline_run_schemas.py +0 -3
  140. zenml/zen_stores/schemas/run_template_schemas.py +8 -4
  141. zenml/zen_stores/schemas/schedule_schema.py +9 -14
  142. zenml/zen_stores/schemas/secret_schemas.py +15 -25
  143. zenml/zen_stores/schemas/service_connector_schemas.py +8 -17
  144. zenml/zen_stores/schemas/service_schemas.py +0 -1
  145. zenml/zen_stores/schemas/stack_schemas.py +12 -15
  146. zenml/zen_stores/schemas/step_run_schemas.py +7 -8
  147. zenml/zen_stores/schemas/tag_schemas.py +30 -2
  148. zenml/zen_stores/schemas/trigger_schemas.py +8 -1
  149. zenml/zen_stores/schemas/user_schemas.py +24 -2
  150. zenml/zen_stores/schemas/utils.py +16 -0
  151. zenml/zen_stores/schemas/workspace_schemas.py +7 -25
  152. zenml/zen_stores/secrets_stores/service_connector_secrets_store.py +0 -3
  153. zenml/zen_stores/sql_zen_store.py +2905 -2280
  154. zenml/zen_stores/template_utils.py +1 -1
  155. zenml/zen_stores/zen_store_interface.py +82 -58
  156. {zenml_nightly-0.75.0.dev20250312.dist-info → zenml_nightly-0.75.0.dev20250313.dist-info}/METADATA +1 -1
  157. {zenml_nightly-0.75.0.dev20250312.dist-info → zenml_nightly-0.75.0.dev20250313.dist-info}/RECORD +160 -147
  158. {zenml_nightly-0.75.0.dev20250312.dist-info → zenml_nightly-0.75.0.dev20250313.dist-info}/LICENSE +0 -0
  159. {zenml_nightly-0.75.0.dev20250312.dist-info → zenml_nightly-0.75.0.dev20250313.dist-info}/WHEEL +0 -0
  160. {zenml_nightly-0.75.0.dev20250312.dist-info → zenml_nightly-0.75.0.dev20250313.dist-info}/entry_points.txt +0 -0
@@ -149,9 +149,10 @@ def run_template(
149
149
  )
150
150
 
151
151
  def _task() -> None:
152
- pypi_requirements, apt_packages = (
153
- requirements_utils.get_requirements_for_stack(stack=stack)
154
- )
152
+ (
153
+ pypi_requirements,
154
+ apt_packages,
155
+ ) = requirements_utils.get_requirements_for_stack(stack=stack)
155
156
 
156
157
  if build.python_version:
157
158
  version_info = version.parse(build.python_version)
@@ -406,7 +407,6 @@ def deployment_request_from_template(
406
407
  assert deployment.stack
407
408
  assert deployment.build
408
409
  deployment_request = PipelineDeploymentRequest(
409
- user=user_id,
410
410
  workspace=deployment.workspace.id,
411
411
  run_name_template=config.run_name
412
412
  or get_default_run_name(pipeline_name=pipeline_configuration.name),
@@ -463,6 +463,7 @@ def get_pipeline_run_analytics_metadata(
463
463
  }
464
464
 
465
465
  return {
466
+ "workspace_id": deployment.workspace.id,
466
467
  "store_type": "rest", # This method is called from within a REST endpoint
467
468
  **stack_metadata,
468
469
  "total_steps": len(deployment.step_configurations),
zenml/zen_server/utils.py CHANGED
@@ -26,9 +26,11 @@ from typing import (
26
26
  Tuple,
27
27
  Type,
28
28
  TypeVar,
29
+ Union,
29
30
  cast,
30
31
  )
31
32
  from urllib.parse import urlparse
33
+ from uuid import UUID
32
34
 
33
35
  from pydantic import BaseModel, ValidationError
34
36
 
@@ -44,6 +46,7 @@ from zenml.constants import (
44
46
  from zenml.enums import StoreType
45
47
  from zenml.exceptions import IllegalOperationError, OAuthError
46
48
  from zenml.logger import get_logger
49
+ from zenml.models.v2.base.scoped import WorkspaceScopedFilter
47
50
  from zenml.plugins.plugin_flavor_registry import PluginFlavorRegistry
48
51
  from zenml.zen_server.cache import MemoryCache
49
52
  from zenml.zen_server.deploy.deployment import (
@@ -635,3 +638,21 @@ def get_zenml_headers() -> Dict[str, str]:
635
638
  headers["zenml-server-url"] = config.server_url
636
639
 
637
640
  return headers
641
+
642
+
643
+ def set_filter_workspace_scope(
644
+ filter_model: WorkspaceScopedFilter,
645
+ workspace_name_or_id: Optional[Union[UUID, str]] = None,
646
+ ) -> None:
647
+ """Set the workspace scope of the filter model.
648
+
649
+ Args:
650
+ filter_model: The filter model to set the scope for.
651
+ workspace_name_or_id: The workspace to set the scope for. If not
652
+ provided, the workspace scope is determined from the request
653
+ workspace filter or the default workspace, in that order.
654
+ """
655
+ zen_store().set_filter_workspace_id(
656
+ filter_model=filter_model,
657
+ workspace_name_or_id=workspace_name_or_id,
658
+ )
@@ -73,6 +73,7 @@ from zenml.zen_server.routers import (
73
73
  pipeline_deployments_endpoints,
74
74
  pipelines_endpoints,
75
75
  plugin_endpoints,
76
+ run_metadata_endpoints,
76
77
  run_templates_endpoints,
77
78
  runs_endpoints,
78
79
  schedule_endpoints,
@@ -85,6 +86,7 @@ from zenml.zen_server.routers import (
85
86
  stack_deployment_endpoints,
86
87
  stacks_endpoints,
87
88
  steps_endpoints,
89
+ tag_resource_endpoints,
88
90
  tags_endpoints,
89
91
  triggers_endpoints,
90
92
  users_endpoints,
@@ -472,6 +474,7 @@ app.include_router(pipelines_endpoints.router)
472
474
  app.include_router(pipeline_builds_endpoints.router)
473
475
  app.include_router(pipeline_deployments_endpoints.router)
474
476
  app.include_router(runs_endpoints.router)
477
+ app.include_router(run_metadata_endpoints.router)
475
478
  app.include_router(run_templates_endpoints.router)
476
479
  app.include_router(schedule_endpoints.router)
477
480
  app.include_router(secrets_endpoints.router)
@@ -487,6 +490,7 @@ app.include_router(stack_components_endpoints.router)
487
490
  app.include_router(stack_components_endpoints.types_router)
488
491
  app.include_router(steps_endpoints.router)
489
492
  app.include_router(tags_endpoints.router)
493
+ app.include_router(tag_resource_endpoints.router)
490
494
  app.include_router(triggers_endpoints.router)
491
495
  app.include_router(users_endpoints.router)
492
496
  app.include_router(users_endpoints.current_user_router)
@@ -295,16 +295,16 @@ class BaseZenStore(
295
295
  active_workspace_name_or_id: Optional[Union[str, UUID]] = None,
296
296
  active_stack_id: Optional[UUID] = None,
297
297
  config_name: str = "",
298
- ) -> Tuple[WorkspaceResponse, StackResponse]:
298
+ ) -> Tuple[Optional[WorkspaceResponse], StackResponse]:
299
299
  """Validate the active configuration.
300
300
 
301
301
  Call this method to validate the supplied active workspace and active
302
302
  stack values.
303
303
 
304
- This method is guaranteed to return valid workspace ID and stack ID
305
- values. If the supplied workspace and stack are not set or are not valid
306
- (e.g. they do not exist or are not accessible), the default workspace and
307
- default workspace stack will be returned in their stead.
304
+ This method returns a valid workspace and stack values. If the
305
+ supplied workspace and stack are not set or are not valid (e.g. they
306
+ do not exist or are not accessible), the default workspace and default
307
+ stack will be returned in their stead.
308
308
 
309
309
  Args:
310
310
  active_workspace_name_or_id: The name or ID of the active workspace.
@@ -315,28 +315,34 @@ class BaseZenStore(
315
315
  Returns:
316
316
  A tuple containing the active workspace and active stack.
317
317
  """
318
- active_workspace: WorkspaceResponse
318
+ active_workspace: Optional[WorkspaceResponse] = None
319
319
 
320
320
  if active_workspace_name_or_id:
321
321
  try:
322
322
  active_workspace = self.get_workspace(
323
323
  active_workspace_name_or_id
324
324
  )
325
- except KeyError:
326
- active_workspace = self._get_default_workspace()
327
-
325
+ except (KeyError, IllegalOperationError):
326
+ active_workspace_name_or_id = None
328
327
  logger.warning(
329
328
  f"The current {config_name} active workspace is no longer "
330
- f"available. Resetting the active workspace to "
331
- f"'{active_workspace.name}'."
329
+ f"available."
332
330
  )
333
- else:
334
- active_workspace = self._get_default_workspace()
335
331
 
336
- logger.info(
337
- f"Setting the {config_name} active workspace "
338
- f"to '{active_workspace.name}'."
339
- )
332
+ if active_workspace is None:
333
+ try:
334
+ active_workspace = self._get_default_workspace()
335
+ except (KeyError, IllegalOperationError):
336
+ logger.warning(
337
+ "An active workspace is not set. Please set the active "
338
+ "workspace by running `zenml workspace set "
339
+ "<workspace-name>`."
340
+ )
341
+ else:
342
+ logger.info(
343
+ f"Setting the {config_name} active workspace "
344
+ f"to '{active_workspace.name}'."
345
+ )
340
346
 
341
347
  active_stack: StackResponse
342
348
 
@@ -351,28 +357,14 @@ class BaseZenStore(
351
357
  "Resetting the active stack to default.",
352
358
  config_name,
353
359
  )
354
- active_stack = self._get_default_stack(
355
- workspace_id=active_workspace.id
356
- )
357
- else:
358
- if active_stack.workspace.id != active_workspace.id:
359
- logger.warning(
360
- "The current %s active stack is not part of the active "
361
- "workspace. Resetting the active stack to default.",
362
- config_name,
363
- )
364
- active_stack = self._get_default_stack(
365
- workspace_id=active_workspace.id
366
- )
360
+ active_stack = self._get_default_stack()
367
361
 
368
362
  else:
369
363
  logger.warning(
370
364
  "Setting the %s active stack to default.",
371
365
  config_name,
372
366
  )
373
- active_stack = self._get_default_stack(
374
- workspace_id=active_workspace.id
375
- )
367
+ active_stack = self._get_default_stack()
376
368
 
377
369
  return active_workspace, active_stack
378
370
 
@@ -462,29 +454,22 @@ class BaseZenStore(
462
454
 
463
455
  def _get_default_stack(
464
456
  self,
465
- workspace_id: UUID,
466
457
  ) -> StackResponse:
467
- """Get the default stack for a user in a workspace.
468
-
469
- Args:
470
- workspace_id: ID of the workspace.
458
+ """Get the default stack.
471
459
 
472
460
  Returns:
473
- The default stack in the workspace.
461
+ The default stack.
474
462
 
475
463
  Raises:
476
- KeyError: if the workspace or default stack doesn't exist.
464
+ KeyError: if the default stack doesn't exist.
477
465
  """
478
466
  default_stacks = self.list_stacks(
479
467
  StackFilter(
480
- workspace_id=workspace_id,
481
468
  name=DEFAULT_STACK_AND_COMPONENT_NAME,
482
469
  )
483
470
  )
484
471
  if default_stacks.total == 0:
485
- raise KeyError(
486
- f"No default stack found in workspace {workspace_id}."
487
- )
472
+ raise KeyError("No default stack found.")
488
473
  return default_stacks.items[0]
489
474
 
490
475
  def get_external_user(self, user_id: UUID) -> UserResponse:
@@ -23,7 +23,8 @@ def upgrade() -> None:
23
23
  batch_op.add_column(sa.Column("save_type", sa.TEXT(), nullable=True))
24
24
 
25
25
  # Step 2: Move data from step_run_output_artifact.type to artifact_version.save_type
26
- op.execute("""
26
+ op.execute(
27
+ """
27
28
  UPDATE artifact_version
28
29
  SET save_type = (
29
30
  SELECT max(step_run_output_artifact.type)
@@ -31,17 +32,22 @@ def upgrade() -> None:
31
32
  WHERE step_run_output_artifact.artifact_id = artifact_version.id
32
33
  GROUP BY artifact_id
33
34
  )
34
- """)
35
- op.execute("""
35
+ """
36
+ )
37
+ op.execute(
38
+ """
36
39
  UPDATE artifact_version
37
40
  SET save_type = 'step_output'
38
41
  WHERE artifact_version.save_type = 'default'
39
- """)
40
- op.execute("""
42
+ """
43
+ )
44
+ op.execute(
45
+ """
41
46
  UPDATE artifact_version
42
47
  SET save_type = 'external'
43
48
  WHERE save_type is NULL
44
- """)
49
+ """
50
+ )
45
51
 
46
52
  # # Step 3: Set save_type to non-nullable
47
53
  with op.batch_alter_table("artifact_version", schema=None) as batch_op:
@@ -69,7 +75,8 @@ def downgrade() -> None:
69
75
  )
70
76
 
71
77
  # Move data back from artifact_version.save_type to step_run_output_artifact.type
72
- op.execute("""
78
+ op.execute(
79
+ """
73
80
  UPDATE step_run_output_artifact
74
81
  SET type = (
75
82
  SELECT max(artifact_version.save_type)
@@ -77,12 +84,15 @@ def downgrade() -> None:
77
84
  WHERE step_run_output_artifact.artifact_id = artifact_version.id
78
85
  GROUP BY artifact_id
79
86
  )
80
- """)
81
- op.execute("""
87
+ """
88
+ )
89
+ op.execute(
90
+ """
82
91
  UPDATE step_run_output_artifact
83
92
  SET type = 'default'
84
93
  WHERE step_run_output_artifact.type = 'step_output'
85
- """)
94
+ """
95
+ )
86
96
 
87
97
  # Set type to non-nullable
88
98
  with op.batch_alter_table(
@@ -0,0 +1,231 @@
1
+ """add unique name constraints [1f9d1cd00b90].
2
+
3
+ Revision ID: 1f9d1cd00b90
4
+ Revises: f76a368a25a5
5
+ Create Date: 2025-02-22 20:18:34.258987
6
+
7
+ """
8
+
9
+ import sqlalchemy as sa
10
+ from alembic import op
11
+ from sqlalchemy.orm import Session
12
+
13
+ from zenml.logger import get_logger
14
+
15
+ logger = get_logger(__name__)
16
+
17
+ # revision identifiers, used by Alembic.
18
+ revision = "1f9d1cd00b90"
19
+ down_revision = "f76a368a25a5"
20
+ branch_labels = None
21
+ depends_on = None
22
+
23
+
24
+ def resolve_duplicate_names(
25
+ table_name: str, other_columns: list[str], session: Session
26
+ ) -> None:
27
+ """Resolve duplicate entities.
28
+
29
+ Args:
30
+ table_name: The name of the table to resolve duplicate entities for.
31
+ other_columns: The columns that are part of the unique constraint,
32
+ excluding the name column.
33
+ session: The SQLAlchemy session to use.
34
+ """
35
+ columns = ["name"] + other_columns
36
+ duplicates = session.execute(
37
+ sa.text(
38
+ f"""
39
+ SELECT id, name
40
+ FROM `{table_name}`
41
+ WHERE ({", ".join(columns)}) IN (
42
+ SELECT {", ".join(columns)}
43
+ FROM `{table_name}`
44
+ GROUP BY {", ".join(columns)}
45
+ HAVING COUNT(*) > 1
46
+ )
47
+ """ # nosec B608
48
+ )
49
+ )
50
+ for id_, name in list(duplicates)[1:]:
51
+ logger.warning(f"Duplicate {table_name}: {name} (id: {id_})")
52
+ session.execute(
53
+ sa.text(
54
+ f"""
55
+ UPDATE {table_name}
56
+ SET name = :new_name
57
+ WHERE id = :id_
58
+ """ # nosec B608
59
+ ),
60
+ params={"id_": id_, "new_name": f"{name}_{id_[:6]}"},
61
+ )
62
+
63
+
64
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision."""
    session = Session(bind=op.get_bind())

    # (table, extra unique-key columns, constraint to create,
    #  pre-existing constraint to drop first or None)
    constraint_specs = [
        ("action", ["workspace_id"], "unique_action_name_in_workspace", None),
        (
            "api_key",
            ["service_account_id"],
            "unique_api_key_name_in_service_account",
            None,
        ),
        (
            "artifact",
            ["workspace_id"],
            "unique_artifact_name_in_workspace",
            "unique_artifact_name",
        ),
        (
            "code_repository",
            ["workspace_id"],
            "unique_code_repository_name_in_workspace",
            None,
        ),
        (
            "event_source",
            ["workspace_id"],
            "unique_event_source_name_in_workspace",
            None,
        ),
        ("flavor", ["type"], "unique_flavor_name_and_type", None),
        (
            "schedule",
            ["workspace_id"],
            "unique_schedule_name_in_workspace",
            None,
        ),
        (
            "secret",
            ["private", "user_id"],
            "unique_secret_name_private_scope_user",
            None,
        ),
        ("service_connector", [], "unique_service_connector_name", None),
        ("stack", [], "unique_stack_name", None),
        ("stack_component", ["type"], "unique_component_name_and_type", None),
        ("tag", [], "unique_tag_name", None),
        (
            "trigger",
            ["workspace_id"],
            "unique_trigger_name_in_workspace",
            None,
        ),
        ("workspace", [], "unique_workspace_name", None),
    ]

    for table, extra_columns, constraint_name, drop_first in constraint_specs:
        # Rename duplicate rows first so the constraint can be created.
        resolve_duplicate_names(table, extra_columns, session)

        with op.batch_alter_table(table, schema=None) as batch_op:
            if drop_first is not None:
                batch_op.drop_constraint(drop_first, type_="unique")
            batch_op.create_unique_constraint(
                constraint_name, ["name"] + extra_columns
            )
+
165
+
166
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision.

    Drops the unique name constraints created by ``upgrade`` in reverse
    order and restores the previous ``unique_artifact_name`` constraint.
    """
    with op.batch_alter_table("workspace", schema=None) as batch_op:
        batch_op.drop_constraint("unique_workspace_name", type_="unique")

    with op.batch_alter_table("trigger", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_trigger_name_in_workspace", type_="unique"
        )

    with op.batch_alter_table("tag", schema=None) as batch_op:
        # BUG FIX: `upgrade` creates "unique_tag_name" (not
        # "unique_tag_name_in_workspace"); dropping the wrong name would
        # make this downgrade fail with an unknown-constraint error.
        batch_op.drop_constraint("unique_tag_name", type_="unique")

    with op.batch_alter_table("stack_component", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_component_name_and_type", type_="unique"
        )

    with op.batch_alter_table("stack", schema=None) as batch_op:
        batch_op.drop_constraint("unique_stack_name", type_="unique")

    with op.batch_alter_table("service_connector", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_service_connector_name", type_="unique"
        )

    with op.batch_alter_table("secret", schema=None) as batch_op:
        # BUG FIX: must match the name created in `upgrade`
        # ("unique_secret_name_private_scope_user", not
        # "unique_secret_name_and_private_scope").
        batch_op.drop_constraint(
            "unique_secret_name_private_scope_user", type_="unique"
        )

    with op.batch_alter_table("schedule", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_schedule_name_in_workspace", type_="unique"
        )

    with op.batch_alter_table("flavor", schema=None) as batch_op:
        batch_op.drop_constraint("unique_flavor_name_and_type", type_="unique")

    with op.batch_alter_table("event_source", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_event_source_name_in_workspace", type_="unique"
        )

    with op.batch_alter_table("code_repository", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_code_repository_name_in_workspace", type_="unique"
        )

    with op.batch_alter_table("artifact", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_artifact_name_in_workspace", type_="unique"
        )
        # Restore the pre-migration global unique name constraint.
        batch_op.create_unique_constraint("unique_artifact_name", ["name"])

    with op.batch_alter_table("api_key", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_api_key_name_in_service_account", type_="unique"
        )

    with op.batch_alter_table("action", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_action_name_in_workspace", type_="unique"
        )
@@ -0,0 +1,74 @@
1
+ """make tags user scoped [288f4fb6e112].
2
+
3
+ Revision ID: 288f4fb6e112
4
+ Revises: 3b1776345020
5
+ Create Date: 2025-02-19 15:16:42.954792
6
+
7
+ """
8
+
9
+ import sqlalchemy as sa
10
+ import sqlmodel
11
+ from alembic import op
12
+ from sqlalchemy.orm import Session
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision = "288f4fb6e112"
16
+ down_revision = "3b1776345020"
17
+ branch_labels = None
18
+ depends_on = None
19
+
20
+
21
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision."""
    with op.batch_alter_table("tag", schema=None) as batch_op:
        # First add the column as nullable so existing rows stay valid.
        batch_op.add_column(
            sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True)
        )

        # Deleting a user clears the reference instead of blocking.
        batch_op.create_foreign_key(
            "fk_tag_user_id_user",
            "user",
            ["user_id"],
            ["id"],
            ondelete="SET NULL",
        )

    bind = op.get_bind()
    session = Session(bind=bind)

    # Pair each tag with its tagged resources; each tag inherits the
    # user of the first resource encountered for it.
    tags = session.execute(
        sa.text("""
            SELECT t.id, tr.resource_id, tr.resource_type
            FROM tag t
            JOIN tag_resource tr ON t.id = tr.tag_id
        """)
    )

    # Fix: membership tests on a list made this loop quadratic in the
    # number of tag/resource pairs; a set gives O(1) lookups.
    seen_tag_ids = set()
    for tag_id, resource_id, resource_type in tags:
        if tag_id in seen_tag_ids:
            continue
        seen_tag_ids.add(tag_id)
        # resource_type names the resource's table (e.g. "artifact"),
        # hence the interpolation into the FROM clause.
        session.execute(
            sa.text(
                f"""
                UPDATE tag
                SET user_id = (
                    SELECT r.user_id
                    FROM {resource_type} r
                    WHERE r.id = :resource_id
                )
                WHERE id = :tag_id
                """  # nosec B608
            ),
            params={"resource_id": resource_id, "tag_id": tag_id},
        )
+
69
+
70
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    # Drop the foreign key first, then the column it was defined on.
    with op.batch_alter_table("tag", schema=None) as batch_op:
        batch_op.drop_constraint("fk_tag_user_id_user", type_="foreignkey")
        batch_op.drop_column("user_id")
@@ -0,0 +1,45 @@
1
+ """add user default workspace [2e695a26fe7a].
2
+
3
+ Revision ID: 2e695a26fe7a
4
+ Revises: 1f9d1cd00b90
5
+ Create Date: 2025-02-24 18:19:43.121393
6
+
7
+ """
8
+
9
+ import sqlalchemy as sa
10
+ import sqlmodel
11
+ from alembic import op
12
+
13
+ # revision identifiers, used by Alembic.
14
+ revision = "2e695a26fe7a"
15
+ down_revision = "1f9d1cd00b90"
16
+ branch_labels = None
17
+ depends_on = None
18
+
19
+
20
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision."""
    with op.batch_alter_table("user", schema=None) as batch_op:
        # Nullable: existing users start without a default workspace.
        default_workspace_column = sa.Column(
            "default_workspace_id",
            sqlmodel.sql.sqltypes.GUID(),
            nullable=True,
        )
        batch_op.add_column(default_workspace_column)
        # Deleting a workspace clears the reference rather than blocking.
        batch_op.create_foreign_key(
            "fk_user_default_workspace_id_workspace",
            "workspace",
            ["default_workspace_id"],
            ["id"],
            ondelete="SET NULL",
        )
+
38
+
39
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    # Undo `upgrade`: remove the foreign key, then the column itself.
    with op.batch_alter_table("user", schema=None) as batch_op:
        batch_op.drop_constraint(
            "fk_user_default_workspace_id_workspace", type_="foreignkey"
        )
        batch_op.drop_column("default_workspace_id")