zenml-nightly 0.75.0.dev20250312__py3-none-any.whl → 0.75.0.dev20250314__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (191)
  1. zenml/VERSION +1 -1
  2. zenml/__init__.py +2 -0
  3. zenml/analytics/context.py +7 -0
  4. zenml/analytics/enums.py +2 -2
  5. zenml/artifacts/utils.py +2 -4
  6. zenml/cli/__init__.py +8 -9
  7. zenml/cli/base.py +2 -2
  8. zenml/cli/code_repository.py +1 -1
  9. zenml/cli/login.py +6 -0
  10. zenml/cli/model.py +7 -15
  11. zenml/cli/pipeline.py +3 -3
  12. zenml/cli/project.py +172 -0
  13. zenml/cli/secret.py +47 -44
  14. zenml/cli/service_accounts.py +0 -1
  15. zenml/cli/service_connectors.py +15 -17
  16. zenml/cli/stack.py +0 -3
  17. zenml/cli/stack_components.py +2 -2
  18. zenml/cli/tag.py +3 -5
  19. zenml/cli/utils.py +25 -23
  20. zenml/client.py +749 -475
  21. zenml/config/global_config.py +48 -37
  22. zenml/config/pipeline_configurations.py +3 -2
  23. zenml/config/pipeline_run_configuration.py +2 -1
  24. zenml/config/secret_reference_mixin.py +1 -1
  25. zenml/constants.py +6 -6
  26. zenml/enums.py +0 -7
  27. zenml/event_hub/event_hub.py +3 -1
  28. zenml/exceptions.py +0 -24
  29. zenml/integrations/aws/orchestrators/sagemaker_orchestrator.py +5 -3
  30. zenml/integrations/bitbucket/plugins/event_sources/bitbucket_webhook_event_source.py +1 -4
  31. zenml/integrations/gcp/service_connectors/gcp_service_connector.py +7 -6
  32. zenml/integrations/github/plugins/event_sources/github_webhook_event_source.py +1 -4
  33. zenml/integrations/mlflow/steps/mlflow_registry.py +3 -3
  34. zenml/integrations/seldon/model_deployers/seldon_model_deployer.py +1 -1
  35. zenml/integrations/wandb/__init__.py +1 -1
  36. zenml/integrations/wandb/experiment_trackers/wandb_experiment_tracker.py +29 -9
  37. zenml/integrations/wandb/flavors/wandb_experiment_tracker_flavor.py +5 -3
  38. zenml/model/model.py +10 -10
  39. zenml/model_registries/base_model_registry.py +1 -1
  40. zenml/models/__init__.py +45 -28
  41. zenml/models/v2/base/base.py +0 -5
  42. zenml/models/v2/base/filter.py +2 -2
  43. zenml/models/v2/base/scoped.py +135 -156
  44. zenml/models/v2/core/action.py +12 -12
  45. zenml/models/v2/core/api_key.py +1 -1
  46. zenml/models/v2/core/artifact.py +31 -18
  47. zenml/models/v2/core/artifact_version.py +57 -40
  48. zenml/models/v2/core/code_repository.py +12 -12
  49. zenml/models/v2/core/component.py +22 -33
  50. zenml/models/v2/core/device.py +3 -2
  51. zenml/models/v2/core/event_source.py +14 -14
  52. zenml/models/v2/core/flavor.py +19 -47
  53. zenml/models/v2/core/logs.py +1 -2
  54. zenml/models/v2/core/model.py +23 -20
  55. zenml/models/v2/core/model_version.py +51 -42
  56. zenml/models/v2/core/pipeline.py +16 -16
  57. zenml/models/v2/core/pipeline_build.py +14 -14
  58. zenml/models/v2/core/pipeline_deployment.py +12 -14
  59. zenml/models/v2/core/pipeline_run.py +21 -29
  60. zenml/models/v2/core/project.py +203 -0
  61. zenml/models/v2/core/run_metadata.py +2 -2
  62. zenml/models/v2/core/run_template.py +16 -17
  63. zenml/models/v2/core/schedule.py +12 -21
  64. zenml/models/v2/core/secret.py +94 -128
  65. zenml/models/v2/core/server_settings.py +2 -2
  66. zenml/models/v2/core/service.py +57 -26
  67. zenml/models/v2/core/service_connector.py +14 -16
  68. zenml/models/v2/core/stack.py +24 -26
  69. zenml/models/v2/core/step_run.py +16 -28
  70. zenml/models/v2/core/tag.py +41 -15
  71. zenml/models/v2/core/trigger.py +13 -13
  72. zenml/models/v2/core/trigger_execution.py +2 -2
  73. zenml/models/v2/core/user.py +2 -2
  74. zenml/models/v2/misc/statistics.py +45 -0
  75. zenml/models/v2/misc/tag.py +27 -0
  76. zenml/orchestrators/cache_utils.py +7 -7
  77. zenml/orchestrators/input_utils.py +1 -0
  78. zenml/orchestrators/step_launcher.py +1 -2
  79. zenml/orchestrators/step_run_utils.py +2 -4
  80. zenml/orchestrators/step_runner.py +10 -1
  81. zenml/orchestrators/utils.py +4 -4
  82. zenml/pipelines/build_utils.py +2 -4
  83. zenml/pipelines/pipeline_decorator.py +3 -2
  84. zenml/pipelines/pipeline_definition.py +8 -9
  85. zenml/pipelines/run_utils.py +4 -4
  86. zenml/service_connectors/service_connector.py +0 -10
  87. zenml/service_connectors/service_connector_utils.py +0 -2
  88. zenml/stack/authentication_mixin.py +1 -1
  89. zenml/stack/flavor.py +3 -14
  90. zenml/stack/stack.py +0 -1
  91. zenml/stack/stack_component.py +1 -5
  92. zenml/steps/base_step.py +10 -2
  93. zenml/steps/step_context.py +19 -0
  94. zenml/utils/string_utils.py +1 -1
  95. zenml/utils/tag_utils.py +642 -0
  96. zenml/zen_server/cloud_utils.py +21 -0
  97. zenml/zen_server/exceptions.py +0 -6
  98. zenml/zen_server/rbac/endpoint_utils.py +134 -46
  99. zenml/zen_server/rbac/models.py +65 -3
  100. zenml/zen_server/rbac/rbac_interface.py +9 -0
  101. zenml/zen_server/rbac/rbac_sql_zen_store.py +15 -7
  102. zenml/zen_server/rbac/utils.py +155 -30
  103. zenml/zen_server/rbac/zenml_cloud_rbac.py +39 -11
  104. zenml/zen_server/routers/actions_endpoints.py +3 -5
  105. zenml/zen_server/routers/artifact_endpoint.py +0 -5
  106. zenml/zen_server/routers/artifact_version_endpoints.py +15 -9
  107. zenml/zen_server/routers/auth_endpoints.py +22 -7
  108. zenml/zen_server/routers/code_repositories_endpoints.py +54 -3
  109. zenml/zen_server/routers/devices_endpoints.py +0 -4
  110. zenml/zen_server/routers/event_source_endpoints.py +0 -5
  111. zenml/zen_server/routers/flavors_endpoints.py +0 -5
  112. zenml/zen_server/routers/logs_endpoints.py +0 -1
  113. zenml/zen_server/routers/model_versions_endpoints.py +100 -23
  114. zenml/zen_server/routers/models_endpoints.py +50 -69
  115. zenml/zen_server/routers/pipeline_builds_endpoints.py +55 -3
  116. zenml/zen_server/routers/pipeline_deployments_endpoints.py +56 -4
  117. zenml/zen_server/routers/pipelines_endpoints.py +70 -3
  118. zenml/zen_server/routers/plugin_endpoints.py +0 -1
  119. zenml/zen_server/routers/projects_endpoints.py +283 -0
  120. zenml/zen_server/routers/run_metadata_endpoints.py +97 -0
  121. zenml/zen_server/routers/run_templates_endpoints.py +64 -3
  122. zenml/zen_server/routers/runs_endpoints.py +58 -8
  123. zenml/zen_server/routers/schedule_endpoints.py +67 -6
  124. zenml/zen_server/routers/secrets_endpoints.py +38 -4
  125. zenml/zen_server/routers/server_endpoints.py +53 -1
  126. zenml/zen_server/routers/service_accounts_endpoints.py +14 -15
  127. zenml/zen_server/routers/service_connectors_endpoints.py +94 -14
  128. zenml/zen_server/routers/service_endpoints.py +18 -7
  129. zenml/zen_server/routers/stack_components_endpoints.py +66 -7
  130. zenml/zen_server/routers/stacks_endpoints.py +95 -6
  131. zenml/zen_server/routers/steps_endpoints.py +17 -11
  132. zenml/zen_server/routers/tag_resource_endpoints.py +115 -0
  133. zenml/zen_server/routers/tags_endpoints.py +6 -17
  134. zenml/zen_server/routers/triggers_endpoints.py +5 -8
  135. zenml/zen_server/routers/users_endpoints.py +9 -12
  136. zenml/zen_server/template_execution/utils.py +8 -7
  137. zenml/zen_server/utils.py +21 -0
  138. zenml/zen_server/zen_server_api.py +7 -2
  139. zenml/zen_stores/base_zen_store.py +50 -69
  140. zenml/zen_stores/migrations/versions/12eff0206201_rename_workspace_to_project.py +768 -0
  141. zenml/zen_stores/migrations/versions/1cb6477f72d6_move_artifact_save_type.py +20 -10
  142. zenml/zen_stores/migrations/versions/1f9d1cd00b90_add_unique_name_constraints.py +231 -0
  143. zenml/zen_stores/migrations/versions/288f4fb6e112_make_tags_user_scoped.py +74 -0
  144. zenml/zen_stores/migrations/versions/2e695a26fe7a_add_user_default_workspace.py +45 -0
  145. zenml/zen_stores/migrations/versions/3b1776345020_remove_workspace_from_globals.py +81 -0
  146. zenml/zen_stores/migrations/versions/41b28cae31ce_make_artifacts_workspace_scoped.py +136 -0
  147. zenml/zen_stores/migrations/versions/9e7bf0970266_adding_exclusive_attribute_to_tags.py +47 -0
  148. zenml/zen_stores/migrations/versions/b557b2871693_update_step_run_input_types.py +8 -4
  149. zenml/zen_stores/migrations/versions/cbc6acd71f92_add_workspace_display_name.py +58 -0
  150. zenml/zen_stores/migrations/versions/cc269488e5a9_separate_run_metadata.py +12 -6
  151. zenml/zen_stores/migrations/versions/f1d723fd723b_add_secret_private_attr.py +61 -0
  152. zenml/zen_stores/migrations/versions/f76a368a25a5_add_stack_description.py +35 -0
  153. zenml/zen_stores/rest_zen_store.py +223 -230
  154. zenml/zen_stores/schemas/__init__.py +2 -2
  155. zenml/zen_stores/schemas/action_schemas.py +15 -8
  156. zenml/zen_stores/schemas/api_key_schemas.py +8 -1
  157. zenml/zen_stores/schemas/artifact_schemas.py +35 -10
  158. zenml/zen_stores/schemas/code_repository_schemas.py +22 -17
  159. zenml/zen_stores/schemas/component_schemas.py +9 -14
  160. zenml/zen_stores/schemas/event_source_schemas.py +15 -8
  161. zenml/zen_stores/schemas/flavor_schemas.py +14 -20
  162. zenml/zen_stores/schemas/model_schemas.py +18 -17
  163. zenml/zen_stores/schemas/pipeline_build_schemas.py +7 -7
  164. zenml/zen_stores/schemas/pipeline_deployment_schemas.py +10 -8
  165. zenml/zen_stores/schemas/pipeline_run_schemas.py +9 -12
  166. zenml/zen_stores/schemas/pipeline_schemas.py +9 -9
  167. zenml/zen_stores/schemas/{workspace_schemas.py → project_schemas.py} +53 -65
  168. zenml/zen_stores/schemas/run_metadata_schemas.py +5 -5
  169. zenml/zen_stores/schemas/run_template_schemas.py +17 -13
  170. zenml/zen_stores/schemas/schedule_schema.py +16 -21
  171. zenml/zen_stores/schemas/secret_schemas.py +15 -25
  172. zenml/zen_stores/schemas/service_connector_schemas.py +8 -17
  173. zenml/zen_stores/schemas/service_schemas.py +7 -8
  174. zenml/zen_stores/schemas/stack_schemas.py +12 -15
  175. zenml/zen_stores/schemas/step_run_schemas.py +14 -15
  176. zenml/zen_stores/schemas/tag_schemas.py +30 -2
  177. zenml/zen_stores/schemas/trigger_schemas.py +15 -8
  178. zenml/zen_stores/schemas/user_schemas.py +12 -2
  179. zenml/zen_stores/schemas/utils.py +16 -0
  180. zenml/zen_stores/secrets_stores/service_connector_secrets_store.py +0 -3
  181. zenml/zen_stores/sql_zen_store.py +2984 -2369
  182. zenml/zen_stores/template_utils.py +1 -1
  183. zenml/zen_stores/zen_store_interface.py +136 -126
  184. {zenml_nightly-0.75.0.dev20250312.dist-info → zenml_nightly-0.75.0.dev20250314.dist-info}/METADATA +1 -1
  185. {zenml_nightly-0.75.0.dev20250312.dist-info → zenml_nightly-0.75.0.dev20250314.dist-info}/RECORD +188 -173
  186. zenml/cli/workspace.py +0 -86
  187. zenml/models/v2/core/workspace.py +0 -131
  188. zenml/zen_server/routers/workspaces_endpoints.py +0 -1469
  189. {zenml_nightly-0.75.0.dev20250312.dist-info → zenml_nightly-0.75.0.dev20250314.dist-info}/LICENSE +0 -0
  190. {zenml_nightly-0.75.0.dev20250312.dist-info → zenml_nightly-0.75.0.dev20250314.dist-info}/WHEEL +0 -0
  191. {zenml_nightly-0.75.0.dev20250312.dist-info → zenml_nightly-0.75.0.dev20250314.dist-info}/entry_points.txt +0 -0
@@ -23,7 +23,8 @@ def upgrade() -> None:
23
23
  batch_op.add_column(sa.Column("save_type", sa.TEXT(), nullable=True))
24
24
 
25
25
  # Step 2: Move data from step_run_output_artifact.type to artifact_version.save_type
26
- op.execute("""
26
+ op.execute(
27
+ """
27
28
  UPDATE artifact_version
28
29
  SET save_type = (
29
30
  SELECT max(step_run_output_artifact.type)
@@ -31,17 +32,22 @@ def upgrade() -> None:
31
32
  WHERE step_run_output_artifact.artifact_id = artifact_version.id
32
33
  GROUP BY artifact_id
33
34
  )
34
- """)
35
- op.execute("""
35
+ """
36
+ )
37
+ op.execute(
38
+ """
36
39
  UPDATE artifact_version
37
40
  SET save_type = 'step_output'
38
41
  WHERE artifact_version.save_type = 'default'
39
- """)
40
- op.execute("""
42
+ """
43
+ )
44
+ op.execute(
45
+ """
41
46
  UPDATE artifact_version
42
47
  SET save_type = 'external'
43
48
  WHERE save_type is NULL
44
- """)
49
+ """
50
+ )
45
51
 
46
52
  # # Step 3: Set save_type to non-nullable
47
53
  with op.batch_alter_table("artifact_version", schema=None) as batch_op:
@@ -69,7 +75,8 @@ def downgrade() -> None:
69
75
  )
70
76
 
71
77
  # Move data back from artifact_version.save_type to step_run_output_artifact.type
72
- op.execute("""
78
+ op.execute(
79
+ """
73
80
  UPDATE step_run_output_artifact
74
81
  SET type = (
75
82
  SELECT max(artifact_version.save_type)
@@ -77,12 +84,15 @@ def downgrade() -> None:
77
84
  WHERE step_run_output_artifact.artifact_id = artifact_version.id
78
85
  GROUP BY artifact_id
79
86
  )
80
- """)
81
- op.execute("""
87
+ """
88
+ )
89
+ op.execute(
90
+ """
82
91
  UPDATE step_run_output_artifact
83
92
  SET type = 'default'
84
93
  WHERE step_run_output_artifact.type = 'step_output'
85
- """)
94
+ """
95
+ )
86
96
 
87
97
  # Set type to non-nullable
88
98
  with op.batch_alter_table(
@@ -0,0 +1,231 @@
1
+ """add unique name constraints [1f9d1cd00b90].
2
+
3
+ Revision ID: 1f9d1cd00b90
4
+ Revises: f76a368a25a5
5
+ Create Date: 2025-02-22 20:18:34.258987
6
+
7
+ """
8
+
9
+ import sqlalchemy as sa
10
+ from alembic import op
11
+ from sqlalchemy.orm import Session
12
+
13
+ from zenml.logger import get_logger
14
+
15
+ logger = get_logger(__name__)
16
+
17
+ # revision identifiers, used by Alembic.
18
+ revision = "1f9d1cd00b90"
19
+ down_revision = "f76a368a25a5"
20
+ branch_labels = None
21
+ depends_on = None
22
+
23
+
24
def resolve_duplicate_names(
    table_name: str, other_columns: list[str], session: Session
) -> None:
    """Resolve duplicate entities by renaming all but one row per group.

    Finds all rows whose (name, *other_columns) tuple occurs more than
    once, keeps the first row of each duplicate group under its original
    name and renames every other row by appending a prefix of its id,
    which makes the new names unique (ids are unique).

    Args:
        table_name: The name of the table to resolve duplicate entities for.
        other_columns: The columns that are part of the unique constraint,
            excluding the name column.
        session: The SQLAlchemy session to use.
    """
    columns = ["name"] + other_columns
    # Select the full scope key so duplicates can be grouped correctly:
    # rows that share a name but differ in the other scope columns are
    # NOT duplicates and must not be renamed.
    duplicates = session.execute(
        sa.text(
            f"""
            SELECT id, {", ".join(columns)}
            FROM `{table_name}`
            WHERE ({", ".join(columns)}) IN (
                SELECT {", ".join(columns)}
                FROM `{table_name}`
                GROUP BY {", ".join(columns)}
                HAVING COUNT(*) > 1
            )
            """  # nosec B608
        )
    )
    seen_keys = set()
    for id_, *key_values in duplicates:
        key = tuple(key_values)
        name = key_values[0]
        if key not in seen_keys:
            # First row of this duplicate group: keep its original name.
            # (The previous implementation skipped only the first row of
            # the whole result set, renaming one extra row per group.)
            seen_keys.add(key)
            continue
        logger.warning(f"Duplicate {table_name}: {name} (id: {id_})")
        session.execute(
            sa.text(
                f"""
                UPDATE `{table_name}`
                SET name = :new_name
                WHERE id = :id_
                """  # nosec B608
            ),
            # NOTE: assumes ids are stored as strings (GUID text columns),
            # so slicing the first 6 characters is valid.
            params={"id_": id_, "new_name": f"{name}_{id_[:6]}"},
        )
62
+
63
+
64
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision.

    Deduplicates entity names and then adds a unique name constraint for
    every affected table.
    """
    bind = op.get_bind()
    session = Session(bind=bind)

    # (table name, extra columns scoping the name, constraint name), in
    # the exact order the constraints are created.
    unique_name_scopes = [
        ("action", ["workspace_id"], "unique_action_name_in_workspace"),
        (
            "api_key",
            ["service_account_id"],
            "unique_api_key_name_in_service_account",
        ),
        ("artifact", ["workspace_id"], "unique_artifact_name_in_workspace"),
        (
            "code_repository",
            ["workspace_id"],
            "unique_code_repository_name_in_workspace",
        ),
        (
            "event_source",
            ["workspace_id"],
            "unique_event_source_name_in_workspace",
        ),
        ("flavor", ["type"], "unique_flavor_name_and_type"),
        ("schedule", ["workspace_id"], "unique_schedule_name_in_workspace"),
        (
            "secret",
            ["private", "user_id"],
            "unique_secret_name_private_scope_user",
        ),
        ("service_connector", [], "unique_service_connector_name"),
        ("stack", [], "unique_stack_name"),
        ("stack_component", ["type"], "unique_component_name_and_type"),
        ("tag", [], "unique_tag_name"),
        ("trigger", ["workspace_id"], "unique_trigger_name_in_workspace"),
        ("workspace", [], "unique_workspace_name"),
    ]

    for table_name, extra_columns, constraint_name in unique_name_scopes:
        # Rename duplicates first so the constraint creation cannot fail.
        resolve_duplicate_names(table_name, extra_columns, session)

        with op.batch_alter_table(table_name, schema=None) as batch_op:
            if table_name == "artifact":
                # Artifacts previously enforced globally unique names; that
                # constraint is replaced by the workspace-scoped one.
                batch_op.drop_constraint(
                    "unique_artifact_name", type_="unique"
                )
            batch_op.create_unique_constraint(
                constraint_name, ["name"] + extra_columns
            )
164
+
165
+
166
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision.

    Drops every unique name constraint created by the upgrade, in reverse
    order of creation, and restores the globally unique artifact name
    constraint.
    """
    with op.batch_alter_table("workspace", schema=None) as batch_op:
        batch_op.drop_constraint("unique_workspace_name", type_="unique")

    with op.batch_alter_table("trigger", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_trigger_name_in_workspace", type_="unique"
        )

    with op.batch_alter_table("tag", schema=None) as batch_op:
        # BUGFIX: the upgrade creates this constraint as "unique_tag_name"
        # (tags are not workspace-scoped in this revision), so that is the
        # name that must be dropped here.
        batch_op.drop_constraint("unique_tag_name", type_="unique")

    with op.batch_alter_table("stack_component", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_component_name_and_type", type_="unique"
        )

    with op.batch_alter_table("stack", schema=None) as batch_op:
        batch_op.drop_constraint("unique_stack_name", type_="unique")

    with op.batch_alter_table("service_connector", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_service_connector_name", type_="unique"
        )

    with op.batch_alter_table("secret", schema=None) as batch_op:
        # BUGFIX: must match the name used in upgrade():
        # "unique_secret_name_private_scope_user".
        batch_op.drop_constraint(
            "unique_secret_name_private_scope_user", type_="unique"
        )

    with op.batch_alter_table("schedule", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_schedule_name_in_workspace", type_="unique"
        )

    with op.batch_alter_table("flavor", schema=None) as batch_op:
        batch_op.drop_constraint("unique_flavor_name_and_type", type_="unique")

    with op.batch_alter_table("event_source", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_event_source_name_in_workspace", type_="unique"
        )

    with op.batch_alter_table("code_repository", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_code_repository_name_in_workspace", type_="unique"
        )

    with op.batch_alter_table("artifact", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_artifact_name_in_workspace", type_="unique"
        )
        # Restore the original globally unique artifact name constraint.
        batch_op.create_unique_constraint("unique_artifact_name", ["name"])

    with op.batch_alter_table("api_key", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_api_key_name_in_service_account", type_="unique"
        )

    with op.batch_alter_table("action", schema=None) as batch_op:
        batch_op.drop_constraint(
            "unique_action_name_in_workspace", type_="unique"
        )
@@ -0,0 +1,74 @@
1
+ """make tags user scoped [288f4fb6e112].
2
+
3
+ Revision ID: 288f4fb6e112
4
+ Revises: 3b1776345020
5
+ Create Date: 2025-02-19 15:16:42.954792
6
+
7
+ """
8
+
9
+ import sqlalchemy as sa
10
+ import sqlmodel
11
+ from alembic import op
12
+ from sqlalchemy.orm import Session
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision = "288f4fb6e112"
16
+ down_revision = "3b1776345020"
17
+ branch_labels = None
18
+ depends_on = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ """Upgrade database schema and/or data, creating a new revision."""
23
+ with op.batch_alter_table("tag", schema=None) as batch_op:
24
+ # First add columns as nullable
25
+ batch_op.add_column(
26
+ sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True)
27
+ )
28
+
29
+ # Add foreign key constraints
30
+ batch_op.create_foreign_key(
31
+ "fk_tag_user_id_user",
32
+ "user",
33
+ ["user_id"],
34
+ ["id"],
35
+ ondelete="SET NULL",
36
+ )
37
+
38
+ bind = op.get_bind()
39
+ session = Session(bind=bind)
40
+
41
+ tags = session.execute(
42
+ sa.text("""
43
+ SELECT t.id, tr.resource_id, tr.resource_type
44
+ FROM tag t
45
+ JOIN tag_resource tr ON t.id = tr.tag_id
46
+ """)
47
+ )
48
+
49
+ tag_ids = []
50
+ for tag_id, resource_id, resource_type in tags:
51
+ if tag_id in tag_ids:
52
+ continue
53
+ tag_ids.append(tag_id)
54
+ session.execute(
55
+ sa.text(
56
+ f"""
57
+ UPDATE tag
58
+ SET user_id = (
59
+ SELECT r.user_id
60
+ FROM {resource_type} r
61
+ WHERE r.id = :resource_id
62
+ )
63
+ WHERE id = :tag_id
64
+ """ # nosec B608
65
+ ),
66
+ params={"resource_id": resource_id, "tag_id": tag_id},
67
+ )
68
+
69
+
70
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    # Undo the upgrade in reverse order: foreign key first, then column.
    with op.batch_alter_table("tag", schema=None) as batch_op:
        batch_op.drop_constraint("fk_tag_user_id_user", type_="foreignkey")
        batch_op.drop_column("user_id")
@@ -0,0 +1,45 @@
1
+ """add user default workspace [2e695a26fe7a].
2
+
3
+ Revision ID: 2e695a26fe7a
4
+ Revises: 1f9d1cd00b90
5
+ Create Date: 2025-02-24 18:19:43.121393
6
+
7
+ """
8
+
9
+ import sqlalchemy as sa
10
+ import sqlmodel
11
+ from alembic import op
12
+
13
+ # revision identifiers, used by Alembic.
14
+ revision = "2e695a26fe7a"
15
+ down_revision = "1f9d1cd00b90"
16
+ branch_labels = None
17
+ depends_on = None
18
+
19
+
20
+ def upgrade() -> None:
21
+ """Upgrade database schema and/or data, creating a new revision."""
22
+ with op.batch_alter_table("user", schema=None) as batch_op:
23
+ batch_op.add_column(
24
+ sa.Column(
25
+ "default_workspace_id",
26
+ sqlmodel.sql.sqltypes.GUID(),
27
+ nullable=True,
28
+ )
29
+ )
30
+ batch_op.create_foreign_key(
31
+ "fk_user_default_workspace_id_workspace",
32
+ "workspace",
33
+ ["default_workspace_id"],
34
+ ["id"],
35
+ ondelete="SET NULL",
36
+ )
37
+
38
+
39
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    # Remove the foreign key first, then the column it lives on.
    with op.batch_alter_table("user", schema=None) as batch_op:
        batch_op.drop_constraint(
            "fk_user_default_workspace_id_workspace", type_="foreignkey"
        )
        batch_op.drop_column("default_workspace_id")
@@ -0,0 +1,81 @@
1
+ """remove workspace from globals [3b1776345020].
2
+
3
+ Revision ID: 3b1776345020
4
+ Revises: 0392807467dc
5
+ Create Date: 2025-02-13 15:57:38.255825
6
+
7
+ """
8
+
9
+ import sqlalchemy as sa
10
+ from alembic import op
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = "3b1776345020"
14
+ down_revision = "0392807467dc"
15
+ branch_labels = None
16
+ depends_on = None
17
+
18
+
19
+ def upgrade() -> None:
20
+ """Upgrade database schema and/or data, creating a new revision.
21
+
22
+ Raises:
23
+ RuntimeError: If more than one workspace exists.
24
+ """
25
+ # If more than one workspace exists, we fail the migration because it
26
+ # would mean merging together resources from different workspaces, which
27
+ # can lead to naming conflicts.
28
+ workspace_count_query = sa.text("SELECT COUNT(*) FROM workspace")
29
+ connection = op.get_bind()
30
+ workspace_count = connection.execute(
31
+ workspace_count_query,
32
+ ).scalar()
33
+ assert isinstance(workspace_count, int)
34
+ if workspace_count > 1:
35
+ raise RuntimeError(
36
+ "Your ZenML installation has more than just the default workspace "
37
+ "configured. This migration removes the workspace scopes of all "
38
+ "stacks, components, flavors, service connectors and secrets, "
39
+ "which may lead to naming conflicts if multiple workspaces are "
40
+ "present. Please delete all but the default workspace before "
41
+ "running this migration."
42
+ )
43
+
44
+ with op.batch_alter_table("flavor", schema=None) as batch_op:
45
+ batch_op.drop_constraint(
46
+ "fk_flavor_workspace_id_workspace", type_="foreignkey"
47
+ )
48
+ batch_op.drop_column("workspace_id")
49
+
50
+ with op.batch_alter_table("secret", schema=None) as batch_op:
51
+ batch_op.drop_constraint(
52
+ "fk_secret_workspace_id_workspace", type_="foreignkey"
53
+ )
54
+ batch_op.drop_column("workspace_id")
55
+
56
+ with op.batch_alter_table("service_connector", schema=None) as batch_op:
57
+ batch_op.drop_constraint(
58
+ "fk_service_connector_workspace_id_workspace", type_="foreignkey"
59
+ )
60
+ batch_op.drop_column("workspace_id")
61
+
62
+ with op.batch_alter_table("stack", schema=None) as batch_op:
63
+ batch_op.drop_constraint(
64
+ "fk_stack_workspace_id_workspace", type_="foreignkey"
65
+ )
66
+ batch_op.drop_column("workspace_id")
67
+
68
+ with op.batch_alter_table("stack_component", schema=None) as batch_op:
69
+ batch_op.drop_constraint(
70
+ "fk_stack_component_workspace_id_workspace", type_="foreignkey"
71
+ )
72
+ batch_op.drop_column("workspace_id")
73
+
74
+
75
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision.

    Raises:
        NotImplementedError: This migration is not reversible.
    """
    # The upgrade drops columns and their data irrecoverably, so there is
    # no way back.
    raise NotImplementedError("This migration is not reversible.")
@@ -0,0 +1,136 @@
1
+ """make artifacts workspace scoped [41b28cae31ce].
2
+
3
+ Revision ID: 41b28cae31ce
4
+ Revises: 288f4fb6e112
5
+ Create Date: 2025-02-19 23:23:08.133826
6
+
7
+ """
8
+
9
+ import os
10
+
11
+ import sqlalchemy as sa
12
+ import sqlmodel
13
+ from alembic import op
14
+ from sqlalchemy.orm import Session
15
+ from sqlalchemy.sql import column, table
16
+
17
+ from zenml.constants import (
18
+ DEFAULT_PROJECT_NAME,
19
+ ENV_ZENML_DEFAULT_PROJECT_NAME,
20
+ )
21
+
22
+ # revision identifiers, used by Alembic.
23
+ revision = "41b28cae31ce"
24
+ down_revision = "288f4fb6e112"
25
+ branch_labels = None
26
+ depends_on = None
27
+
28
+
29
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision.

    Raises:
        Exception: If the default workspace is not found.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Step 1: add the new columns as nullable so existing rows stay valid.
    with op.batch_alter_table("artifact", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column(
                "workspace_id", sqlmodel.sql.sqltypes.GUID(), nullable=True
            )
        )
        batch_op.add_column(
            sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True)
        )

    # Lightweight table object used for the bulk update below.
    artifact_table = table(
        "artifact",
        column("id", sqlmodel.sql.sqltypes.GUID()),
        column("workspace_id", sqlmodel.sql.sqltypes.GUID()),
    )

    bind = op.get_bind()

    # Step 2: scope every existing artifact to the default workspace.
    default_workspace_name = os.getenv(
        ENV_ZENML_DEFAULT_PROJECT_NAME, DEFAULT_PROJECT_NAME
    )
    default_workspace_id = bind.execute(
        sa.text(
            "SELECT id FROM workspace WHERE name = :default_workspace_name LIMIT 1"
        ),
        {"default_workspace_name": default_workspace_name},
    ).scalar()

    if default_workspace_id is None:
        raise Exception(
            "Default workspace not found. Cannot proceed with migration."
        )

    op.execute(
        artifact_table.update().values(workspace_id=default_workspace_id)
    )

    # Step 3: set the artifact owner to the owner of the latest artifact
    # version. NOTE: we skip this for SQLite because the subquery will fail.
    session = Session(bind=bind)
    if bind.dialect.name != "sqlite":
        session.execute(
            sa.text(
                """
                UPDATE artifact a
                SET user_id = (
                    SELECT v.user_id
                    FROM `artifact_version` v
                    WHERE v.artifact_id = a.id
                    ORDER BY v.created DESC
                    LIMIT 1
                )
                """
            ),
        )

    # Step 4: enforce the workspace scope and wire up the foreign keys.
    with op.batch_alter_table("artifact", schema=None) as batch_op:
        batch_op.alter_column(
            "workspace_id",
            existing_type=sqlmodel.sql.sqltypes.GUID(),
            nullable=False,
        )
        batch_op.create_foreign_key(
            "fk_artifact_workspace_id_workspace",
            "workspace",
            ["workspace_id"],
            ["id"],
            ondelete="CASCADE",
        )
        batch_op.create_foreign_key(
            "fk_artifact_user_id_user",
            "user",
            ["user_id"],
            ["id"],
            ondelete="SET NULL",
        )
121
+
122
+
123
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the foreign keys first, then the columns they are defined on.
    with op.batch_alter_table("artifact", schema=None) as batch_op:
        batch_op.drop_constraint(
            "fk_artifact_user_id_user", type_="foreignkey"
        )
        batch_op.drop_constraint(
            "fk_artifact_workspace_id_workspace", type_="foreignkey"
        )
        batch_op.drop_column("user_id")
        batch_op.drop_column("workspace_id")

    # ### end Alembic commands ###
@@ -0,0 +1,47 @@
1
+ """adding exclusive attribute to tags [9e7bf0970266].
2
+
3
+ Revision ID: 9e7bf0970266
4
+ Revises: 2e695a26fe7a
5
+ Create Date: 2025-03-03 15:17:49.341208
6
+
7
+ """
8
+
9
+ import sqlalchemy as sa
10
+ from alembic import op
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = "9e7bf0970266"
14
+ down_revision = "2e695a26fe7a"
15
+ branch_labels = None
16
+ depends_on = None
17
+
18
+
19
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision."""
    # The new flag, initially nullable so the ALTER succeeds on tables
    # that already contain rows.
    exclusive_column = sa.Column(
        "exclusive",
        sa.Boolean(),
        nullable=True,
    )

    # Use batch_alter_table for safer schema modifications.
    with op.batch_alter_table("tag") as batch_op:
        batch_op.add_column(exclusive_column)

    # Backfill existing rows with the default value.
    op.execute("UPDATE tag SET exclusive = FALSE WHERE exclusive IS NULL")

    # Then tighten the column: non-nullable with a server-side default.
    with op.batch_alter_table("tag") as batch_op:
        batch_op.alter_column(
            "exclusive",
            existing_type=sa.Boolean(),
            nullable=False,
            server_default=sa.false(),
        )
43
+
44
+
45
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    # Dropping the column also discards its NOT NULL and default settings.
    op.drop_column("tag", "exclusive")