google-cloud-pipeline-components 2.18.0__py3-none-any.whl → 2.20.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of google-cloud-pipeline-components might be problematic. Click here for more details.

Files changed (64) hide show
  1. google_cloud_pipeline_components/_implementation/starry_net/get_training_artifacts/component.py +2 -2
  2. google_cloud_pipeline_components/_implementation/starry_net/set_test_set/component.py +1 -1
  3. google_cloud_pipeline_components/_implementation/starry_net/upload_decomposition_plots/component.py +7 -4
  4. google_cloud_pipeline_components/_implementation/starry_net/version.py +3 -3
  5. google_cloud_pipeline_components/container/v1/custom_job/remote_runner.py +13 -3
  6. google_cloud_pipeline_components/preview/automl/forecasting/forecasting_ensemble.py +1 -1
  7. google_cloud_pipeline_components/preview/automl/forecasting/forecasting_stage_1_tuner.py +2 -2
  8. google_cloud_pipeline_components/preview/automl/forecasting/forecasting_stage_2_tuner.py +2 -2
  9. google_cloud_pipeline_components/preview/automl/forecasting/learn_to_learn_forecasting_pipeline.yaml +34 -34
  10. google_cloud_pipeline_components/preview/automl/forecasting/sequence_to_sequence_forecasting_pipeline.yaml +34 -34
  11. google_cloud_pipeline_components/preview/automl/forecasting/temporal_fusion_transformer_forecasting_pipeline.yaml +34 -34
  12. google_cloud_pipeline_components/preview/automl/forecasting/time_series_dense_encoder_forecasting_pipeline.yaml +34 -34
  13. google_cloud_pipeline_components/preview/automl/tabular/auto_feature_engineering.py +1 -1
  14. google_cloud_pipeline_components/preview/automl/tabular/automl_tabular_feature_selection_pipeline.yaml +39 -39
  15. google_cloud_pipeline_components/preview/automl/tabular/automl_tabular_v2_pipeline.yaml +41 -41
  16. google_cloud_pipeline_components/preview/automl/tabular/distillation_stage_feature_transform_engine.py +2 -2
  17. google_cloud_pipeline_components/preview/automl/tabular/feature_selection.py +2 -2
  18. google_cloud_pipeline_components/preview/automl/tabular/feature_selection_pipeline.yaml +4 -4
  19. google_cloud_pipeline_components/preview/automl/tabular/feature_transform_engine.py +3 -3
  20. google_cloud_pipeline_components/preview/automl/tabular/tabnet_hyperparameter_tuning_job.py +2 -2
  21. google_cloud_pipeline_components/preview/automl/tabular/tabnet_hyperparameter_tuning_job_pipeline.yaml +15 -15
  22. google_cloud_pipeline_components/preview/automl/tabular/tabnet_trainer.py +2 -2
  23. google_cloud_pipeline_components/preview/automl/tabular/tabnet_trainer_pipeline.yaml +13 -13
  24. google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_hyperparameter_tuning_job.py +2 -2
  25. google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_hyperparameter_tuning_job_pipeline.yaml +14 -14
  26. google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_trainer.py +2 -2
  27. google_cloud_pipeline_components/preview/automl/tabular/wide_and_deep_trainer_pipeline.yaml +13 -13
  28. google_cloud_pipeline_components/preview/automl/tabular/xgboost_hyperparameter_tuning_job_pipeline.yaml +14 -14
  29. google_cloud_pipeline_components/preview/automl/tabular/xgboost_trainer_pipeline.yaml +13 -13
  30. google_cloud_pipeline_components/proto/README.md +49 -0
  31. google_cloud_pipeline_components/proto/gcp_resources.proto +25 -0
  32. google_cloud_pipeline_components/proto/task_error.proto +11 -0
  33. google_cloud_pipeline_components/proto/template_metadata.proto +323 -0
  34. google_cloud_pipeline_components/v1/automl/forecasting/bqml_arima_predict_pipeline.yaml +10 -10
  35. google_cloud_pipeline_components/v1/automl/forecasting/bqml_arima_train_pipeline.yaml +31 -31
  36. google_cloud_pipeline_components/v1/automl/forecasting/prophet_predict_pipeline.yaml +13 -13
  37. google_cloud_pipeline_components/v1/automl/forecasting/prophet_trainer.py +3 -3
  38. google_cloud_pipeline_components/v1/automl/forecasting/prophet_trainer_pipeline.yaml +14 -14
  39. google_cloud_pipeline_components/v1/automl/tabular/automl_tabular_pipeline.yaml +37 -37
  40. google_cloud_pipeline_components/v1/automl/tabular/cv_trainer.py +2 -2
  41. google_cloud_pipeline_components/v1/automl/tabular/ensemble.py +2 -2
  42. google_cloud_pipeline_components/v1/automl/tabular/finalizer.py +1 -1
  43. google_cloud_pipeline_components/v1/automl/tabular/infra_validator.py +1 -1
  44. google_cloud_pipeline_components/v1/automl/tabular/split_materialized_data.py +1 -1
  45. google_cloud_pipeline_components/v1/automl/tabular/stage_1_tuner.py +2 -2
  46. google_cloud_pipeline_components/v1/automl/tabular/stats_and_example_gen.py +2 -2
  47. google_cloud_pipeline_components/v1/automl/tabular/training_configurator_and_validator.py +1 -1
  48. google_cloud_pipeline_components/v1/automl/tabular/transform.py +2 -2
  49. google_cloud_pipeline_components/v1/custom_job/component.py +3 -0
  50. google_cloud_pipeline_components/v1/custom_job/utils.py +3 -0
  51. google_cloud_pipeline_components/version.py +1 -1
  52. {google_cloud_pipeline_components-2.18.0.dist-info → google_cloud_pipeline_components-2.20.0.dist-info}/METADATA +18 -21
  53. {google_cloud_pipeline_components-2.18.0.dist-info → google_cloud_pipeline_components-2.20.0.dist-info}/RECORD +56 -60
  54. {google_cloud_pipeline_components-2.18.0.dist-info → google_cloud_pipeline_components-2.20.0.dist-info}/WHEEL +1 -1
  55. google_cloud_pipeline_components/_implementation/model_evaluation/import_evaluation/__init__.py +0 -14
  56. google_cloud_pipeline_components/_implementation/model_evaluation/import_evaluation/component.py +0 -208
  57. google_cloud_pipeline_components/preview/model_evaluation/evaluation_llm_classification_pipeline.py +0 -180
  58. google_cloud_pipeline_components/preview/model_evaluation/evaluation_llm_text_generation_pipeline.py +0 -178
  59. google_cloud_pipeline_components/preview/model_evaluation/model_based_llm_evaluation/__init__.py +0 -20
  60. google_cloud_pipeline_components/preview/model_evaluation/model_based_llm_evaluation/autosxs/__init__.py +0 -13
  61. google_cloud_pipeline_components/preview/model_evaluation/model_based_llm_evaluation/autosxs/autosxs_pipeline.py +0 -109
  62. google_cloud_pipeline_components/proto/preflight_validations_pb2.py +0 -58
  63. {google_cloud_pipeline_components-2.18.0.dist-info → google_cloud_pipeline_components-2.20.0.dist-info}/LICENSE +0 -0
  64. {google_cloud_pipeline_components-2.18.0.dist-info → google_cloud_pipeline_components-2.20.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,49 @@
1
+ # GCP Resource Proto
2
+ The gcp_resource is a special parameter that, if a component adopts it, lets the component take advantage of better support in Vertex Pipelines in the following ways:
3
+ * Better UI experience. Vertex Pipelines UI can recognize this parameter, and provide a customized view of the resource's logs and status in the Pipeline console.
4
+ * Better cancellation. The resource will be automatically cancelled when the Pipeline is cancelled.
5
+ * More cost-effective execution. Supported by dataflow only. See [wait_gcp_resources](https://github.com/kubeflow/pipelines/blob/master/components/google-cloud/google_cloud_pipeline_components/experimental/wait_gcp_resources/component.yaml) for details.
6
+
7
+ ## Installation
8
+
9
+ ```shell
10
+ pip install -U google-cloud-pipeline-components
11
+ ```
12
+
13
+ ## Usage
14
+ To write a resource as an output parameter
15
+
16
+ ```
17
+ from google_cloud_pipeline_components.proto.gcp_resources_pb2 import GcpResources
18
+ from google.protobuf.json_format import MessageToJson
19
+
20
+ dataflow_resources = GcpResources()
21
+ dr = dataflow_resources.resources.add()
22
+ dr.resource_type='DataflowJob'
23
+ dr.resource_uri='https://dataflow.googleapis.com/v1b3/projects/[your-project]/locations/us-east1/jobs/[dataflow-job-id]'
24
+
25
+ with open(gcp_resources, 'w') as f:
26
+ f.write(MessageToJson(dataflow_resources))
27
+
28
+ ```
29
+
30
+ To deserialize the resource
31
+ ```
32
+ from google.protobuf.json_format import Parse
33
+
34
+ input_gcp_resources = Parse(payload, GcpResources())
35
+ # input_gcp_resources is ready to be used. For example, input_gcp_resources.resources
36
+ ```
37
+
38
+
39
+ ## Supported resource_type
40
+ You can set the resource_type to an arbitrary string. But only the following types will have the benefits listed above.
41
+ This list will be expanded to support more types in the future.
42
+ * BatchPredictionJob
43
+ * BigQueryJob
44
+ * CustomJob
45
+ * DataflowJob
46
+ * DataprocBatch
47
+ * DataprocLro
48
+ * HyperparameterTuningJob
49
+ * VertexLro
@@ -0,0 +1,25 @@
1
+ syntax = "proto3";
2
+
3
+ package gcp_launcher;
4
+
5
+ import "google/rpc/status.proto";
6
+
7
+ // The schema of the GCP resource. It will be used to parse the output parameter
8
+ // "gcp_resources"
9
+ message GcpResources {
10
+ // The metadata of a resource
11
+ message Resource {
12
+ // The type of the resource. E.g. DataflowJob
13
+ optional string resource_type = 1;
14
+ // The unique resource uri. E.g.
15
+ // https://dataflow.googleapis.com/v1b3/projects/project_1/locations/us-central1/jobs/123
16
+ optional string resource_uri = 2;
17
+ // The error from the resource.
18
+ google.rpc.Status error = 3;
19
+ // Optional. Used by component to save extra custom metadata for the resource.
20
+ repeated string labels = 4;
21
+ }
22
+
23
+ // A list of resources.
24
+ repeated Resource resources = 1;
25
+ }
@@ -0,0 +1,11 @@
1
+ syntax = "proto3";
2
+
3
+ package task_error;
4
+
5
+ // The message allows the 1st party clients of Vertex Pipeline to specify
6
+ // arbitrary error messages they want to catch during the execution of the
7
+ // pipeline.
8
+ message TaskError {
9
+ // The primary error message.
10
+ string error_message = 1;
11
+ }
@@ -0,0 +1,323 @@
1
+ syntax = "proto3";
2
+
3
+ package template_metadata;
4
+
5
+ import "google/protobuf/struct.proto";
6
+
7
+ option java_multiple_files = true;
8
+
9
+ message TemplateMetadata {
10
+ IOMetadata io_metadata = 1;
11
+ ValidationItems preflight_validations = 2;
12
+ }
13
+
14
+ message IOMetadata {
15
+ // The content of a create run page. Top-level of organization. Use repeated
16
+ // to enforce ordering.
17
+ repeated Page pages = 1;
18
+ // Corresponds to the schema Version of PipelineSpec, since this message is
19
+ // tightly coupled to PipelineSpec
20
+ // https://github.com/kubeflow/pipelines/blob/87db18e3a1df08a23a71f872dc8dac6b4bfb9a95/api/v2alpha1/pipeline_spec.proto#L62
21
+ string schema_version = 2;
22
+ }
23
+
24
+ message Page {
25
+ // The title of the page.
26
+ string name = 1;
27
+ // The description of the page.
28
+ string description = 2;
29
+ // The sections in the page. Second-level heirarchical organization of
30
+ // template inputs.
31
+ repeated Section sections = 3;
32
+ }
33
+
34
+ message Section {
35
+ // The name of the section.
36
+ string name = 1;
37
+ // The description of the section.
38
+ string description = 2;
39
+ // The inputs included in this section. Use repeated to enforce ordering.
40
+ repeated Input inputs = 3;
41
+ }
42
+
43
+ message Input {
44
+ // The name of the input.
45
+ // Corresponds to parameter/artifact name in ComponentSpec.input_definitions
46
+ // (https://github.com/kubeflow/pipelines/blob/066f229e27dc2ac8a58a03d7745d5471d718157c/api/v2alpha1/pipeline_spec.proto#L353-L357).
47
+ string name = 1;
48
+ // The display name for the input. Typically a human-readable version of the
49
+ // input parameter name.
50
+ string display_name = 2;
51
+ // The description of the input.
52
+ string description = 3;
53
+ // The explanation of the default value for the input. Tells the user why we
54
+ // selected this default.
55
+ string default_explanation = 4;
56
+ // The string the user sees if they are unsure how to select a parameter.
57
+ string help_text = 5;
58
+ // Detailed information about what types of values are supported for input
59
+ // type specified in PipelineSpec.
60
+ SemanticType semantic_type = 6;
61
+ }
62
+
63
+ message SemanticType {
64
+ // Mirrors PipelineSpec ParameterTypeEnum + artifacts.
65
+ // https://github.com/kubeflow/pipelines/blob/87db18e3a1df08a23a71f872dc8dac6b4bfb9a95/api/v2alpha1/pipeline_spec.proto#L416-L443
66
+ // If none of oneof type is set, use default rendering with no additional
67
+ // constraints.
68
+ oneof type {
69
+ // Corresponds to PipelineSpec NUMBER_DOUBLE.
70
+ Float float_type = 1;
71
+ // Corresponds to PipelineSpec NUMBER_INTEGER.
72
+ Integer integer_type = 2;
73
+ // Corresponds to PipelineSpec STRING.
74
+ String string_type = 3;
75
+ // Corresponds to PipelineSpec BOOLEAN.
76
+ Boolean boolean_type = 4;
77
+ // Corresponds to PipelineSpec LIST.
78
+ List list_type = 6;
79
+ // Corresponds to PipelineSpec STRUCT.
80
+ Struct struct_type = 7;
81
+ // Corresponds to PipelineSpec artifacts.
82
+ Artifact artifact_type = 8;
83
+ }
84
+ }
85
+
86
+ // START: top-level types
87
+ message Float {
88
+ // The minimum value the float can take.
89
+ float min = 1;
90
+ // The maximum value the float can take.
91
+ float max = 2;
92
+ // The validation error if the float is outside of [min, max].
93
+ string validation_error = 3;
94
+ }
95
+
96
+ message Integer {
97
+ // The minimum value the integer can take.
98
+ int32 min = 1;
99
+ // The maximum value the integer can take.
100
+ int32 max = 2;
101
+ // The validation error if the integer is outside of [min, max].
102
+ string validation_error = 3;
103
+ }
104
+
105
+ message String {
106
+ oneof type {
107
+ // The user can enter arbitrary text.
108
+ FreeForm free_form = 1;
109
+ // The user can select one of the available options.
110
+ SelectOne select_one = 2;
111
+ // The user must provide or select a URI.
112
+ UriType uri_type = 3;
113
+ }
114
+ }
115
+
116
+ message Boolean {}
117
+
118
+ message List {
119
+ oneof type {
120
+ // The user can enter arbitrary text for each entry in the list.
121
+ FreeForm free_form = 1;
122
+ // The user can select one of the available options.
123
+ SelectMany select_many = 2;
124
+ // The user must provide or select one or more URIs.
125
+ UriType uri_type = 3;
126
+ }
127
+ }
128
+ message Struct {}
129
+
130
+ message Artifact {
131
+ // Encodes the constraints on the URI.
132
+ UriType uri = 1;
133
+ // The validation error if the URI does not comply with constraints.
134
+ string validation_error = 2;
135
+ }
136
+ // END: top-level types
137
+
138
+ // START: inner messages for top-level types
139
+ message FreeForm {
140
+ // The size of the free-form text box.
141
+ Size size = 1;
142
+ // The regex validation to apply to the free-form text box. Both regex and
143
+ // content can be set.
144
+ string regex = 2;
145
+ // The content of the free-form text box. To the degree possible, the input
146
+ // will be required to be this content type. Both regex and content can be
147
+ // set.
148
+ ContentType content_type = 3;
149
+ // The validation error if the free-form text box does not pass regex or content
150
+ // validation.
151
+ string validation_error = 4;
152
+ }
153
+
154
+ message SelectOne {
155
+ // Specifies how the select one dropdown options are specified.
156
+ oneof type {
157
+ // The dropdown is author-specified options.
158
+ Options options = 1;
159
+
160
+ Location location = 2;
161
+ // The dropdown is a project picker.
162
+ bool project = 3;
163
+ // The dropdown is machine type picker.
164
+ MachineType machine_type = 4;
165
+ }
166
+ }
167
+
168
+ message SelectMany {
169
+ // The options in the dropdown. Use Options, rather than SelectOne, since
170
+ // SelectOne includes dropdown values for which >1 selection should be
171
+ // invalid.
172
+ Options options = 1;
173
+ // The number of options which may be selected.
174
+ int32 select_n = 2;
175
+ }
176
+
177
+ message Location {
178
+ oneof values {
179
+ // Any location which is permitted by the organization/project.
180
+ bool any = 1;
181
+ // An explicit list of location options, which will be filtered by the
182
+ // locations permitted by the organization/project.
183
+ Options options = 2;
184
+ }
185
+ }
186
+
187
+ message MachineType {
188
+ oneof values {
189
+ // Any machine type supported by CustomJobs
190
+ // https://cloud.google.com/vertex-ai/docs/training/configure-compute#machine-types.
191
+ bool any = 1;
192
+ // An explicit list of supported machine types.
193
+ Options options = 2;
194
+ }
195
+ }
196
+
197
+ message Options {
198
+ // An explicit list of permitted options.
199
+ repeated google.protobuf.Value values = 1;
200
+ }
201
+
202
+ // Indicates the relative size of an element, such as a free-form text box.
203
+ enum Size {
204
+ SIZE_UNSET = 0;
205
+ SIZE_SMALL = 1;
206
+ SIZE_MEDIUM = 2;
207
+ SIZE_LARGE = 3;
208
+ }
209
+
210
+ // Content types, which inform field validation, the FE input component, and
211
+ // instructions.
212
+ enum ContentType {
213
+ UNSET_CONTENT = 0; // default
214
+ YAML_CONTENT = 1;
215
+ JSON_CONTENT = 2;
216
+ MARKDOWN_CONTENT = 3;
217
+ HTML_CONTENT = 4;
218
+ DATETIME_CONTENT = 5;
219
+ }
220
+
221
+ enum UriType {
222
+ // Arbitrary user-inputted URI.
223
+ ANY_URI = 0;
224
+ // Any GCS URI.
225
+ GCS_ANY_URI = 1;
226
+ // A GCS bucket URI.
227
+ GCS_BUCKET_URI = 2;
228
+ // A GCS object URI.
229
+ GCS_OBJECT_URI = 3;
230
+ // A BigQuery URI.
231
+ BIGQUERY_URI = 4;
232
+ }
233
+ // END: inner messages for top-level types
234
+
235
+ // Describes the details of validation items.
236
+ message ValidationItems {
237
+ // Validation for Google Cloud Service Account.
238
+ repeated GoogleCloudServiceAccountValidation sa_validations = 1;
239
+ // Validation for Google Cloud Project Quota.
240
+ repeated GoogleCloudProjectQuotaValidation quota_validations = 2;
241
+ // Validation for Google Cloud Api Enablement.
242
+ repeated GoogleCloudApiEnablementValidation api_validations = 3;
243
+ // Validation for Google Cloud Storage.
244
+ repeated GoogleCloudStorageValidation gcs_validations = 4;
245
+ }
246
+
247
+ // Describes the details for Google Cloud Storage Validation.
248
+ message GoogleCloudStorageValidation {
249
+ // Required. URI of the GCS object. Use placeholder to specify the dynamic
250
+ // value like bucket name. For Example:
251
+ // "gs://{{$.parameter.bucket}}/file_name"
252
+ string gcs_uri = 1;
253
+ // Required. Whether the gcs_uri is input or output.
254
+ bool is_input = 2;
255
+ // Required. Default service account principal email to access the gcs object.
256
+ // Example:
257
+ // "{{$.pipeline_google_cloud_project_id}}-compute@developer.gserviceaccount.com"
258
+ // Use placeholder to specify the dynamic value like project id.
259
+ string default_service_account = 3;
260
+ // Optional. If specified, the principal email will be overridden based on the
261
+ // placeholder. Currently supports two placeholders: 1.
262
+ // "{{$.pipeline_google_cloud_service_account}}"(actual value is from
263
+ // PipelineJob.service_account 2.
264
+ // "{{$.parameter.service_account}}"(actual value is from the input parameter
265
+ // of the component/pipeline). If the value doesn't exist or is empty,
266
+ // overriding won't happen.
267
+ string override_placeholder = 4;
268
+
269
+ // Optional. List of GCS URIs.
270
+ repeated string gcs_uris = 5;
271
+ }
272
+
273
+ // Describes the details for Google Cloud Project Quota Validation.
274
+ message GoogleCloudProjectQuotaValidation {
275
+ // Required. Metric name of the quota. Example: "compute.googleapis.com/cpus"
276
+ string metric_name = 1;
277
+ // Required. Value of the quota demand. Example: 2 or 3.5
278
+ // We will validate if the demand is under the limit or not.
279
+ oneof value {
280
+ // A signed 64-bit integer value.
281
+ int64 int64_value = 2;
282
+ // A double precision floating point value.
283
+ double double_value = 3;
284
+ }
285
+ // Not implemented yet, project region is used instead.
286
+ // Optional. Region of the quota. Example: "us-central1"
287
+ // If not specified, the default region (project region) will be used.
288
+ string region = 4;
289
+ }
290
+
291
+ // Describes the details for Google Cloud Service Account Validation.
292
+ message GoogleCloudServiceAccountValidation {
293
+ // Required. Default principal email of the service account used for
294
+ // validation. Example:
295
+ // "{{$.pipeline_google_cloud_project_id}}-compute@developer.gserviceaccount.com"
296
+ // Use placeholder to specify the dynamic value like project id.
297
+ string default_principal_email = 1;
298
+
299
+ // Optional. If specified, the principal email will be overridden based on the
300
+ // placeholder. Currently supports two placeholders: 1.
301
+ // "{{$.pipeline_google_cloud_service_account}}"(actual value is from
302
+ // PipelineJob.service_account 2.
303
+ // "{{$.parameter.service_account}}"(actual value is from the input parameter
304
+ // of the component/pipeline). If the value doesn't exist or is empty,
305
+ // overriding won't happen.
306
+ string override_placeholder = 2;
307
+
308
+ // Optional. Permission required to have for the service account.
309
+ // Pipeline service will check if provided SA has these permissions.
310
+ // Example: "aiplatform.metadataStores.get"
311
+ repeated string permissions = 3;
312
+
313
+ // Optional. Roles suggestions for users to grant to SA. We will ***not***
314
+ // verify if the SA has the role granted or not. The role names will occur in
315
+ // preflight validations' error message as an action item for users.
316
+ repeated string role_names = 4;
317
+ }
318
+
319
+ // Describes the details of Google Cloud Api Enablement Validation.
320
+ message GoogleCloudApiEnablementValidation {
321
+ // Required. Service names of Google Cloud Api.
322
+ repeated string service_names = 1;
323
+ }
@@ -658,7 +658,7 @@ deploymentSpec:
658
658
  \ = client.create_dataset(dataset=dataset, exists_ok=exists_ok)\n return\
659
659
  \ collections.namedtuple('Outputs', ['project_id', 'dataset_id'])(\n \
660
660
  \ ref.project, ref.dataset_id)\n\n"
661
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20241121_0625
661
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625
662
662
  exec-bigquery-create-dataset-2:
663
663
  container:
664
664
  args:
@@ -693,7 +693,7 @@ deploymentSpec:
693
693
  \ = client.create_dataset(dataset=dataset, exists_ok=exists_ok)\n return\
694
694
  \ collections.namedtuple('Outputs', ['project_id', 'dataset_id'])(\n \
695
695
  \ ref.project, ref.dataset_id)\n\n"
696
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20241121_0625
696
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625
697
697
  exec-bigquery-delete-dataset-with-prefix:
698
698
  container:
699
699
  args:
@@ -727,7 +727,7 @@ deploymentSpec:
727
727
  \ if dataset.dataset_id.startswith(dataset_prefix):\n client.delete_dataset(\n\
728
728
  \ dataset=dataset.dataset_id,\n delete_contents=delete_contents)\n\
729
729
  \n"
730
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20241121_0625
730
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625
731
731
  exec-bigquery-query-job:
732
732
  container:
733
733
  args:
@@ -788,7 +788,7 @@ deploymentSpec:
788
788
  \ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\
789
789
  \ if write_disposition:\n config['write_disposition'] = write_disposition\n\
790
790
  \ return config\n\n"
791
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20241121_0625
791
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625
792
792
  exec-get-first-valid:
793
793
  container:
794
794
  args:
@@ -812,7 +812,7 @@ deploymentSpec:
812
812
  \ import json\n # pylint: enable=g-import-not-at-top,import-outside-toplevel,redefined-outer-name,reimported\n\
813
813
  \n for value in json.loads(values):\n if value:\n return value\n\
814
814
  \ raise ValueError('No valid values.')\n\n"
815
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20241121_0625
815
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625
816
816
  exec-get-model-metadata:
817
817
  container:
818
818
  args:
@@ -851,7 +851,7 @@ deploymentSpec:
851
851
  \ 'forecast_horizon',\n ],\n )(\n options.time_series_timestamp_column,\n\
852
852
  \ options.time_series_id_column,\n options.time_series_data_column,\n\
853
853
  \ options.horizon,\n )\n\n"
854
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20241121_0625
854
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625
855
855
  exec-get-table-location:
856
856
  container:
857
857
  args:
@@ -887,7 +887,7 @@ deploymentSpec:
887
887
  \ if table.startswith('bq://'):\n table = table[len('bq://'):]\n elif\
888
888
  \ table.startswith('bigquery://'):\n table = table[len('bigquery://'):]\n\
889
889
  \ return client.get_table(table).location\n\n"
890
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20241121_0625
890
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625
891
891
  exec-load-table-from-uri:
892
892
  container:
893
893
  args:
@@ -928,7 +928,7 @@ deploymentSpec:
928
928
  \ source_format=source_format)\n client.load_table_from_uri(\n source_uris=csv_list,\n\
929
929
  \ destination=destination,\n project=project,\n location=location,\n\
930
930
  \ job_config=job_config).result()\n return destination\n\n"
931
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20241121_0625
931
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625
932
932
  exec-maybe-replace-with-default:
933
933
  container:
934
934
  args:
@@ -950,7 +950,7 @@ deploymentSpec:
950
950
  \ *\n\ndef maybe_replace_with_default(value: str, default: str = '') ->\
951
951
  \ str:\n \"\"\"Replaces string with another value if it is a dash.\"\"\"\
952
952
  \n return default if not value else value\n\n"
953
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20241121_0625
953
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625
954
954
  exec-validate-inputs:
955
955
  container:
956
956
  args:
@@ -1046,7 +1046,7 @@ deploymentSpec:
1046
1046
  \ raise ValueError(\n 'Granularity unit should be one of the\
1047
1047
  \ following: '\n f'{valid_data_granularity_units}, got: {data_granularity_unit}.')\n\
1048
1048
  \n"
1049
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20241121_0625
1049
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20250129_0625
1050
1050
  pipelineInfo:
1051
1051
  description: Forecasts using a BQML ARIMA_PLUS model.
1052
1052
  name: automl-tabular-bqml-arima-prediction