google-cloud-pipeline-components 2.14.0__py3-none-any.whl → 2.14.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of google-cloud-pipeline-components might be problematic; see the registry's advisory page for more details.

Files changed (31):
  1. google_cloud_pipeline_components/_implementation/llm/deployment_graph.py +10 -26
  2. google_cloud_pipeline_components/_implementation/llm/generated/refined_image_versions.py +1 -1
  3. google_cloud_pipeline_components/_implementation/llm/infer_preprocessor.py +109 -0
  4. google_cloud_pipeline_components/_implementation/llm/online_evaluation_pairwise.py +8 -0
  5. google_cloud_pipeline_components/_implementation/llm/reward_model_graph.py +5 -6
  6. google_cloud_pipeline_components/_implementation/llm/rlhf_preprocessor.py +24 -0
  7. google_cloud_pipeline_components/_implementation/model_evaluation/__init__.py +0 -12
  8. google_cloud_pipeline_components/_implementation/model_evaluation/llm_embedding/evaluation_llm_embedding_pipeline.py +2 -1
  9. google_cloud_pipeline_components/preview/llm/infer/component.py +22 -25
  10. google_cloud_pipeline_components/preview/llm/rlhf/component.py +12 -2
  11. google_cloud_pipeline_components/preview/model_evaluation/__init__.py +4 -1
  12. google_cloud_pipeline_components/preview/model_evaluation/model_evaluation_import_component.py +209 -0
  13. google_cloud_pipeline_components/proto/task_error_pb2.py +33 -0
  14. google_cloud_pipeline_components/v1/automl/forecasting/prophet_predict_pipeline.yaml +13 -13
  15. google_cloud_pipeline_components/v1/automl/forecasting/prophet_trainer.py +10 -0
  16. google_cloud_pipeline_components/v1/automl/forecasting/prophet_trainer_pipeline.yaml +4 -1
  17. google_cloud_pipeline_components/v1/model_evaluation/error_analysis_pipeline.py +8 -10
  18. google_cloud_pipeline_components/v1/model_evaluation/evaluated_annotation_pipeline.py +2 -2
  19. google_cloud_pipeline_components/v1/model_evaluation/evaluation_automl_tabular_feature_attribution_pipeline.py +2 -2
  20. google_cloud_pipeline_components/v1/model_evaluation/evaluation_automl_tabular_pipeline.py +2 -2
  21. google_cloud_pipeline_components/v1/model_evaluation/evaluation_automl_unstructure_data_pipeline.py +2 -2
  22. google_cloud_pipeline_components/v1/model_evaluation/evaluation_feature_attribution_pipeline.py +2 -2
  23. google_cloud_pipeline_components/v1/model_evaluation/evaluation_llm_classification_pipeline.py +4 -2
  24. google_cloud_pipeline_components/v1/model_evaluation/evaluation_llm_text_generation_pipeline.py +4 -2
  25. google_cloud_pipeline_components/v1/model_evaluation/model_based_llm_evaluation/autosxs/autosxs_pipeline.py +1 -0
  26. google_cloud_pipeline_components/version.py +1 -1
  27. {google_cloud_pipeline_components-2.14.0.dist-info → google_cloud_pipeline_components-2.14.1.dist-info}/METADATA +1 -1
  28. {google_cloud_pipeline_components-2.14.0.dist-info → google_cloud_pipeline_components-2.14.1.dist-info}/RECORD +31 -28
  29. {google_cloud_pipeline_components-2.14.0.dist-info → google_cloud_pipeline_components-2.14.1.dist-info}/LICENSE +0 -0
  30. {google_cloud_pipeline_components-2.14.0.dist-info → google_cloud_pipeline_components-2.14.1.dist-info}/WHEEL +0 -0
  31. {google_cloud_pipeline_components-2.14.0.dist-info → google_cloud_pipeline_components-2.14.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,209 @@
1
+ # Copyright 2024 The Kubeflow Authors. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
+ from typing import List, Optional
17
+
18
+ from google_cloud_pipeline_components import _image
19
+ from google_cloud_pipeline_components.types.artifact_types import ClassificationMetrics
20
+ from google_cloud_pipeline_components.types.artifact_types import ForecastingMetrics
21
+ from google_cloud_pipeline_components.types.artifact_types import RegressionMetrics
22
+ from google_cloud_pipeline_components.types.artifact_types import VertexModel
23
+ from kfp import dsl
24
+ from kfp.dsl import Input
25
+ from kfp.dsl import Metrics
26
+
27
+
28
+ @dsl.container_component
29
+ def model_evaluation_import(
30
+ model: Input[VertexModel],
31
+ gcp_resources: dsl.OutputPath(str),
32
+ evaluation_resource_name: dsl.OutputPath(str),
33
+ metrics: Optional[Input[Metrics]] = None,
34
+ row_based_metrics: Optional[Input[Metrics]] = None,
35
+ problem_type: Optional[str] = None,
36
+ classification_metrics: Optional[Input[ClassificationMetrics]] = None,
37
+ forecasting_metrics: Optional[Input[ForecastingMetrics]] = None,
38
+ regression_metrics: Optional[Input[RegressionMetrics]] = None,
39
+ text_generation_metrics: Optional[Input[Metrics]] = None,
40
+ question_answering_metrics: Optional[Input[Metrics]] = None,
41
+ summarization_metrics: Optional[Input[Metrics]] = None,
42
+ explanation: Optional[Input[Metrics]] = None,
43
+ feature_attributions: Optional[Input[Metrics]] = None,
44
+ embedding_metrics: Optional[Input[Metrics]] = None,
45
+ display_name: str = "",
46
+ dataset_path: str = "",
47
+ dataset_paths: List[str] = [],
48
+ dataset_type: str = "",
49
+ ):
50
+ # fmt: off
51
+ """Imports a model evaluation artifact to an existing Vertex model with
52
+ ModelService.ImportModelEvaluation.
53
+
54
+ For more details, see
55
+ https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.models.evaluations
56
+ One of the metrics inputs must be provided: metrics (with problem_type),
57
+ classification_metrics, regression_metrics, forecasting_metrics, text_generation_metrics,
58
+ question_answering_metrics, summarization_metrics, or embedding_metrics.
59
+
60
+ Args:
61
+ model: Vertex model resource that will be the parent resource of the
62
+ uploaded evaluation.
63
+ metrics: Path of metrics generated from an evaluation component.
64
+ row_based_metrics:
65
+ Path of row_based_metrics generated from an evaluation component.
66
+ problem_type: The problem type of the metrics being imported to the
67
+ VertexModel. `classification`, `regression`, `forecasting`,
68
+ `text-generation`, `question-answering`, and `summarization` are the
69
+ currently supported problem types. Must be provided when `metrics` is
70
+ provided.
71
+ classification_metrics: google.ClassificationMetrics artifact generated from
72
+ the ModelEvaluationClassificationOp component.
73
+ forecasting_metrics: google.ForecastingMetrics artifact generated from
74
+ the ModelEvaluationForecastingOp component.
75
+ regression_metrics: google.RegressionMetrics artifact generated from
76
+ the ModelEvaluationRegressionOp component.
77
+ text_generation_metrics: system.Metrics artifact generated from
78
+ the LLMEvaluationTextGenerationOp component. Subject to change to
79
+ google.TextGenerationMetrics.
80
+ question_answering_metrics: system.Metrics artifact generated from
81
+ the LLMEvaluationTextGenerationOp component. Subject to change to
82
+ google.QuestionAnsweringMetrics.
83
+ summarization_metrics: system.Metrics artifact generated from
84
+ the LLMEvaluationTextGenerationOp component. Subject to change to
85
+ google.SummarizationMetrics.
86
+ explanation: Path for model explanation metrics generated from an evaluation
87
+ component.
88
+ feature_attributions: The feature attributions metrics artifact generated
89
+ from the feature attribution component.
90
+ embedding_metrics: The embedding metrics artifact generated from the
91
+ embedding retrieval metrics component.
92
+ display_name: The display name for the uploaded model evaluation resource.
93
+ """
94
+ # fmt: on
95
+ return dsl.ContainerSpec(
96
+ image=_image.GCPC_IMAGE_TAG,
97
+ command=[
98
+ "python3",
99
+ "-u",
100
+ "-m",
101
+ "google_cloud_pipeline_components.container._implementation.model_evaluation.import_model_evaluation",
102
+ ],
103
+ args=[
104
+ dsl.IfPresentPlaceholder(
105
+ input_name="metrics",
106
+ then=[
107
+ "--metrics",
108
+ metrics.uri,
109
+ "--metrics_explanation",
110
+ metrics.metadata["explanation_gcs_path"],
111
+ ],
112
+ ),
113
+ dsl.IfPresentPlaceholder(
114
+ input_name="row_based_metrics",
115
+ then=[
116
+ "--row_based_metrics",
117
+ row_based_metrics.uri,
118
+ ],
119
+ ),
120
+ dsl.IfPresentPlaceholder(
121
+ input_name="explanation",
122
+ then=[
123
+ "--explanation",
124
+ explanation.metadata["explanation_gcs_path"],
125
+ ],
126
+ ),
127
+ dsl.IfPresentPlaceholder(
128
+ input_name="classification_metrics",
129
+ then=[
130
+ "--classification_metrics",
131
+ classification_metrics.uri,
132
+ ],
133
+ ),
134
+ dsl.IfPresentPlaceholder(
135
+ input_name="forecasting_metrics",
136
+ then=[
137
+ "--forecasting_metrics",
138
+ forecasting_metrics.uri,
139
+ ],
140
+ ),
141
+ dsl.IfPresentPlaceholder(
142
+ input_name="regression_metrics",
143
+ then=[
144
+ "--regression_metrics",
145
+ regression_metrics.uri,
146
+ ],
147
+ ),
148
+ dsl.IfPresentPlaceholder(
149
+ input_name="text_generation_metrics",
150
+ then=[
151
+ "--text_generation_metrics",
152
+ text_generation_metrics.uri,
153
+ ],
154
+ ),
155
+ dsl.IfPresentPlaceholder(
156
+ input_name="question_answering_metrics",
157
+ then=[
158
+ "--question_answering_metrics",
159
+ question_answering_metrics.uri,
160
+ ],
161
+ ),
162
+ dsl.IfPresentPlaceholder(
163
+ input_name="summarization_metrics",
164
+ then=[
165
+ "--summarization_metrics",
166
+ summarization_metrics.uri,
167
+ ],
168
+ ),
169
+ dsl.IfPresentPlaceholder(
170
+ input_name="feature_attributions",
171
+ then=[
172
+ "--feature_attributions",
173
+ feature_attributions.uri,
174
+ ],
175
+ ),
176
+ dsl.IfPresentPlaceholder(
177
+ input_name="embedding_metrics",
178
+ then=[
179
+ "--embedding_metrics",
180
+ embedding_metrics.uri,
181
+ ],
182
+ ),
183
+ dsl.IfPresentPlaceholder(
184
+ input_name="problem_type",
185
+ then=[
186
+ "--problem_type",
187
+ problem_type,
188
+ ],
189
+ ),
190
+ "--display_name",
191
+ display_name,
192
+ "--dataset_path",
193
+ dataset_path,
194
+ "--dataset_paths",
195
+ dataset_paths,
196
+ "--dataset_type",
197
+ dataset_type,
198
+ "--pipeline_job_id",
199
+ dsl.PIPELINE_JOB_ID_PLACEHOLDER,
200
+ "--pipeline_job_resource_name",
201
+ dsl.PIPELINE_JOB_RESOURCE_NAME_PLACEHOLDER,
202
+ "--model_name",
203
+ model.metadata["resourceName"],
204
+ "--gcp_resources",
205
+ gcp_resources,
206
+ "--evaluation_resource_name",
207
+ evaluation_resource_name,
208
+ ],
209
+ )
@@ -0,0 +1,33 @@
1
+ # -*- coding: utf-8 -*-
2
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
3
+ # NO CHECKED-IN PROTOBUF GENCODE
4
+ # Protobuf Python Version: 0.20240502.0
5
+ """Generated protocol buffer code."""
6
+ from google.protobuf import descriptor as _descriptor
7
+ from google.protobuf import descriptor_pool as _descriptor_pool
8
+ from google.protobuf import runtime_version as _runtime_version
9
+ from google.protobuf import symbol_database as _symbol_database
10
+ from google.protobuf.internal import builder as _builder
11
+ # @@protoc_insertion_point(imports)
12
+
13
+ _sym_db = _symbol_database.Default()
14
+
15
+
16
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
17
+ b'\n\x13task_error.proto\x12\ntask_error""\n\tTaskError\x12\x15\n\rerror_message\x18\x01'
18
+ b' \x01(\tB\x02P\x01\x62\x06proto3'
19
+ )
20
+
21
+ _globals = globals()
22
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
23
+ _builder.BuildTopDescriptorsAndMessages(
24
+ DESCRIPTOR,
25
+ 'google_cloud_pipeline_components.google_cloud_pipeline_components.proto.task_error_pb2',
26
+ _globals,
27
+ )
28
+ if not _descriptor._USE_C_DESCRIPTORS:
29
+ _globals['DESCRIPTOR']._loaded_options = None
30
+ _globals['DESCRIPTOR']._serialized_options = b'P\001'
31
+ _globals['_TASKERROR']._serialized_start = 119
32
+ _globals['_TASKERROR']._serialized_end = 153
33
+ # @@protoc_insertion_point(module_scope)
@@ -1461,7 +1461,7 @@ deploymentSpec:
1461
1461
  \ = client.create_dataset(dataset=dataset, exists_ok=exists_ok)\n return\
1462
1462
  \ collections.namedtuple('Outputs', ['project_id', 'dataset_id'])(\n \
1463
1463
  \ ref.project, ref.dataset_id)\n\n"
1464
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
1464
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
1465
1465
  exec-bigquery-delete-dataset-with-prefix:
1466
1466
  container:
1467
1467
  args:
@@ -1495,7 +1495,7 @@ deploymentSpec:
1495
1495
  \ if dataset.dataset_id.startswith(dataset_prefix):\n client.delete_dataset(\n\
1496
1496
  \ dataset=dataset.dataset_id,\n delete_contents=delete_contents)\n\
1497
1497
  \n"
1498
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
1498
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
1499
1499
  exec-bigquery-query-job:
1500
1500
  container:
1501
1501
  args:
@@ -1583,7 +1583,7 @@ deploymentSpec:
1583
1583
  \ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\
1584
1584
  \ if write_disposition:\n config['write_disposition'] = write_disposition\n\
1585
1585
  \ return config\n\n"
1586
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
1586
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
1587
1587
  exec-build-job-configuration-query-2:
1588
1588
  container:
1589
1589
  args:
@@ -1617,7 +1617,7 @@ deploymentSpec:
1617
1617
  \ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\
1618
1618
  \ if write_disposition:\n config['write_disposition'] = write_disposition\n\
1619
1619
  \ return config\n\n"
1620
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
1620
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
1621
1621
  exec-get-first-valid:
1622
1622
  container:
1623
1623
  args:
@@ -1641,7 +1641,7 @@ deploymentSpec:
1641
1641
  \ import json\n # pylint: enable=g-import-not-at-top,import-outside-toplevel,redefined-outer-name,reimported\n\
1642
1642
  \n for value in json.loads(values):\n if value:\n return value\n\
1643
1643
  \ raise ValueError('No valid values.')\n\n"
1644
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
1644
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
1645
1645
  exec-get-table-location:
1646
1646
  container:
1647
1647
  args:
@@ -1677,7 +1677,7 @@ deploymentSpec:
1677
1677
  \ if table.startswith('bq://'):\n table = table[len('bq://'):]\n elif\
1678
1678
  \ table.startswith('bigquery://'):\n table = table[len('bigquery://'):]\n\
1679
1679
  \ return client.get_table(table).location\n\n"
1680
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
1680
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
1681
1681
  exec-get-table-location-2:
1682
1682
  container:
1683
1683
  args:
@@ -1713,7 +1713,7 @@ deploymentSpec:
1713
1713
  \ if table.startswith('bq://'):\n table = table[len('bq://'):]\n elif\
1714
1714
  \ table.startswith('bigquery://'):\n table = table[len('bigquery://'):]\n\
1715
1715
  \ return client.get_table(table).location\n\n"
1716
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
1716
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
1717
1717
  exec-load-table-from-uri:
1718
1718
  container:
1719
1719
  args:
@@ -1754,7 +1754,7 @@ deploymentSpec:
1754
1754
  \ source_format=source_format)\n client.load_table_from_uri(\n source_uris=csv_list,\n\
1755
1755
  \ destination=destination,\n project=project,\n location=location,\n\
1756
1756
  \ job_config=job_config).result()\n return destination\n\n"
1757
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
1757
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
1758
1758
  exec-make-vertex-model-artifact:
1759
1759
  container:
1760
1760
  args:
@@ -1778,7 +1778,7 @@ deploymentSpec:
1778
1778
  Creates a google.VertexModel artifact.\"\"\"\n vertex_model.metadata =\
1779
1779
  \ {'resourceName': model_resource_name}\n vertex_model.uri = (f'https://{location}-aiplatform.googleapis.com'\n\
1780
1780
  \ f'/v1/{model_resource_name}')\n\n"
1781
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
1781
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
1782
1782
  exec-maybe-replace-with-default:
1783
1783
  container:
1784
1784
  args:
@@ -1800,7 +1800,7 @@ deploymentSpec:
1800
1800
  \ *\n\ndef maybe_replace_with_default(value: str, default: str = '') ->\
1801
1801
  \ str:\n \"\"\"Replaces string with another value if it is a dash.\"\"\"\
1802
1802
  \n return default if not value else value\n\n"
1803
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
1803
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
1804
1804
  exec-model-batch-predict:
1805
1805
  container:
1806
1806
  args:
@@ -1879,7 +1879,7 @@ deploymentSpec:
1879
1879
  \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\
1880
1880
  \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\
1881
1881
  \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n"
1882
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
1882
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
1883
1883
  exec-table-to-uri-2:
1884
1884
  container:
1885
1885
  args:
@@ -1909,7 +1909,7 @@ deploymentSpec:
1909
1909
  \ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\
1910
1910
  \ return collections.namedtuple(\n 'Outputs',\n ['project_id',\
1911
1911
  \ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n"
1912
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
1912
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
1913
1913
  exec-validate-inputs:
1914
1914
  container:
1915
1915
  args:
@@ -2005,7 +2005,7 @@ deploymentSpec:
2005
2005
  \ raise ValueError(\n 'Granularity unit should be one of the\
2006
2006
  \ following: '\n f'{valid_data_granularity_units}, got: {data_granularity_unit}.')\n\
2007
2007
  \n"
2008
- image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
2008
+ image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
2009
2009
  pipelineInfo:
2010
2010
  description: Creates a batch prediction using a Prophet model.
2011
2011
  name: prophet-predict
@@ -180,6 +180,16 @@ def prophet_trainer(
180
180
  '--dataflow_use_public_ips=',
181
181
  dataflow_use_public_ips,
182
182
  '", "',
183
+ '--dataflow_staging_dir=',
184
+ root_dir,
185
+ (
186
+ f'/{dsl.PIPELINE_JOB_ID_PLACEHOLDER}/{dsl.PIPELINE_TASK_ID_PLACEHOLDER}/dataflow_staging", "'
187
+ ),
188
+ '--dataflow_tmp_dir=',
189
+ root_dir,
190
+ (
191
+ f'/{dsl.PIPELINE_JOB_ID_PLACEHOLDER}/{dsl.PIPELINE_TASK_ID_PLACEHOLDER}/dataflow_tmp", "'
192
+ ),
183
193
  '--gcp_resources_path=',
184
194
  gcp_resources,
185
195
  '", "',
@@ -2418,7 +2418,10 @@ deploymentSpec:
2418
2418
  "\", \"", "--dataflow_service_account=", "{{$.inputs.parameters[''dataflow_service_account'']}}",
2419
2419
  "\", \"", "--dataflow_subnetwork=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}",
2420
2420
  "\", \"", "--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}",
2421
- "\", \"", "--gcp_resources_path=", "{{$.outputs.parameters[''gcp_resources''].output_file}}",
2421
+ "\", \"", "--dataflow_staging_dir=", "{{$.inputs.parameters[''root_dir'']}}",
2422
+ "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_staging\", \"",
2423
+ "--dataflow_tmp_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp\",
2424
+ \"", "--gcp_resources_path=", "{{$.outputs.parameters[''gcp_resources''].output_file}}",
2422
2425
  "\", \"", "--executor_input={{$.json_escape[1]}}\"]}}]}}"]}'
2423
2426
  command:
2424
2427
  - python3
@@ -1,4 +1,4 @@
1
- # Copyright 2023 The Kubeflow Authors. All Rights Reserved.
1
+ # Copyright 2024 The Kubeflow Authors. All Rights Reserved.
2
2
  #
3
3
  # Licensed under the Apache License, Version 2.0 (the "License");
4
4
  # you may not use this file except in compliance with the License.
@@ -21,7 +21,7 @@ from google_cloud_pipeline_components._implementation.model_evaluation import Ev
21
21
  from google_cloud_pipeline_components._implementation.model_evaluation import EvaluationDatasetPreprocessorOp as DatasetPreprocessorOp
22
22
  from google_cloud_pipeline_components._implementation.model_evaluation import FeatureExtractorOp
23
23
  from google_cloud_pipeline_components._implementation.model_evaluation import ModelImportEvaluatedAnnotationOp
24
- from google_cloud_pipeline_components._implementation.model_evaluation import ModelImportEvaluationOp
24
+ from google_cloud_pipeline_components.preview.model_evaluation.model_evaluation_import_component import model_evaluation_import as ModelImportEvaluationOp
25
25
  from google_cloud_pipeline_components.v1.batch_predict_job import ModelBatchPredictOp
26
26
  from google_cloud_pipeline_components.v1.dataset import GetVertexDatasetOp
27
27
  from google_cloud_pipeline_components.v1.model_evaluation.classification_component import model_evaluation_classification as ModelEvaluationClassificationOp
@@ -224,14 +224,12 @@ def vision_model_error_analysis_pipeline( # pylint: disable=dangerous-default-v
224
224
  )
225
225
 
226
226
  with dsl.Condition(
227
- (
228
- (
229
- test_dataset_resource_name == ''
230
- and training_dataset_resource_name == ''
231
- and test_dataset_annotation_set_name == ''
232
- and training_dataset_annotation_set_name == ''
233
- )
234
- ),
227
+ ((
228
+ test_dataset_resource_name == ''
229
+ and training_dataset_resource_name == ''
230
+ and test_dataset_annotation_set_name == ''
231
+ and training_dataset_annotation_set_name == ''
232
+ )),
235
233
  name='CustomDataset',
236
234
  ):
237
235
  dataset_preprocessor_task = DatasetPreprocessorOp(
@@ -1,4 +1,4 @@
1
- # Copyright 2023 The Kubeflow Authors. All Rights Reserved.
1
+ # Copyright 2024 The Kubeflow Authors. All Rights Reserved.
2
2
  #
3
3
  # Licensed under the Apache License, Version 2.0 (the "License");
4
4
  # you may not use this file except in compliance with the License.
@@ -19,7 +19,7 @@ from google_cloud_pipeline_components._implementation.model import GetVertexMode
19
19
  from google_cloud_pipeline_components._implementation.model_evaluation import EvaluatedAnnotationOp
20
20
  from google_cloud_pipeline_components._implementation.model_evaluation import EvaluationDatasetPreprocessorOp as DatasetPreprocessorOp
21
21
  from google_cloud_pipeline_components._implementation.model_evaluation import ModelImportEvaluatedAnnotationOp
22
- from google_cloud_pipeline_components._implementation.model_evaluation import ModelImportEvaluationOp
22
+ from google_cloud_pipeline_components.preview.model_evaluation.model_evaluation_import_component import model_evaluation_import as ModelImportEvaluationOp
23
23
  from google_cloud_pipeline_components.v1.batch_predict_job import ModelBatchPredictOp
24
24
  from google_cloud_pipeline_components.v1.dataset import GetVertexDatasetOp
25
25
  from google_cloud_pipeline_components.v1.model_evaluation.classification_component import model_evaluation_classification as ModelEvaluationClassificationOp
@@ -1,4 +1,4 @@
1
- # Copyright 2023 The Kubeflow Authors. All Rights Reserved.
1
+ # Copyright 2024 The Kubeflow Authors. All Rights Reserved.
2
2
  #
3
3
  # Licensed under the Apache License, Version 2.0 (the "License");
4
4
  # you may not use this file except in compliance with the License.
@@ -17,7 +17,7 @@ from typing import Any, Dict, List, NamedTuple
17
17
  from google_cloud_pipeline_components import _placeholders
18
18
  from google_cloud_pipeline_components._implementation.model import GetVertexModelOp
19
19
  from google_cloud_pipeline_components._implementation.model_evaluation import FeatureAttributionGraphComponentOp
20
- from google_cloud_pipeline_components._implementation.model_evaluation import ModelImportEvaluationOp
20
+ from google_cloud_pipeline_components.preview.model_evaluation.model_evaluation_import_component import model_evaluation_import as ModelImportEvaluationOp
21
21
  from google_cloud_pipeline_components.types.artifact_types import ClassificationMetrics
22
22
  from google_cloud_pipeline_components.types.artifact_types import RegressionMetrics
23
23
  from google_cloud_pipeline_components.v1.batch_predict_job import ModelBatchPredictOp
@@ -1,4 +1,4 @@
1
- # Copyright 2023 The Kubeflow Authors. All Rights Reserved.
1
+ # Copyright 2024 The Kubeflow Authors. All Rights Reserved.
2
2
  #
3
3
  # Licensed under the Apache License, Version 2.0 (the "License");
4
4
  # you may not use this file except in compliance with the License.
@@ -16,7 +16,7 @@ from typing import Any, List, NamedTuple
16
16
 
17
17
  from google_cloud_pipeline_components import _placeholders
18
18
  from google_cloud_pipeline_components._implementation.model import GetVertexModelOp
19
- from google_cloud_pipeline_components._implementation.model_evaluation import ModelImportEvaluationOp
19
+ from google_cloud_pipeline_components.preview.model_evaluation.model_evaluation_import_component import model_evaluation_import as ModelImportEvaluationOp
20
20
  from google_cloud_pipeline_components.types.artifact_types import ClassificationMetrics
21
21
  from google_cloud_pipeline_components.types.artifact_types import RegressionMetrics
22
22
  from google_cloud_pipeline_components.v1.batch_predict_job import ModelBatchPredictOp
@@ -1,4 +1,4 @@
1
- # Copyright 2023 The Kubeflow Authors. All Rights Reserved.
1
+ # Copyright 2024 The Kubeflow Authors. All Rights Reserved.
2
2
  #
3
3
  # Licensed under the Apache License, Version 2.0 (the "License");
4
4
  # you may not use this file except in compliance with the License.
@@ -16,8 +16,8 @@ from typing import Any, List, NamedTuple
16
16
 
17
17
  from google_cloud_pipeline_components import _placeholders
18
18
  from google_cloud_pipeline_components._implementation.model import GetVertexModelOp
19
- from google_cloud_pipeline_components._implementation.model_evaluation import ModelImportEvaluationOp
20
19
  from google_cloud_pipeline_components._implementation.model_evaluation import TargetFieldDataRemoverOp
20
+ from google_cloud_pipeline_components.preview.model_evaluation.model_evaluation_import_component import model_evaluation_import as ModelImportEvaluationOp
21
21
  from google_cloud_pipeline_components.types.artifact_types import ClassificationMetrics
22
22
  from google_cloud_pipeline_components.types.artifact_types import RegressionMetrics
23
23
  from google_cloud_pipeline_components.v1.batch_predict_job import ModelBatchPredictOp
@@ -1,4 +1,4 @@
1
- # Copyright 2023 The Kubeflow Authors. All Rights Reserved.
1
+ # Copyright 2024 The Kubeflow Authors. All Rights Reserved.
2
2
  #
3
3
  # Licensed under the Apache License, Version 2.0 (the "License");
4
4
  # you may not use this file except in compliance with the License.
@@ -17,8 +17,8 @@ from typing import Any, Dict, List, NamedTuple
17
17
  from google_cloud_pipeline_components import _placeholders
18
18
  from google_cloud_pipeline_components._implementation.model import GetVertexModelOp
19
19
  from google_cloud_pipeline_components._implementation.model_evaluation import FeatureAttributionGraphComponentOp
20
- from google_cloud_pipeline_components._implementation.model_evaluation import ModelImportEvaluationOp
21
20
  from google_cloud_pipeline_components._implementation.model_evaluation import TargetFieldDataRemoverOp
21
+ from google_cloud_pipeline_components.preview.model_evaluation.model_evaluation_import_component import model_evaluation_import as ModelImportEvaluationOp
22
22
  from google_cloud_pipeline_components.types.artifact_types import ClassificationMetrics
23
23
  from google_cloud_pipeline_components.types.artifact_types import RegressionMetrics
24
24
  from google_cloud_pipeline_components.v1.batch_predict_job import ModelBatchPredictOp
@@ -1,4 +1,4 @@
1
- # Copyright 2023 The Kubeflow Authors. All Rights Reserved.
1
+ # Copyright 2024 The Kubeflow Authors. All Rights Reserved.
2
2
  #
3
3
  # Licensed under the Apache License, Version 2.0 (the "License");
4
4
  # you may not use this file except in compliance with the License.
@@ -17,13 +17,15 @@ from typing import Dict, List, NamedTuple
17
17
 
18
18
  from google_cloud_pipeline_components._implementation.model_evaluation import LLMEvaluationClassificationPredictionsPostprocessorOp
19
19
  from google_cloud_pipeline_components._implementation.model_evaluation import LLMEvaluationPreprocessorOp
20
- from google_cloud_pipeline_components._implementation.model_evaluation import ModelImportEvaluationOp
21
20
  from google_cloud_pipeline_components._implementation.model_evaluation import ModelNamePreprocessorOp
21
+ from google_cloud_pipeline_components.preview.model_evaluation.model_evaluation_import_component import model_evaluation_import as ModelImportEvaluationOp
22
22
  from google_cloud_pipeline_components.types.artifact_types import ClassificationMetrics
23
23
  from google_cloud_pipeline_components.types.artifact_types import VertexModel
24
24
  from google_cloud_pipeline_components.v1.batch_predict_job import ModelBatchPredictOp
25
25
  from google_cloud_pipeline_components.v1.model_evaluation.classification_component import model_evaluation_classification as ModelEvaluationClassificationOp
26
26
  from kfp import dsl
27
+
28
+
27
29
  # pylint: disable=unused-argument, unexpected-keyword-arg
28
30
 
29
31
  _PIPELINE_NAME = 'evaluation-llm-classification-pipeline'
@@ -1,4 +1,4 @@
1
- # Copyright 2023 The Kubeflow Authors. All Rights Reserved.
1
+ # Copyright 2024 The Kubeflow Authors. All Rights Reserved.
2
2
  #
3
3
  # Licensed under the Apache License, Version 2.0 (the "License");
4
4
  # you may not use this file except in compliance with the License.
@@ -17,11 +17,13 @@ from typing import Dict, List, NamedTuple
17
17
 
18
18
  from google_cloud_pipeline_components._implementation.model_evaluation import LLMEvaluationPreprocessorOp
19
19
  from google_cloud_pipeline_components._implementation.model_evaluation import LLMEvaluationTextGenerationOp
20
- from google_cloud_pipeline_components._implementation.model_evaluation import ModelImportEvaluationOp
21
20
  from google_cloud_pipeline_components._implementation.model_evaluation import ModelNamePreprocessorOp
21
+ from google_cloud_pipeline_components.preview.model_evaluation.model_evaluation_import_component import model_evaluation_import as ModelImportEvaluationOp
22
22
  from google_cloud_pipeline_components.types.artifact_types import VertexModel
23
23
  from google_cloud_pipeline_components.v1.batch_predict_job import ModelBatchPredictOp
24
24
  from kfp import dsl
25
+
26
+
25
27
  # pylint: disable=unused-argument, unexpected-keyword-arg
26
28
 
27
29
 
@@ -121,6 +121,7 @@ def autosxs_pipeline(
121
121
  project=project,
122
122
  location=location,
123
123
  encryption_spec_key_name=encryption_spec_key_name,
124
+ autorater_prompt_parameters=autorater_prompt_parameters,
124
125
  ).set_display_name('AutoSxS Autorater')
125
126
 
126
127
  metrics = model_evaluation_text_generation_pairwise.model_evaluation_text_generation_pairwise(
@@ -13,4 +13,4 @@
13
13
  # limitations under the License.
14
14
  """Google Cloud Pipeline Components version."""
15
15
 
16
- __version__ = "2.14.0"
16
+ __version__ = "2.14.1"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: google-cloud-pipeline-components
3
- Version: 2.14.0
3
+ Version: 2.14.1
4
4
  Summary: This SDK enables a set of First Party (Google owned) pipeline components that allow users to take their experience from Vertex AI SDK and other Google Cloud services and create a corresponding pipeline using KFP or Managed Pipelines.
5
5
  Home-page: https://github.com/kubeflow/pipelines/tree/master/components/google-cloud
6
6
  Author: The Google Cloud Pipeline Components authors