google-cloud-ai_platform-v1 0.1.0

Files changed (289)
  1. checksums.yaml +7 -0
  2. data/.yardopts +12 -0
  3. data/AUTHENTICATION.md +149 -0
  4. data/LICENSE.md +201 -0
  5. data/README.md +139 -0
  6. data/lib/google/cloud/ai_platform/v1/dataset_service/client.rb +1364 -0
  7. data/lib/google/cloud/ai_platform/v1/dataset_service/credentials.rb +47 -0
  8. data/lib/google/cloud/ai_platform/v1/dataset_service/operations.rb +767 -0
  9. data/lib/google/cloud/ai_platform/v1/dataset_service/paths.rb +111 -0
  10. data/lib/google/cloud/ai_platform/v1/dataset_service.rb +51 -0
  11. data/lib/google/cloud/ai_platform/v1/endpoint_service/client.rb +1076 -0
  12. data/lib/google/cloud/ai_platform/v1/endpoint_service/credentials.rb +47 -0
  13. data/lib/google/cloud/ai_platform/v1/endpoint_service/operations.rb +767 -0
  14. data/lib/google/cloud/ai_platform/v1/endpoint_service/paths.rb +124 -0
  15. data/lib/google/cloud/ai_platform/v1/endpoint_service.rb +50 -0
  16. data/lib/google/cloud/ai_platform/v1/featurestore_online_serving_service/client.rb +508 -0
  17. data/lib/google/cloud/ai_platform/v1/featurestore_online_serving_service/credentials.rb +47 -0
  18. data/lib/google/cloud/ai_platform/v1/featurestore_online_serving_service/paths.rb +54 -0
  19. data/lib/google/cloud/ai_platform/v1/featurestore_online_serving_service.rb +49 -0
  20. data/lib/google/cloud/ai_platform/v1/featurestore_service/client.rb +2707 -0
  21. data/lib/google/cloud/ai_platform/v1/featurestore_service/credentials.rb +47 -0
  22. data/lib/google/cloud/ai_platform/v1/featurestore_service/operations.rb +767 -0
  23. data/lib/google/cloud/ai_platform/v1/featurestore_service/paths.rb +113 -0
  24. data/lib/google/cloud/ai_platform/v1/featurestore_service.rb +50 -0
  25. data/lib/google/cloud/ai_platform/v1/index_endpoint_service/client.rb +1146 -0
  26. data/lib/google/cloud/ai_platform/v1/index_endpoint_service/credentials.rb +47 -0
  27. data/lib/google/cloud/ai_platform/v1/index_endpoint_service/operations.rb +767 -0
  28. data/lib/google/cloud/ai_platform/v1/index_endpoint_service/paths.rb +88 -0
  29. data/lib/google/cloud/ai_platform/v1/index_endpoint_service.rb +50 -0
  30. data/lib/google/cloud/ai_platform/v1/index_service/client.rb +823 -0
  31. data/lib/google/cloud/ai_platform/v1/index_service/credentials.rb +47 -0
  32. data/lib/google/cloud/ai_platform/v1/index_service/operations.rb +767 -0
  33. data/lib/google/cloud/ai_platform/v1/index_service/paths.rb +88 -0
  34. data/lib/google/cloud/ai_platform/v1/index_service.rb +50 -0
  35. data/lib/google/cloud/ai_platform/v1/job_service/client.rb +3236 -0
  36. data/lib/google/cloud/ai_platform/v1/job_service/credentials.rb +47 -0
  37. data/lib/google/cloud/ai_platform/v1/job_service/operations.rb +767 -0
  38. data/lib/google/cloud/ai_platform/v1/job_service/paths.rb +259 -0
  39. data/lib/google/cloud/ai_platform/v1/job_service.rb +50 -0
  40. data/lib/google/cloud/ai_platform/v1/metadata_service/client.rb +3654 -0
  41. data/lib/google/cloud/ai_platform/v1/metadata_service/credentials.rb +47 -0
  42. data/lib/google/cloud/ai_platform/v1/metadata_service/operations.rb +767 -0
  43. data/lib/google/cloud/ai_platform/v1/metadata_service/paths.rb +153 -0
  44. data/lib/google/cloud/ai_platform/v1/metadata_service.rb +50 -0
  45. data/lib/google/cloud/ai_platform/v1/migration_service/client.rb +538 -0
  46. data/lib/google/cloud/ai_platform/v1/migration_service/credentials.rb +47 -0
  47. data/lib/google/cloud/ai_platform/v1/migration_service/operations.rb +767 -0
  48. data/lib/google/cloud/ai_platform/v1/migration_service/paths.rb +148 -0
  49. data/lib/google/cloud/ai_platform/v1/migration_service.rb +51 -0
  50. data/lib/google/cloud/ai_platform/v1/model_service/client.rb +1355 -0
  51. data/lib/google/cloud/ai_platform/v1/model_service/credentials.rb +47 -0
  52. data/lib/google/cloud/ai_platform/v1/model_service/operations.rb +767 -0
  53. data/lib/google/cloud/ai_platform/v1/model_service/paths.rb +151 -0
  54. data/lib/google/cloud/ai_platform/v1/model_service.rb +50 -0
  55. data/lib/google/cloud/ai_platform/v1/pipeline_service/client.rb +1384 -0
  56. data/lib/google/cloud/ai_platform/v1/pipeline_service/credentials.rb +47 -0
  57. data/lib/google/cloud/ai_platform/v1/pipeline_service/operations.rb +767 -0
  58. data/lib/google/cloud/ai_platform/v1/pipeline_service/paths.rb +225 -0
  59. data/lib/google/cloud/ai_platform/v1/pipeline_service.rb +52 -0
  60. data/lib/google/cloud/ai_platform/v1/prediction_service/client.rb +650 -0
  61. data/lib/google/cloud/ai_platform/v1/prediction_service/credentials.rb +47 -0
  62. data/lib/google/cloud/ai_platform/v1/prediction_service/paths.rb +52 -0
  63. data/lib/google/cloud/ai_platform/v1/prediction_service.rb +49 -0
  64. data/lib/google/cloud/ai_platform/v1/specialist_pool_service/client.rb +826 -0
  65. data/lib/google/cloud/ai_platform/v1/specialist_pool_service/credentials.rb +47 -0
  66. data/lib/google/cloud/ai_platform/v1/specialist_pool_service/operations.rb +767 -0
  67. data/lib/google/cloud/ai_platform/v1/specialist_pool_service/paths.rb +69 -0
  68. data/lib/google/cloud/ai_platform/v1/specialist_pool_service.rb +55 -0
  69. data/lib/google/cloud/ai_platform/v1/tensorboard_service/client.rb +3224 -0
  70. data/lib/google/cloud/ai_platform/v1/tensorboard_service/credentials.rb +48 -0
  71. data/lib/google/cloud/ai_platform/v1/tensorboard_service/operations.rb +767 -0
  72. data/lib/google/cloud/ai_platform/v1/tensorboard_service/paths.rb +138 -0
  73. data/lib/google/cloud/ai_platform/v1/tensorboard_service.rb +50 -0
  74. data/lib/google/cloud/ai_platform/v1/version.rb +28 -0
  75. data/lib/google/cloud/ai_platform/v1/vizier_service/client.rb +1793 -0
  76. data/lib/google/cloud/ai_platform/v1/vizier_service/credentials.rb +47 -0
  77. data/lib/google/cloud/ai_platform/v1/vizier_service/operations.rb +767 -0
  78. data/lib/google/cloud/ai_platform/v1/vizier_service/paths.rb +109 -0
  79. data/lib/google/cloud/ai_platform/v1/vizier_service.rb +54 -0
  80. data/lib/google/cloud/ai_platform/v1.rb +52 -0
  81. data/lib/google/cloud/aiplatform/v1/accelerator_type_pb.rb +29 -0
  82. data/lib/google/cloud/aiplatform/v1/annotation_pb.rb +35 -0
  83. data/lib/google/cloud/aiplatform/v1/annotation_spec_pb.rb +30 -0
  84. data/lib/google/cloud/aiplatform/v1/artifact_pb.rb +45 -0
  85. data/lib/google/cloud/aiplatform/v1/batch_prediction_job_pb.rb +83 -0
  86. data/lib/google/cloud/aiplatform/v1/completion_stats_pb.rb +27 -0
  87. data/lib/google/cloud/aiplatform/v1/context_pb.rb +38 -0
  88. data/lib/google/cloud/aiplatform/v1/custom_job_pb.rb +84 -0
  89. data/lib/google/cloud/aiplatform/v1/data_item_pb.rb +32 -0
  90. data/lib/google/cloud/aiplatform/v1/data_labeling_job_pb.rb +78 -0
  91. data/lib/google/cloud/aiplatform/v1/dataset_pb.rb +53 -0
  92. data/lib/google/cloud/aiplatform/v1/dataset_service_pb.rb +126 -0
  93. data/lib/google/cloud/aiplatform/v1/dataset_service_services_pb.rb +64 -0
  94. data/lib/google/cloud/aiplatform/v1/deployed_index_ref_pb.rb +26 -0
  95. data/lib/google/cloud/aiplatform/v1/deployed_model_ref_pb.rb +26 -0
  96. data/lib/google/cloud/aiplatform/v1/encryption_spec_pb.rb +24 -0
  97. data/lib/google/cloud/aiplatform/v1/endpoint_pb.rb +67 -0
  98. data/lib/google/cloud/aiplatform/v1/endpoint_service_pb.rb +90 -0
  99. data/lib/google/cloud/aiplatform/v1/endpoint_service_services_pb.rb +58 -0
  100. data/lib/google/cloud/aiplatform/v1/entity_type_pb.rb +32 -0
  101. data/lib/google/cloud/aiplatform/v1/env_var_pb.rb +25 -0
  102. data/lib/google/cloud/aiplatform/v1/event_pb.rb +36 -0
  103. data/lib/google/cloud/aiplatform/v1/execution_pb.rb +48 -0
  104. data/lib/google/cloud/aiplatform/v1/explanation_metadata_pb.rb +107 -0
  105. data/lib/google/cloud/aiplatform/v1/explanation_pb.rb +106 -0
  106. data/lib/google/cloud/aiplatform/v1/feature_monitoring_stats_pb.rb +30 -0
  107. data/lib/google/cloud/aiplatform/v1/feature_pb.rb +46 -0
  108. data/lib/google/cloud/aiplatform/v1/feature_selector_pb.rb +28 -0
  109. data/lib/google/cloud/aiplatform/v1/featurestore_online_service_pb.rb +86 -0
  110. data/lib/google/cloud/aiplatform/v1/featurestore_online_service_services_pb.rb +51 -0
  111. data/lib/google/cloud/aiplatform/v1/featurestore_pb.rb +44 -0
  112. data/lib/google/cloud/aiplatform/v1/featurestore_service_pb.rb +280 -0
  113. data/lib/google/cloud/aiplatform/v1/featurestore_service_services_pb.rb +109 -0
  114. data/lib/google/cloud/aiplatform/v1/hyperparameter_tuning_job_pb.rb +46 -0
  115. data/lib/google/cloud/aiplatform/v1/index_endpoint_pb.rb +66 -0
  116. data/lib/google/cloud/aiplatform/v1/index_endpoint_service_pb.rb +101 -0
  117. data/lib/google/cloud/aiplatform/v1/index_endpoint_service_services_pb.rb +62 -0
  118. data/lib/google/cloud/aiplatform/v1/index_pb.rb +38 -0
  119. data/lib/google/cloud/aiplatform/v1/index_service_pb.rb +98 -0
  120. data/lib/google/cloud/aiplatform/v1/index_service_services_pb.rb +55 -0
  121. data/lib/google/cloud/aiplatform/v1/io_pb.rb +56 -0
  122. data/lib/google/cloud/aiplatform/v1/job_service_pb.rb +217 -0
  123. data/lib/google/cloud/aiplatform/v1/job_service_services_pb.rb +134 -0
  124. data/lib/google/cloud/aiplatform/v1/job_state_pb.rb +32 -0
  125. data/lib/google/cloud/aiplatform/v1/lineage_subgraph_pb.rb +28 -0
  126. data/lib/google/cloud/aiplatform/v1/machine_resources_pb.rb +59 -0
  127. data/lib/google/cloud/aiplatform/v1/manual_batch_tuning_parameters_pb.rb +24 -0
  128. data/lib/google/cloud/aiplatform/v1/metadata_schema_pb.rb +38 -0
  129. data/lib/google/cloud/aiplatform/v1/metadata_service_pb.rb +272 -0
  130. data/lib/google/cloud/aiplatform/v1/metadata_service_services_pb.rb +119 -0
  131. data/lib/google/cloud/aiplatform/v1/metadata_store_pb.rb +36 -0
  132. data/lib/google/cloud/aiplatform/v1/migratable_resource_pb.rb +59 -0
  133. data/lib/google/cloud/aiplatform/v1/migration_service_pb.rb +106 -0
  134. data/lib/google/cloud/aiplatform/v1/migration_service_services_pb.rb +51 -0
  135. data/lib/google/cloud/aiplatform/v1/model_deployment_monitoring_job_pb.rb +111 -0
  136. data/lib/google/cloud/aiplatform/v1/model_evaluation_pb.rb +33 -0
  137. data/lib/google/cloud/aiplatform/v1/model_evaluation_slice_pb.rb +36 -0
  138. data/lib/google/cloud/aiplatform/v1/model_monitoring_pb.rb +93 -0
  139. data/lib/google/cloud/aiplatform/v1/model_pb.rb +88 -0
  140. data/lib/google/cloud/aiplatform/v1/model_service_pb.rb +129 -0
  141. data/lib/google/cloud/aiplatform/v1/model_service_services_pb.rb +69 -0
  142. data/lib/google/cloud/aiplatform/v1/operation_pb.rb +32 -0
  143. data/lib/google/cloud/aiplatform/v1/pipeline_job_pb.rb +115 -0
  144. data/lib/google/cloud/aiplatform/v1/pipeline_service_pb.rb +88 -0
  145. data/lib/google/cloud/aiplatform/v1/pipeline_service_services_pb.rb +84 -0
  146. data/lib/google/cloud/aiplatform/v1/pipeline_state_pb.rb +31 -0
  147. data/lib/google/cloud/aiplatform/v1/prediction_service_pb.rb +57 -0
  148. data/lib/google/cloud/aiplatform/v1/prediction_service_services_pb.rb +66 -0
  149. data/lib/google/cloud/aiplatform/v1/specialist_pool_pb.rb +30 -0
  150. data/lib/google/cloud/aiplatform/v1/specialist_pool_service_pb.rb +66 -0
  151. data/lib/google/cloud/aiplatform/v1/specialist_pool_service_services_pb.rb +58 -0
  152. data/lib/google/cloud/aiplatform/v1/study_pb.rb +191 -0
  153. data/lib/google/cloud/aiplatform/v1/tensorboard_data_pb.rb +56 -0
  154. data/lib/google/cloud/aiplatform/v1/tensorboard_experiment_pb.rb +33 -0
  155. data/lib/google/cloud/aiplatform/v1/tensorboard_pb.rb +36 -0
  156. data/lib/google/cloud/aiplatform/v1/tensorboard_run_pb.rb +32 -0
  157. data/lib/google/cloud/aiplatform/v1/tensorboard_service_pb.rb +244 -0
  158. data/lib/google/cloud/aiplatform/v1/tensorboard_service_services_pb.rb +115 -0
  159. data/lib/google/cloud/aiplatform/v1/tensorboard_time_series_pb.rb +48 -0
  160. data/lib/google/cloud/aiplatform/v1/training_pipeline_pb.rb +95 -0
  161. data/lib/google/cloud/aiplatform/v1/types_pb.rb +35 -0
  162. data/lib/google/cloud/aiplatform/v1/unmanaged_container_model_pb.rb +27 -0
  163. data/lib/google/cloud/aiplatform/v1/user_action_reference_pb.rb +27 -0
  164. data/lib/google/cloud/aiplatform/v1/value_pb.rb +27 -0
  165. data/lib/google/cloud/aiplatform/v1/vizier_service_pb.rb +136 -0
  166. data/lib/google/cloud/aiplatform/v1/vizier_service_services_pb.rb +90 -0
  167. data/lib/google-cloud-ai_platform-v1.rb +21 -0
  168. data/proto_docs/README.md +4 -0
  169. data/proto_docs/google/api/field_behavior.rb +71 -0
  170. data/proto_docs/google/api/httpbody.rb +80 -0
  171. data/proto_docs/google/api/resource.rb +222 -0
  172. data/proto_docs/google/cloud/aiplatform/v1/accelerator_type.rb +50 -0
  173. data/proto_docs/google/cloud/aiplatform/v1/annotation.rb +92 -0
  174. data/proto_docs/google/cloud/aiplatform/v1/annotation_spec.rb +50 -0
  175. data/proto_docs/google/cloud/aiplatform/v1/artifact.rb +112 -0
  176. data/proto_docs/google/cloud/aiplatform/v1/batch_prediction_job.rb +278 -0
  177. data/proto_docs/google/cloud/aiplatform/v1/completion_stats.rb +46 -0
  178. data/proto_docs/google/cloud/aiplatform/v1/context.rb +92 -0
  179. data/proto_docs/google/cloud/aiplatform/v1/custom_job.rb +272 -0
  180. data/proto_docs/google/cloud/aiplatform/v1/data_item.rb +73 -0
  181. data/proto_docs/google/cloud/aiplatform/v1/data_labeling_job.rb +207 -0
  182. data/proto_docs/google/cloud/aiplatform/v1/dataset.rb +154 -0
  183. data/proto_docs/google/cloud/aiplatform/v1/dataset_service.rb +301 -0
  184. data/proto_docs/google/cloud/aiplatform/v1/deployed_index_ref.rb +38 -0
  185. data/proto_docs/google/cloud/aiplatform/v1/deployed_model_ref.rb +38 -0
  186. data/proto_docs/google/cloud/aiplatform/v1/encryption_spec.rb +40 -0
  187. data/proto_docs/google/cloud/aiplatform/v1/endpoint.rb +227 -0
  188. data/proto_docs/google/cloud/aiplatform/v1/endpoint_service.rb +258 -0
  189. data/proto_docs/google/cloud/aiplatform/v1/entity_type.rb +79 -0
  190. data/proto_docs/google/cloud/aiplatform/v1/env_var.rb +44 -0
  191. data/proto_docs/google/cloud/aiplatform/v1/event.rb +79 -0
  192. data/proto_docs/google/cloud/aiplatform/v1/execution.rb +118 -0
  193. data/proto_docs/google/cloud/aiplatform/v1/explanation.rb +445 -0
  194. data/proto_docs/google/cloud/aiplatform/v1/explanation_metadata.rb +419 -0
  195. data/proto_docs/google/cloud/aiplatform/v1/feature.rb +115 -0
  196. data/proto_docs/google/cloud/aiplatform/v1/feature_monitoring_stats.rb +88 -0
  197. data/proto_docs/google/cloud/aiplatform/v1/feature_selector.rb +49 -0
  198. data/proto_docs/google/cloud/aiplatform/v1/featurestore.rb +115 -0
  199. data/proto_docs/google/cloud/aiplatform/v1/featurestore_online_service.rb +203 -0
  200. data/proto_docs/google/cloud/aiplatform/v1/featurestore_service.rb +978 -0
  201. data/proto_docs/google/cloud/aiplatform/v1/hyperparameter_tuning_job.rb +109 -0
  202. data/proto_docs/google/cloud/aiplatform/v1/index.rb +98 -0
  203. data/proto_docs/google/cloud/aiplatform/v1/index_endpoint.rb +252 -0
  204. data/proto_docs/google/cloud/aiplatform/v1/index_endpoint_service.rb +240 -0
  205. data/proto_docs/google/cloud/aiplatform/v1/index_service.rb +220 -0
  206. data/proto_docs/google/cloud/aiplatform/v1/io.rb +134 -0
  207. data/proto_docs/google/cloud/aiplatform/v1/job_service.rb +660 -0
  208. data/proto_docs/google/cloud/aiplatform/v1/job_state.rb +60 -0
  209. data/proto_docs/google/cloud/aiplatform/v1/lineage_subgraph.rb +42 -0
  210. data/proto_docs/google/cloud/aiplatform/v1/machine_resources.rb +194 -0
  211. data/proto_docs/google/cloud/aiplatform/v1/manual_batch_tuning_parameters.rb +41 -0
  212. data/proto_docs/google/cloud/aiplatform/v1/metadata_schema.rb +74 -0
  213. data/proto_docs/google/cloud/aiplatform/v1/metadata_service.rb +912 -0
  214. data/proto_docs/google/cloud/aiplatform/v1/metadata_store.rb +62 -0
  215. data/proto_docs/google/cloud/aiplatform/v1/migratable_resource.rb +133 -0
  216. data/proto_docs/google/cloud/aiplatform/v1/migration_service.rb +260 -0
  217. data/proto_docs/google/cloud/aiplatform/v1/model.rb +562 -0
  218. data/proto_docs/google/cloud/aiplatform/v1/model_deployment_monitoring_job.rb +293 -0
  219. data/proto_docs/google/cloud/aiplatform/v1/model_evaluation.rb +60 -0
  220. data/proto_docs/google/cloud/aiplatform/v1/model_evaluation_slice.rb +68 -0
  221. data/proto_docs/google/cloud/aiplatform/v1/model_monitoring.rb +257 -0
  222. data/proto_docs/google/cloud/aiplatform/v1/model_service.rb +329 -0
  223. data/proto_docs/google/cloud/aiplatform/v1/operation.rb +55 -0
  224. data/proto_docs/google/cloud/aiplatform/v1/pipeline_job.rb +347 -0
  225. data/proto_docs/google/cloud/aiplatform/v1/pipeline_service.rb +258 -0
  226. data/proto_docs/google/cloud/aiplatform/v1/pipeline_state.rb +59 -0
  227. data/proto_docs/google/cloud/aiplatform/v1/prediction_service.rb +165 -0
  228. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/instance/image_classification.rb +52 -0
  229. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/instance/image_object_detection.rb +52 -0
  230. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/instance/image_segmentation.rb +47 -0
  231. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/instance/text_classification.rb +46 -0
  232. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/instance/text_extraction.rb +53 -0
  233. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/instance/text_sentiment.rb +46 -0
  234. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/instance/video_action_recognition.rb +59 -0
  235. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/instance/video_classification.rb +59 -0
  236. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/instance/video_object_tracking.rb +59 -0
  237. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/params/image_classification.rb +47 -0
  238. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/params/image_object_detection.rb +47 -0
  239. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/params/image_segmentation.rb +44 -0
  240. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/params/video_action_recognition.rb +47 -0
  241. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/params/video_classification.rb +72 -0
  242. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/params/video_object_tracking.rb +51 -0
  243. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/prediction/classification.rb +49 -0
  244. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/prediction/image_object_detection.rb +58 -0
  245. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/prediction/image_segmentation.rb +53 -0
  246. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/prediction/tabular_classification.rb +47 -0
  247. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/prediction/tabular_regression.rb +47 -0
  248. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/prediction/text_extraction.rb +60 -0
  249. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/prediction/text_sentiment.rb +45 -0
  250. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/prediction/video_action_recognition.rb +60 -0
  251. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/prediction/video_classification.rb +73 -0
  252. data/proto_docs/google/cloud/aiplatform/v1/schema/predict/prediction/video_object_tracking.rb +91 -0
  253. data/proto_docs/google/cloud/aiplatform/v1/schema/trainingjob/definition/automl_image_classification.rb +142 -0
  254. data/proto_docs/google/cloud/aiplatform/v1/schema/trainingjob/definition/automl_image_object_detection.rb +134 -0
  255. data/proto_docs/google/cloud/aiplatform/v1/schema/trainingjob/definition/automl_image_segmentation.rb +120 -0
  256. data/proto_docs/google/cloud/aiplatform/v1/schema/trainingjob/definition/automl_tables.rb +315 -0
  257. data/proto_docs/google/cloud/aiplatform/v1/schema/trainingjob/definition/automl_text_classification.rb +48 -0
  258. data/proto_docs/google/cloud/aiplatform/v1/schema/trainingjob/definition/automl_text_extraction.rb +46 -0
  259. data/proto_docs/google/cloud/aiplatform/v1/schema/trainingjob/definition/automl_text_sentiment.rb +55 -0
  260. data/proto_docs/google/cloud/aiplatform/v1/schema/trainingjob/definition/automl_video_action_recognition.rb +73 -0
  261. data/proto_docs/google/cloud/aiplatform/v1/schema/trainingjob/definition/automl_video_classification.rb +67 -0
  262. data/proto_docs/google/cloud/aiplatform/v1/schema/trainingjob/definition/automl_video_object_tracking.rb +78 -0
  263. data/proto_docs/google/cloud/aiplatform/v1/schema/trainingjob/definition/export_evaluated_data_items_config.rb +51 -0
  264. data/proto_docs/google/cloud/aiplatform/v1/specialist_pool.rb +58 -0
  265. data/proto_docs/google/cloud/aiplatform/v1/specialist_pool_service.rb +136 -0
  266. data/proto_docs/google/cloud/aiplatform/v1/study.rb +543 -0
  267. data/proto_docs/google/cloud/aiplatform/v1/tensorboard.rb +89 -0
  268. data/proto_docs/google/cloud/aiplatform/v1/tensorboard_data.rb +110 -0
  269. data/proto_docs/google/cloud/aiplatform/v1/tensorboard_experiment.rb +82 -0
  270. data/proto_docs/google/cloud/aiplatform/v1/tensorboard_run.rb +85 -0
  271. data/proto_docs/google/cloud/aiplatform/v1/tensorboard_service.rb +706 -0
  272. data/proto_docs/google/cloud/aiplatform/v1/tensorboard_time_series.rb +101 -0
  273. data/proto_docs/google/cloud/aiplatform/v1/training_pipeline.rb +381 -0
  274. data/proto_docs/google/cloud/aiplatform/v1/types.rb +62 -0
  275. data/proto_docs/google/cloud/aiplatform/v1/unmanaged_container_model.rb +44 -0
  276. data/proto_docs/google/cloud/aiplatform/v1/user_action_reference.rb +49 -0
  277. data/proto_docs/google/cloud/aiplatform/v1/value.rb +41 -0
  278. data/proto_docs/google/cloud/aiplatform/v1/vizier_service.rb +332 -0
  279. data/proto_docs/google/longrunning/operations.rb +164 -0
  280. data/proto_docs/google/protobuf/any.rb +141 -0
  281. data/proto_docs/google/protobuf/duration.rb +98 -0
  282. data/proto_docs/google/protobuf/empty.rb +36 -0
  283. data/proto_docs/google/protobuf/field_mask.rb +229 -0
  284. data/proto_docs/google/protobuf/struct.rb +96 -0
  285. data/proto_docs/google/protobuf/timestamp.rb +129 -0
  286. data/proto_docs/google/protobuf/wrappers.rb +121 -0
  287. data/proto_docs/google/rpc/status.rb +46 -0
  288. data/proto_docs/google/type/money.rb +43 -0
  289. metadata +479 -0
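For orientation, here is a minimal, hedged sketch of how one of the generated clients listed above is typically used. It assumes the gem is installed, that Application Default Credentials are configured, and that the project and location strings are placeholders; the client class comes from data/lib/google/cloud/ai_platform/v1/dataset_service/client.rb in the list above.

```ruby
# Minimal usage sketch (assumes Application Default Credentials;
# "my-project" and "us-central1" are placeholder values).
require "google/cloud/ai_platform/v1"

# Instantiate the generated DatasetService client shipped in this gem.
client = Google::Cloud::AIPlatform::V1::DatasetService::Client.new

# List datasets under a location; the response is a paged enumerable.
parent = "projects/my-project/locations/us-central1"
client.list_datasets(parent: parent).each do |dataset|
  puts dataset.display_name
end
```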
data/proto_docs/google/cloud/aiplatform/v1/explanation_metadata.rb
@@ -0,0 +1,419 @@
+ # frozen_string_literal: true
+
+ # Copyright 2022 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+
+ module Google
+ module Cloud
+ module AIPlatform
+ module V1
+ # Metadata describing the Model's input and output for explanation.
+ # @!attribute [rw] inputs
+ # @return [::Google::Protobuf::Map{::String => ::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata}]
+ # Required. Map from feature names to feature input metadata. Keys are the names of the
+ # features. Values are the specification of the feature.
+ #
+ # An empty InputMetadata is valid. It describes a text feature which has the
+ # name specified as the key in {::Google::Cloud::AIPlatform::V1::ExplanationMetadata#inputs ExplanationMetadata.inputs}. The baseline
+ # of the empty feature is chosen by Vertex AI.
+ #
+ # For Vertex AI-provided Tensorflow images, the key can be any friendly
+ # name of the feature. Once specified,
+ # {::Google::Cloud::AIPlatform::V1::Attribution#feature_attributions featureAttributions} are keyed by
+ # this key (if not grouped with another feature).
+ #
+ # For custom images, the key must match with the key in
+ # {::Google::Cloud::AIPlatform::V1::ExplainRequest#instances instance}.
+ # @!attribute [rw] outputs
+ # @return [::Google::Protobuf::Map{::String => ::Google::Cloud::AIPlatform::V1::ExplanationMetadata::OutputMetadata}]
+ # Required. Map from output names to output metadata.
+ #
+ # For Vertex AI-provided Tensorflow images, keys can be any user defined
+ # string that consists of any UTF-8 characters.
+ #
+ # For custom images, keys are the name of the output field in the prediction
+ # to be explained.
+ #
+ # Currently only one key is allowed.
+ # @!attribute [rw] feature_attributions_schema_uri
+ # @return [::String]
+ # Points to a YAML file stored on Google Cloud Storage describing the format
+ # of the {::Google::Cloud::AIPlatform::V1::Attribution#feature_attributions feature attributions}.
+ # The schema is defined as an OpenAPI 3.0.2 [Schema
+ # Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject).
+ # AutoML tabular Models always have this field populated by Vertex AI.
+ # Note: The URI given on output may be different, including the URI scheme,
+ # than the one given on input. The output URI will point to a location where
+ # the user only has read access.
+ class ExplanationMetadata
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # Metadata of the input of a feature.
+ #
+ # Fields other than {::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata#input_baselines InputMetadata.input_baselines} are applicable only
+ # for Models that are using Vertex AI-provided images for Tensorflow.
+ # @!attribute [rw] input_baselines
+ # @return [::Array<::Google::Protobuf::Value>]
+ # Baseline inputs for this feature.
+ #
+ # If no baseline is specified, Vertex AI chooses the baseline for this
+ # feature. If multiple baselines are specified, Vertex AI returns the
+ # average attributions across them in {::Google::Cloud::AIPlatform::V1::Attribution#feature_attributions Attribution.feature_attributions}.
+ #
+ # For Vertex AI-provided Tensorflow images (both 1.x and 2.x), the shape
+ # of each baseline must match the shape of the input tensor. If a scalar is
+ # provided, we broadcast to the same shape as the input tensor.
+ #
+ # For custom images, the element of the baselines must be in the same
+ # format as the feature's input in the
+ # {::Google::Cloud::AIPlatform::V1::ExplainRequest#instances instance}[]. The schema of any single instance
+ # may be specified via Endpoint's DeployedModels'
+ # [Model's][google.cloud.aiplatform.v1.DeployedModel.model]
+ # [PredictSchemata's][google.cloud.aiplatform.v1.Model.predict_schemata]
+ # {::Google::Cloud::AIPlatform::V1::PredictSchemata#instance_schema_uri instance_schema_uri}.
+ # @!attribute [rw] input_tensor_name
+ # @return [::String]
+ # Name of the input tensor for this feature. Required and is only
+ # applicable to Vertex AI-provided images for Tensorflow.
+ # @!attribute [rw] encoding
+ # @return [::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata::Encoding]
+ # Defines how the feature is encoded into the input tensor. Defaults to
+ # IDENTITY.
+ # @!attribute [rw] modality
+ # @return [::String]
+ # Modality of the feature. Valid values are: numeric, image. Defaults to
+ # numeric.
+ # @!attribute [rw] feature_value_domain
+ # @return [::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata::FeatureValueDomain]
+ # The domain details of the input feature value. Like min/max, original
+ # mean or standard deviation if normalized.
+ # @!attribute [rw] indices_tensor_name
+ # @return [::String]
+ # Specifies the index of the values of the input tensor.
+ # Required when the input tensor is a sparse representation. Refer to
+ # Tensorflow documentation for more details:
+ # https://www.tensorflow.org/api_docs/python/tf/sparse/SparseTensor.
+ # @!attribute [rw] dense_shape_tensor_name
+ # @return [::String]
+ # Specifies the shape of the values of the input if the input is a sparse
+ # representation. Refer to Tensorflow documentation for more details:
+ # https://www.tensorflow.org/api_docs/python/tf/sparse/SparseTensor.
+ # @!attribute [rw] index_feature_mapping
+ # @return [::Array<::String>]
+ # A list of feature names for each index in the input tensor.
+ # Required when the input {::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata#encoding InputMetadata.encoding} is BAG_OF_FEATURES,
+ # BAG_OF_FEATURES_SPARSE, INDICATOR.
+ # @!attribute [rw] encoded_tensor_name
+ # @return [::String]
+ # Encoded tensor is a transformation of the input tensor. Must be provided
+ # if choosing
+ # {::Google::Cloud::AIPlatform::V1::ExplanationParameters#integrated_gradients_attribution Integrated Gradients attribution}
+ # or {::Google::Cloud::AIPlatform::V1::ExplanationParameters#xrai_attribution XRAI attribution} and the
+ # input tensor is not differentiable.
+ #
+ # An encoded tensor is generated if the input tensor is encoded by a lookup
+ # table.
+ # @!attribute [rw] encoded_baselines
+ # @return [::Array<::Google::Protobuf::Value>]
+ # A list of baselines for the encoded tensor.
+ #
+ # The shape of each baseline should match the shape of the encoded tensor.
+ # If a scalar is provided, Vertex AI broadcasts to the same shape as the
+ # encoded tensor.
+ # @!attribute [rw] visualization
+ # @return [::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata::Visualization]
+ # Visualization configurations for image explanation.
+ # @!attribute [rw] group_name
+ # @return [::String]
+ # Name of the group that the input belongs to. Features with the same group
+ # name will be treated as one feature when computing attributions. Features
+ # grouped together can have different shapes in value. If provided, there
+ # will be one single attribution generated in
+ # {::Google::Cloud::AIPlatform::V1::Attribution#feature_attributions Attribution.feature_attributions}, keyed by the group name.
+ class InputMetadata
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # Domain details of the input feature value. Provides numeric information
+ # about the feature, such as its range (min, max). If the feature has been
+ # pre-processed, for example with z-scoring, then it provides information
+ # about how to recover the original feature. For example, if the input
+ # feature is an image and it has been pre-processed to obtain 0-mean and
+ # stddev = 1 values, then original_mean, and original_stddev refer to the
+ # mean and stddev of the original feature (e.g. image tensor) from which
+ # input feature (with mean = 0 and stddev = 1) was obtained.
+ # @!attribute [rw] min_value
+ # @return [::Float]
+ # The minimum permissible value for this feature.
+ # @!attribute [rw] max_value
+ # @return [::Float]
+ # The maximum permissible value for this feature.
+ # @!attribute [rw] original_mean
+ # @return [::Float]
+ # If this input feature has been normalized to a mean value of 0,
+ # the original_mean specifies the mean value of the domain prior to
+ # normalization.
+ # @!attribute [rw] original_stddev
+ # @return [::Float]
+ # If this input feature has been normalized to a standard deviation of
+ # 1.0, the original_stddev specifies the standard deviation of the domain
+ # prior to normalization.
+ class FeatureValueDomain
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # Visualization configurations for image explanation.
+ # @!attribute [rw] type
+ # @return [::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata::Visualization::Type]
+ # Type of the image visualization. Only applicable to
+ # {::Google::Cloud::AIPlatform::V1::ExplanationParameters#integrated_gradients_attribution Integrated Gradients attribution}.
+ # OUTLINES shows regions of attribution, while PIXELS shows per-pixel
+ # attribution. Defaults to OUTLINES.
+ # @!attribute [rw] polarity
+ # @return [::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata::Visualization::Polarity]
+ # Whether to only highlight pixels with positive contributions, negative
+ # or both. Defaults to POSITIVE.
+ # @!attribute [rw] color_map
+ # @return [::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata::Visualization::ColorMap]
+ # The color scheme used for the highlighted areas.
+ #
+ # Defaults to PINK_GREEN for
+ # {::Google::Cloud::AIPlatform::V1::ExplanationParameters#integrated_gradients_attribution Integrated Gradients attribution},
+ # which shows positive attributions in green and negative in pink.
+ #
+ # Defaults to VIRIDIS for
+ # {::Google::Cloud::AIPlatform::V1::ExplanationParameters#xrai_attribution XRAI attribution}, which
+ # highlights the most influential regions in yellow and the least
+ # influential in blue.
+ # @!attribute [rw] clip_percent_upperbound
+ # @return [::Float]
+ # Excludes attributions above the specified percentile from the
+ # highlighted areas. Using the clip_percent_upperbound and
+ # clip_percent_lowerbound together can be useful for filtering out noise
+ # and making it easier to see areas of strong attribution. Defaults to
+ # 99.9.
+ # @!attribute [rw] clip_percent_lowerbound
+ # @return [::Float]
+ # Excludes attributions below the specified percentile, from the
+ # highlighted areas. Defaults to 62.
+ # @!attribute [rw] overlay_type
+ # @return [::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata::Visualization::OverlayType]
+ # How the original image is displayed in the visualization.
+ # Adjusting the overlay can help increase visual clarity if the original
+ # image makes it difficult to view the visualization. Defaults to NONE.
+ class Visualization
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # Type of the image visualization. Only applicable to
+ # {::Google::Cloud::AIPlatform::V1::ExplanationParameters#integrated_gradients_attribution Integrated Gradients attribution}.
+ module Type
+ # Should not be used.
+ TYPE_UNSPECIFIED = 0
+
+ # Shows which pixel contributed to the image prediction.
+ PIXELS = 1
+
+ # Shows which region contributed to the image prediction by outlining
+ # the region.
+ OUTLINES = 2
+ end
+
+ # Whether to only highlight pixels with positive contributions, negative
+ # or both. Defaults to POSITIVE.
+ module Polarity
+ # Default value. This is the same as POSITIVE.
+ POLARITY_UNSPECIFIED = 0
+
+ # Highlights the pixels/outlines that were most influential to the
+ # model's prediction.
+ POSITIVE = 1
+
+ # Setting polarity to negative highlights areas that do not lead to
+ # the model's current prediction.
+ NEGATIVE = 2
+
+ # Shows both positive and negative attributions.
+ BOTH = 3
+ end
+
+ # The color scheme used for highlighting areas.
+ module ColorMap
+ # Should not be used.
+ COLOR_MAP_UNSPECIFIED = 0
+
+ # Positive: green. Negative: pink.
+ PINK_GREEN = 1
+
+ # Viridis color map: A perceptually uniform color mapping which is
+ # easier to see by those with colorblindness and progresses from yellow
+ # to green to blue. Positive: yellow. Negative: blue.
+ VIRIDIS = 2
+
+ # Positive: red. Negative: red.
+ RED = 3
+
+ # Positive: green. Negative: green.
+ GREEN = 4
+
+ # Positive: green. Negative: red.
+ RED_GREEN = 6
+
+ # PiYG palette.
+ PINK_WHITE_GREEN = 5
+ end
+
+ # How the original image is displayed in the visualization.
+ module OverlayType
+ # Default value. This is the same as NONE.
+ OVERLAY_TYPE_UNSPECIFIED = 0
+
+ # No overlay.
+ NONE = 1
+
+ # The attributions are shown on top of the original image.
+ ORIGINAL = 2
+
+ # The attributions are shown on top of a grayscaled version of the
+ # original image.
+ GRAYSCALE = 3
+
+ # The attributions are used as a mask to reveal predictive parts of
+ # the image and hide the un-predictive parts.
+ MASK_BLACK = 4
+ end
+ end
+
+ # Defines how a feature is encoded. Defaults to IDENTITY.
+ module Encoding
+ # Default value. This is the same as IDENTITY.
+ ENCODING_UNSPECIFIED = 0
+
+ # The tensor represents one feature.
+ IDENTITY = 1
+
+ # The tensor represents a bag of features where each index maps to
+ # a feature. {::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata#index_feature_mapping InputMetadata.index_feature_mapping} must be provided for
+ # this encoding. For example:
+ # ```
+ # input = [27, 6.0, 150]
+ # index_feature_mapping = ["age", "height", "weight"]
+ # ```
+ BAG_OF_FEATURES = 2
+
+ # The tensor represents a bag of features where each index maps to a
+ # feature. Zero values in the tensor indicate the feature being
+ # non-existent. {::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata#index_feature_mapping InputMetadata.index_feature_mapping} must be provided
+ # for this encoding. For example:
+ # ```
+ # input = [2, 0, 5, 0, 1]
+ # index_feature_mapping = ["a", "b", "c", "d", "e"]
+ # ```
+ BAG_OF_FEATURES_SPARSE = 3
+
+ # The tensor is a list of binaries representing whether a feature exists
+ # or not (1 indicates existence). {::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata#index_feature_mapping InputMetadata.index_feature_mapping}
+ # must be provided for this encoding. For example:
+ # ```
+ # input = [1, 0, 1, 0, 1]
+ # index_feature_mapping = ["a", "b", "c", "d", "e"]
+ # ```
+ INDICATOR = 4
+
+ # The tensor is encoded into a 1-dimensional array represented by an
+ # encoded tensor. {::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata#encoded_tensor_name InputMetadata.encoded_tensor_name} must be provided
+ # for this encoding. For example:
+ # ```
+ # input = ["This", "is", "a", "test", "."]
+ # encoded = [0.1, 0.2, 0.3, 0.4, 0.5]
+ # ```
+ COMBINED_EMBEDDING = 5
+
+ # Select this encoding when the input tensor is encoded into a
+ # 2-dimensional array represented by an encoded tensor.
+ # {::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata#encoded_tensor_name InputMetadata.encoded_tensor_name} must be provided for this
+ # encoding. The first dimension of the encoded tensor's shape is the same
+ # as the input tensor's shape. For example:
+ # ```
+ # input = ["This", "is", "a", "test", "."]
+ # encoded = [[0.1, 0.2, 0.3, 0.4, 0.5],
+ # [0.2, 0.1, 0.4, 0.3, 0.5],
+ # [0.5, 0.1, 0.3, 0.5, 0.4],
+ # [0.5, 0.3, 0.1, 0.2, 0.4],
+ # [0.4, 0.3, 0.2, 0.5, 0.1]]
+ # ```
+ CONCAT_EMBEDDING = 6
+ end
+ end
+
+ # Metadata of the prediction output to be explained.
+ # @!attribute [rw] index_display_name_mapping
+ # @return [::Google::Protobuf::Value]
+ # Static mapping between the index and display name.
+ #
+ # Use this if the outputs are a deterministic n-dimensional array, e.g. a
+ # list of scores of all the classes in a pre-defined order for a
+ # multi-classification Model. It's not feasible if the outputs are
+ # non-deterministic, e.g. the Model produces top-k classes or sorts the
+ # outputs by their values.
+ #
+ # The shape of the value must be an n-dimensional array of strings. The
+ # number of dimensions must match that of the outputs to be explained.
+ # The {::Google::Cloud::AIPlatform::V1::Attribution#output_display_name Attribution.output_display_name} is populated by locating in the
+ # mapping with {::Google::Cloud::AIPlatform::V1::Attribution#output_index Attribution.output_index}.
+ # @!attribute [rw] display_name_mapping_key
+ # @return [::String]
+ # Specify a field name in the prediction to look for the display name.
+ #
+ # Use this if the prediction contains the display names for the outputs.
+ #
+ # The display names in the prediction must have the same shape as the
+ # outputs, so that it can be located by {::Google::Cloud::AIPlatform::V1::Attribution#output_index Attribution.output_index} for
+ # a specific output.
+ # @!attribute [rw] output_tensor_name
+ # @return [::String]
+ # Name of the output tensor. Required and is only applicable to Vertex
+ # AI provided images for Tensorflow.
+ class OutputMetadata
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # @!attribute [rw] key
+ # @return [::String]
+ # @!attribute [rw] value
+ # @return [::Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata]
+ class InputsEntry
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # @!attribute [rw] key
+ # @return [::String]
+ # @!attribute [rw] value
+ # @return [::Google::Cloud::AIPlatform::V1::ExplanationMetadata::OutputMetadata]
+ class OutputsEntry
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+ end
+ end
+ end
+ end
+ end
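The message documented above is plain protobuf, so it can be constructed directly in Ruby. The following is a minimal sketch, assuming a hypothetical model with a single numeric feature and one output; the tensor names (`dense_input`, `dense_output`) and feature names are placeholders, not values defined by this gem.

```ruby
require "google/cloud/ai_platform/v1"

# Hedged sketch: tensor and feature names below are hypothetical and would
# come from the model's serving signature in a real deployment.
metadata = Google::Cloud::AIPlatform::V1::ExplanationMetadata.new(
  inputs: {
    # One entry of the `inputs` map: feature name => InputMetadata.
    "age" => Google::Cloud::AIPlatform::V1::ExplanationMetadata::InputMetadata.new(
      input_tensor_name: "dense_input",   # required for TF prebuilt images
      encoding:          :IDENTITY,       # the tensor holds exactly one feature
      modality:          "numeric",
      input_baselines:   [Google::Protobuf::Value.new(number_value: 0.0)]
    )
  },
  outputs: {
    # One entry of the `outputs` map: output name => OutputMetadata.
    "score" => Google::Cloud::AIPlatform::V1::ExplanationMetadata::OutputMetadata.new(
      output_tensor_name: "dense_output"
    )
  }
)
```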
data/proto_docs/google/cloud/aiplatform/v1/feature.rb
@@ -0,0 +1,115 @@
+ # frozen_string_literal: true
+
+ # Copyright 2022 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+
+ module Google
+ module Cloud
+ module AIPlatform
+ module V1
+ # Feature Metadata information that describes an attribute of an entity type.
+ # For example, apple is an entity type, and color is a feature that describes
+ # apple.
+ # @!attribute [rw] name
+ # @return [::String]
+ # Immutable. Name of the Feature.
+ # Format:
+ # `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}`
+ #
+ # The last part, feature, is assigned by the client. The feature can be up to
+ # 64 characters long and can consist only of ASCII Latin letters A-Z and a-z,
+ # underscores (_), and ASCII digits 0-9, starting with a letter. The value will
+ # be unique given an entity type.
+ # @!attribute [rw] description
+ # @return [::String]
+ # Description of the Feature.
+ # @!attribute [rw] value_type
+ # @return [::Google::Cloud::AIPlatform::V1::Feature::ValueType]
+ # Required. Immutable. Type of Feature value.
+ # @!attribute [r] create_time
+ # @return [::Google::Protobuf::Timestamp]
+ # Output only. Timestamp when this Feature was created.
+ # @!attribute [r] update_time
+ # @return [::Google::Protobuf::Timestamp]
+ # Output only. Timestamp when this Feature was most recently updated.
+ # @!attribute [rw] labels
+ # @return [::Google::Protobuf::Map{::String => ::String}]
+ # Optional. The labels with user-defined metadata to organize your Features.
+ #
+ # Label keys and values can be no longer than 64 characters
+ # (Unicode codepoints), can only contain lowercase letters, numeric
+ # characters, underscores and dashes. International characters are allowed.
+ #
+ # See https://goo.gl/xmQnxf for more information on and examples of labels.
+ # No more than 64 user labels can be associated with one Feature (System
+ # labels are excluded).
+ # System reserved label keys are prefixed with "aiplatform.googleapis.com/"
+ # and are immutable.
+ # @!attribute [rw] etag
+ # @return [::String]
+ # Used to perform consistent read-modify-write updates. If not set, a blind
+ # "overwrite" update happens.
+ class Feature
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # @!attribute [rw] key
+ # @return [::String]
+ # @!attribute [rw] value
+ # @return [::String]
+ class LabelsEntry
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # An enum representing the value type of a feature.
+ module ValueType
+ # The value type is unspecified.
+ VALUE_TYPE_UNSPECIFIED = 0
+
+ # Used for Feature that is a boolean.
+ BOOL = 1
+
+ # Used for Feature that is a list of boolean.
+ BOOL_ARRAY = 2
+
+ # Used for Feature that is double.
+ DOUBLE = 3
+
+ # Used for Feature that is a list of double.
+ DOUBLE_ARRAY = 4
+
+ # Used for Feature that is INT64.
+ INT64 = 9
+
+ # Used for Feature that is a list of INT64.
+ INT64_ARRAY = 10
+
+ # Used for Feature that is string.
+ STRING = 11
+
+ # Used for Feature that is a list of String.
+ STRING_ARRAY = 12
+
+ # Used for Feature that is bytes.
+ BYTES = 13
+ end
+ end
+ end
+ end
+ end
+ end
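As a rough illustration of how this message is used, here is a hedged Ruby sketch that builds a Feature and creates it through the gem's FeaturestoreService client (listed in the files above). The project, featurestore, and entity type paths are placeholders, and the long-running-operation handling follows the usual generated-client conventions rather than anything specific to this release.

```ruby
require "google/cloud/ai_platform/v1"

# Describe the Feature itself (see the ValueType enum above).
feature = Google::Cloud::AIPlatform::V1::Feature.new(
  description: "Dominant color of the apple",
  value_type:  :STRING,
  labels:      { "team" => "produce" }  # optional user labels
)

# Creating it goes through the FeaturestoreService client from this gem;
# the parent below is a placeholder for an existing EntityType resource.
client = Google::Cloud::AIPlatform::V1::FeaturestoreService::Client.new
parent = "projects/my-project/locations/us-central1/featurestores/my-store/entityTypes/apple"

# create_feature returns a long-running operation wrapper.
operation = client.create_feature parent: parent, feature: feature, feature_id: "color"
operation.wait_until_done!
p operation.response if operation.response?
```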
data/proto_docs/google/cloud/aiplatform/v1/feature_monitoring_stats.rb
@@ -0,0 +1,88 @@
+ # frozen_string_literal: true
+
+ # Copyright 2022 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+
+ module Google
+ module Cloud
+ module AIPlatform
+ module V1
+ # Stats and Anomaly generated at a specific timestamp for a specific Feature.
+ # The start_time and end_time are used to define the time range of the dataset
+ # that current stats belongs to, e.g. prediction traffic is bucketed into
+ # prediction datasets by time window. If the Dataset is not defined by time
+ # window, start_time = end_time. Timestamp of the stats and anomalies always
+ # refers to end_time. Raw stats and anomalies are stored in stats_uri or
+ # anomaly_uri in the tensorflow defined protos. Field data_stats contains
+ # almost identical information to the raw stats in Vertex AI
+ # defined proto, for UI to display.
+ # @!attribute [rw] score
+ # @return [::Float]
+ # Feature importance score, only populated when cross-feature monitoring is
+ # enabled. For now only used to represent feature attribution score within
+ # range [0, 1] for
+ # {::Google::Cloud::AIPlatform::V1::ModelDeploymentMonitoringObjectiveType::FEATURE_ATTRIBUTION_SKEW ModelDeploymentMonitoringObjectiveType.FEATURE_ATTRIBUTION_SKEW} and
+ # {::Google::Cloud::AIPlatform::V1::ModelDeploymentMonitoringObjectiveType::FEATURE_ATTRIBUTION_DRIFT ModelDeploymentMonitoringObjectiveType.FEATURE_ATTRIBUTION_DRIFT}.
+ # @!attribute [rw] stats_uri
+ # @return [::String]
+ # Path of the stats file for current feature values in Cloud Storage bucket.
+ # Format: gs://<bucket_name>/<object_name>/stats.
+ # Example: gs://monitoring_bucket/feature_name/stats.
+ # Stats are stored as binary format with Protobuf message
+ # [tensorflow.metadata.v0.FeatureNameStatistics](https://github.com/tensorflow/metadata/blob/master/tensorflow_metadata/proto/v0/statistics.proto).
+ # @!attribute [rw] anomaly_uri
+ # @return [::String]
+ # Path of the anomaly file for current feature values in Cloud Storage
+ # bucket.
+ # Format: gs://<bucket_name>/<object_name>/anomalies.
+ # Example: gs://monitoring_bucket/feature_name/anomalies.
+ # Anomalies are stored as binary format with Protobuf message
+ # [tensorflow.metadata.v0.AnomalyInfo]
+ # (https://github.com/tensorflow/metadata/blob/master/tensorflow_metadata/proto/v0/anomalies.proto).
+ # @!attribute [rw] distribution_deviation
+ # @return [::Float]
+ # Deviation from the current stats to baseline stats.
+ # 1. For categorical feature, the distribution distance is calculated by
+ # L-infinity norm.
+ # 2. For numerical feature, the distribution distance is calculated by
+ # Jensen–Shannon divergence.
+ # @!attribute [rw] anomaly_detection_threshold
+ # @return [::Float]
+ # This is the threshold used when detecting anomalies.
+ # The threshold can be changed by the user, so this one might be different from
+ # {::Google::Cloud::AIPlatform::V1::ThresholdConfig#value ThresholdConfig.value}.
+ # @!attribute [rw] start_time
+ # @return [::Google::Protobuf::Timestamp]
+ # The start timestamp of window where stats were generated.
+ # For objectives where time window doesn't make sense (e.g. Featurestore
+ # Snapshot Monitoring), start_time is only used to indicate the monitoring
+ # intervals, so it always equals to (end_time - monitoring_interval).
+ # @!attribute [rw] end_time
+ # @return [::Google::Protobuf::Timestamp]
+ # The end timestamp of window where stats were generated.
+ # For objectives where time window doesn't make sense (e.g. Featurestore
+ # Snapshot Monitoring), end_time indicates the timestamp of the data used to
+ # generate stats (e.g. timestamp we take snapshots for feature values).
+ class FeatureStatsAnomaly
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+ end
+ end
+ end
+ end
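A small, self-contained Ruby sketch of how the fields above relate to each other; the message is constructed locally purely for illustration, whereas real values come back from the monitoring APIs, and the sample numbers are made up.

```ruby
require "google/cloud/ai_platform/v1"

# Locally built example of the message documented above.
stats = Google::Cloud::AIPlatform::V1::FeatureStatsAnomaly.new(
  score:                       0.42,
  stats_uri:                   "gs://monitoring_bucket/feature_name/stats",
  anomaly_uri:                 "gs://monitoring_bucket/feature_name/anomalies",
  distribution_deviation:      0.31,
  anomaly_detection_threshold: 0.30
)

# A deviation above the configured threshold is what the service flags as
# an anomaly for this feature and time window.
if stats.distribution_deviation > stats.anomaly_detection_threshold
  puts "Feature drift detected (deviation #{stats.distribution_deviation})"
end
```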
data/proto_docs/google/cloud/aiplatform/v1/feature_selector.rb
@@ -0,0 +1,49 @@
+ # frozen_string_literal: true
+
+ # Copyright 2022 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+
+ module Google
+ module Cloud
+ module AIPlatform
+ module V1
+ # Matcher for Features of an EntityType by Feature ID.
+ # @!attribute [rw] ids
+ # @return [::Array<::String>]
+ # Required. The following are accepted as `ids`:
+ #
+ # * A single-element list containing only `*`, which selects all Features
+ # in the target EntityType, or
+ # * A list containing only Feature IDs, which selects only Features with
+ # those IDs in the target EntityType.
+ class IdMatcher
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # Selector for Features of an EntityType.
+ # @!attribute [rw] id_matcher
+ # @return [::Google::Cloud::AIPlatform::V1::IdMatcher]
+ # Required. Matches Features based on ID.
+ class FeatureSelector
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+ end
+ end
+ end
+ end
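A brief Ruby sketch of the two accepted forms of `ids` described above. The selector itself is then attached to featurestore read or export requests (for example the online-serving read path exposed by the featurestore_online_serving_service client listed above); only the message construction is shown here.

```ruby
require "google/cloud/ai_platform/v1"

# Select two specific Features of an EntityType by ID.
selector = Google::Cloud::AIPlatform::V1::FeatureSelector.new(
  id_matcher: Google::Cloud::AIPlatform::V1::IdMatcher.new(ids: ["color", "weight"])
)

# A single "*" entry selects every Feature in the target EntityType instead.
select_all = Google::Cloud::AIPlatform::V1::FeatureSelector.new(
  id_matcher: Google::Cloud::AIPlatform::V1::IdMatcher.new(ids: ["*"])
)
```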