pulumi-gcp 8.5.0a1728368389__py3-none-any.whl → 8.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (395)
  1. pulumi_gcp/__init__.py +32 -0
  2. pulumi_gcp/_utilities.py +1 -1
  3. pulumi_gcp/accessapproval/get_folder_service_account.py +9 -4
  4. pulumi_gcp/accessapproval/get_organization_service_account.py +9 -4
  5. pulumi_gcp/accessapproval/get_project_service_account.py +9 -4
  6. pulumi_gcp/accesscontextmanager/get_access_policy_iam_policy.py +9 -4
  7. pulumi_gcp/alloydb/get_locations.py +8 -4
  8. pulumi_gcp/alloydb/get_supported_database_flags.py +10 -4
  9. pulumi_gcp/apigateway/get_api_config_iam_policy.py +13 -4
  10. pulumi_gcp/apigateway/get_api_iam_policy.py +11 -4
  11. pulumi_gcp/apigateway/get_gateway_iam_policy.py +13 -4
  12. pulumi_gcp/apigee/get_environment_iam_policy.py +11 -4
  13. pulumi_gcp/appengine/get_default_service_account.py +12 -4
  14. pulumi_gcp/apphub/get_application.py +20 -4
  15. pulumi_gcp/apphub/get_discovered_service.py +14 -4
  16. pulumi_gcp/apphub/get_discovered_workload.py +14 -4
  17. pulumi_gcp/artifactregistry/get_docker_image.py +21 -4
  18. pulumi_gcp/artifactregistry/get_locations.py +8 -4
  19. pulumi_gcp/artifactregistry/get_repository.py +27 -4
  20. pulumi_gcp/artifactregistry/get_repository_iam_policy.py +13 -4
  21. pulumi_gcp/assuredworkloads/workload.py +7 -7
  22. pulumi_gcp/backupdisasterrecovery/get_management_server.py +13 -4
  23. pulumi_gcp/beyondcorp/get_app_connection.py +19 -4
  24. pulumi_gcp/beyondcorp/get_app_connector.py +17 -4
  25. pulumi_gcp/beyondcorp/get_app_gateway.py +20 -4
  26. pulumi_gcp/bigquery/get_connection_iam_policy.py +13 -4
  27. pulumi_gcp/bigquery/get_dataset.py +30 -4
  28. pulumi_gcp/bigquery/get_dataset_iam_policy.py +11 -4
  29. pulumi_gcp/bigquery/get_default_service_account.py +9 -4
  30. pulumi_gcp/bigquery/get_table_iam_policy.py +13 -4
  31. pulumi_gcp/bigquery/get_tables.py +10 -4
  32. pulumi_gcp/bigqueryanalyticshub/get_data_exchange_iam_policy.py +13 -4
  33. pulumi_gcp/bigqueryanalyticshub/get_listing_iam_policy.py +15 -4
  34. pulumi_gcp/bigquerydatapolicy/get_iam_policy.py +13 -4
  35. pulumi_gcp/bigtable/_inputs.py +58 -0
  36. pulumi_gcp/bigtable/gc_policy.py +7 -0
  37. pulumi_gcp/bigtable/get_instance_iam_policy.py +11 -4
  38. pulumi_gcp/bigtable/get_table_iam_policy.py +13 -4
  39. pulumi_gcp/bigtable/instance_iam_binding.py +13 -34
  40. pulumi_gcp/bigtable/instance_iam_member.py +13 -34
  41. pulumi_gcp/bigtable/instance_iam_policy.py +0 -47
  42. pulumi_gcp/bigtable/outputs.py +36 -0
  43. pulumi_gcp/billing/get_account_iam_policy.py +9 -4
  44. pulumi_gcp/binaryauthorization/get_attestor_iam_policy.py +11 -4
  45. pulumi_gcp/certificateauthority/get_authority.py +34 -4
  46. pulumi_gcp/certificateauthority/get_ca_pool_iam_policy.py +13 -4
  47. pulumi_gcp/certificateauthority/get_certificate_template_iam_policy.py +13 -4
  48. pulumi_gcp/certificatemanager/get_certificate_map.py +16 -4
  49. pulumi_gcp/certificatemanager/get_certificates.py +10 -4
  50. pulumi_gcp/cloudasset/get_resources_search_all.py +12 -4
  51. pulumi_gcp/cloudasset/get_search_all_resources.py +12 -4
  52. pulumi_gcp/cloudbuild/get_trigger.py +33 -4
  53. pulumi_gcp/cloudbuildv2/get_connection_iam_policy.py +13 -4
  54. pulumi_gcp/clouddeploy/get_custom_target_type_iam_policy.py +13 -4
  55. pulumi_gcp/clouddeploy/get_delivery_pipeline_iam_policy.py +13 -4
  56. pulumi_gcp/clouddeploy/get_target_iam_policy.py +13 -4
  57. pulumi_gcp/cloudfunctions/get_function.py +43 -4
  58. pulumi_gcp/cloudfunctions/get_function_iam_policy.py +13 -4
  59. pulumi_gcp/cloudfunctionsv2/get_function.py +23 -4
  60. pulumi_gcp/cloudfunctionsv2/get_function_iam_policy.py +13 -4
  61. pulumi_gcp/cloudidentity/get_group_lookup.py +8 -4
  62. pulumi_gcp/cloudidentity/get_group_memberships.py +8 -4
  63. pulumi_gcp/cloudidentity/get_group_transitive_memberships.py +8 -4
  64. pulumi_gcp/cloudidentity/get_groups.py +8 -4
  65. pulumi_gcp/cloudquota/get_s_quota_info.py +25 -4
  66. pulumi_gcp/cloudquota/get_s_quota_infos.py +10 -4
  67. pulumi_gcp/cloudrun/get_locations.py +8 -4
  68. pulumi_gcp/cloudrun/get_service.py +16 -4
  69. pulumi_gcp/cloudrun/get_service_iam_policy.py +13 -4
  70. pulumi_gcp/cloudrunv2/get_job.py +39 -4
  71. pulumi_gcp/cloudrunv2/get_job_iam_policy.py +13 -4
  72. pulumi_gcp/cloudrunv2/get_service.py +45 -4
  73. pulumi_gcp/cloudrunv2/get_service_iam_policy.py +13 -4
  74. pulumi_gcp/cloudtasks/get_queue_iam_policy.py +13 -4
  75. pulumi_gcp/composer/get_environment.py +16 -4
  76. pulumi_gcp/composer/get_image_versions.py +10 -4
  77. pulumi_gcp/composer/get_user_workloads_config_map.py +14 -4
  78. pulumi_gcp/composer/get_user_workloads_secret.py +14 -4
  79. pulumi_gcp/compute/_inputs.py +302 -18
  80. pulumi_gcp/compute/backend_service.py +111 -7
  81. pulumi_gcp/compute/get_address.py +21 -4
  82. pulumi_gcp/compute/get_addresses.py +12 -4
  83. pulumi_gcp/compute/get_backend_bucket.py +18 -4
  84. pulumi_gcp/compute/get_backend_bucket_iam_policy.py +11 -4
  85. pulumi_gcp/compute/get_backend_service.py +51 -5
  86. pulumi_gcp/compute/get_backend_service_iam_policy.py +11 -4
  87. pulumi_gcp/compute/get_certificate.py +17 -4
  88. pulumi_gcp/compute/get_default_service_account.py +12 -4
  89. pulumi_gcp/compute/get_disk.py +45 -4
  90. pulumi_gcp/compute/get_disk_iam_policy.py +13 -4
  91. pulumi_gcp/compute/get_forwarding_rule.py +43 -4
  92. pulumi_gcp/compute/get_forwarding_rules.py +10 -4
  93. pulumi_gcp/compute/get_global_address.py +19 -4
  94. pulumi_gcp/compute/get_global_forwarding_rule.py +31 -4
  95. pulumi_gcp/compute/get_hc_vpn_gateway.py +17 -4
  96. pulumi_gcp/compute/get_health_check.py +25 -4
  97. pulumi_gcp/compute/get_image.py +30 -4
  98. pulumi_gcp/compute/get_image_iam_policy.py +11 -4
  99. pulumi_gcp/compute/get_instance.py +49 -4
  100. pulumi_gcp/compute/get_instance_group.py +18 -4
  101. pulumi_gcp/compute/get_instance_group_manager.py +38 -4
  102. pulumi_gcp/compute/get_instance_iam_policy.py +13 -4
  103. pulumi_gcp/compute/get_instance_serial_port.py +14 -4
  104. pulumi_gcp/compute/get_instance_template.py +45 -4
  105. pulumi_gcp/compute/get_lbip_ranges.py +7 -4
  106. pulumi_gcp/compute/get_machine_image_iam_policy.py +11 -4
  107. pulumi_gcp/compute/get_machine_types.py +12 -4
  108. pulumi_gcp/compute/get_netblock_ip_ranges.py +10 -4
  109. pulumi_gcp/compute/get_network.py +14 -4
  110. pulumi_gcp/compute/get_network_endpoint_group.py +19 -4
  111. pulumi_gcp/compute/get_network_peering.py +17 -4
  112. pulumi_gcp/compute/get_networks.py +9 -4
  113. pulumi_gcp/compute/get_node_types.py +10 -4
  114. pulumi_gcp/compute/get_region_backend_service_iam_policy.py +13 -4
  115. pulumi_gcp/compute/get_region_disk.py +35 -4
  116. pulumi_gcp/compute/get_region_disk_iam_policy.py +13 -4
  117. pulumi_gcp/compute/get_region_instance_group.py +15 -4
  118. pulumi_gcp/compute/get_region_instance_group_manager.py +39 -4
  119. pulumi_gcp/compute/get_region_instance_template.py +44 -4
  120. pulumi_gcp/compute/get_region_network_endpoint_group.py +22 -4
  121. pulumi_gcp/compute/get_region_ssl_certificate.py +19 -4
  122. pulumi_gcp/compute/get_regions.py +10 -4
  123. pulumi_gcp/compute/get_reservation.py +19 -4
  124. pulumi_gcp/compute/get_resource_policy.py +17 -4
  125. pulumi_gcp/compute/get_router.py +18 -4
  126. pulumi_gcp/compute/get_router_nat.py +33 -4
  127. pulumi_gcp/compute/get_router_status.py +14 -4
  128. pulumi_gcp/compute/get_security_policy.py +18 -4
  129. pulumi_gcp/compute/get_snapshot.py +30 -4
  130. pulumi_gcp/compute/get_snapshot_iam_policy.py +11 -4
  131. pulumi_gcp/compute/get_ssl_policy.py +17 -4
  132. pulumi_gcp/compute/get_subnetwork.py +20 -4
  133. pulumi_gcp/compute/get_subnetwork_iam_policy.py +13 -4
  134. pulumi_gcp/compute/get_subnetworks.py +12 -4
  135. pulumi_gcp/compute/get_vpn_gateway.py +14 -4
  136. pulumi_gcp/compute/get_zones.py +12 -4
  137. pulumi_gcp/compute/network.py +236 -0
  138. pulumi_gcp/compute/outputs.py +401 -16
  139. pulumi_gcp/compute/region_backend_service.py +115 -7
  140. pulumi_gcp/compute/route.py +92 -0
  141. pulumi_gcp/compute/router_status.py +14 -4
  142. pulumi_gcp/compute/subnetwork.py +2 -2
  143. pulumi_gcp/container/_inputs.py +106 -0
  144. pulumi_gcp/container/aws_node_pool.py +59 -0
  145. pulumi_gcp/container/get_attached_install_manifest.py +14 -4
  146. pulumi_gcp/container/get_attached_versions.py +10 -4
  147. pulumi_gcp/container/get_aws_versions.py +11 -4
  148. pulumi_gcp/container/get_azure_versions.py +11 -4
  149. pulumi_gcp/container/get_cluster.py +87 -4
  150. pulumi_gcp/container/get_engine_versions.py +18 -4
  151. pulumi_gcp/container/get_registry_image.py +16 -4
  152. pulumi_gcp/container/get_registry_repository.py +10 -4
  153. pulumi_gcp/container/outputs.py +87 -0
  154. pulumi_gcp/containeranalysis/get_note_iam_policy.py +11 -4
  155. pulumi_gcp/datacatalog/get_entry_group_iam_policy.py +13 -4
  156. pulumi_gcp/datacatalog/get_policy_tag_iam_policy.py +9 -4
  157. pulumi_gcp/datacatalog/get_tag_template_iam_policy.py +13 -4
  158. pulumi_gcp/datacatalog/get_taxonomy_iam_policy.py +13 -4
  159. pulumi_gcp/dataform/get_repository_iam_policy.py +13 -4
  160. pulumi_gcp/datafusion/get_instance_iam_policy.py +13 -4
  161. pulumi_gcp/dataplex/get_aspect_type_iam_policy.py +13 -4
  162. pulumi_gcp/dataplex/get_asset_iam_policy.py +17 -4
  163. pulumi_gcp/dataplex/get_datascan_iam_policy.py +13 -4
  164. pulumi_gcp/dataplex/get_entry_group_iam_policy.py +13 -4
  165. pulumi_gcp/dataplex/get_entry_type_iam_policy.py +13 -4
  166. pulumi_gcp/dataplex/get_lake_iam_policy.py +13 -4
  167. pulumi_gcp/dataplex/get_task_iam_policy.py +15 -4
  168. pulumi_gcp/dataplex/get_zone_iam_policy.py +15 -4
  169. pulumi_gcp/dataproc/__init__.py +1 -0
  170. pulumi_gcp/dataproc/_inputs.py +1394 -0
  171. pulumi_gcp/dataproc/batch.py +1514 -0
  172. pulumi_gcp/dataproc/get_autoscaling_policy_iam_policy.py +13 -4
  173. pulumi_gcp/dataproc/get_cluster_iam_policy.py +13 -4
  174. pulumi_gcp/dataproc/get_job_iam_policy.py +13 -4
  175. pulumi_gcp/dataproc/get_metastore_federation_iam_policy.py +13 -4
  176. pulumi_gcp/dataproc/get_metastore_service.py +34 -4
  177. pulumi_gcp/dataproc/get_metastore_service_iam_policy.py +13 -4
  178. pulumi_gcp/dataproc/outputs.py +1127 -0
  179. pulumi_gcp/datastream/get_static_ips.py +10 -4
  180. pulumi_gcp/discoveryengine/_inputs.py +32 -5
  181. pulumi_gcp/discoveryengine/chat_engine.py +64 -0
  182. pulumi_gcp/discoveryengine/outputs.py +22 -3
  183. pulumi_gcp/dns/get_keys.py +11 -4
  184. pulumi_gcp/dns/get_managed_zone.py +14 -4
  185. pulumi_gcp/dns/get_managed_zone_iam_policy.py +11 -4
  186. pulumi_gcp/dns/get_managed_zones.py +8 -4
  187. pulumi_gcp/dns/get_record_set.py +15 -4
  188. pulumi_gcp/endpoints/get_service_consumers_iam_policy.py +11 -4
  189. pulumi_gcp/endpoints/get_service_iam_policy.py +9 -4
  190. pulumi_gcp/filestore/get_instance.py +25 -4
  191. pulumi_gcp/firebase/get_android_app.py +17 -4
  192. pulumi_gcp/firebase/get_android_app_config.py +11 -4
  193. pulumi_gcp/firebase/get_apple_app.py +16 -4
  194. pulumi_gcp/firebase/get_apple_app_config.py +11 -4
  195. pulumi_gcp/firebase/get_hosting_channel.py +16 -4
  196. pulumi_gcp/firebase/get_web_app.py +14 -4
  197. pulumi_gcp/firebase/get_web_app_config.py +16 -4
  198. pulumi_gcp/firestore/field.py +4 -4
  199. pulumi_gcp/folder/get_iam_policy.py +9 -4
  200. pulumi_gcp/folder/get_organization_policy.py +15 -4
  201. pulumi_gcp/gkebackup/get_backup_plan_iam_policy.py +13 -4
  202. pulumi_gcp/gkebackup/get_restore_plan_iam_policy.py +13 -4
  203. pulumi_gcp/gkehub/get_feature_iam_policy.py +13 -4
  204. pulumi_gcp/gkehub/get_membership_binding.py +23 -4
  205. pulumi_gcp/gkehub/get_membership_iam_policy.py +13 -4
  206. pulumi_gcp/gkehub/get_scope_iam_policy.py +11 -4
  207. pulumi_gcp/gkehub/membership_binding.py +6 -6
  208. pulumi_gcp/gkehub/membership_rbac_role_binding.py +4 -4
  209. pulumi_gcp/gkehub/namespace.py +4 -4
  210. pulumi_gcp/gkehub/scope_rbac_role_binding.py +4 -4
  211. pulumi_gcp/healthcare/__init__.py +1 -0
  212. pulumi_gcp/healthcare/_inputs.py +538 -0
  213. pulumi_gcp/healthcare/get_consent_store_iam_policy.py +11 -4
  214. pulumi_gcp/healthcare/get_dataset_iam_policy.py +9 -4
  215. pulumi_gcp/healthcare/get_dicom_store_iam_policy.py +9 -4
  216. pulumi_gcp/healthcare/get_fhir_store_iam_policy.py +9 -4
  217. pulumi_gcp/healthcare/get_hl7_v2_store_iam_policy.py +9 -4
  218. pulumi_gcp/healthcare/outputs.py +467 -0
  219. pulumi_gcp/healthcare/pipeline_job.py +1233 -0
  220. pulumi_gcp/iam/get_rule.py +10 -4
  221. pulumi_gcp/iam/get_testable_permissions.py +12 -4
  222. pulumi_gcp/iam/get_workload_identity_pool.py +14 -4
  223. pulumi_gcp/iam/get_workload_identity_pool_provider.py +22 -4
  224. pulumi_gcp/iap/get_app_engine_service_iam_policy.py +13 -4
  225. pulumi_gcp/iap/get_app_engine_version_iam_policy.py +15 -4
  226. pulumi_gcp/iap/get_client.py +11 -4
  227. pulumi_gcp/iap/get_tunnel_dest_group_iam_policy.py +13 -4
  228. pulumi_gcp/iap/get_tunnel_iam_policy.py +9 -4
  229. pulumi_gcp/iap/get_tunnel_instance_iam_policy.py +13 -4
  230. pulumi_gcp/iap/get_web_backend_service_iam_policy.py +11 -4
  231. pulumi_gcp/iap/get_web_iam_policy.py +9 -4
  232. pulumi_gcp/iap/get_web_region_backend_service_iam_policy.py +13 -4
  233. pulumi_gcp/iap/get_web_type_app_engine_iam_policy.py +11 -4
  234. pulumi_gcp/iap/get_web_type_compute_iam_policy.py +9 -4
  235. pulumi_gcp/iap/tunnel_dest_group.py +2 -2
  236. pulumi_gcp/integrationconnectors/managed_zone.py +8 -8
  237. pulumi_gcp/kms/get_crypto_key_iam_policy.py +9 -4
  238. pulumi_gcp/kms/get_crypto_key_latest_version.py +15 -4
  239. pulumi_gcp/kms/get_crypto_key_versions.py +11 -4
  240. pulumi_gcp/kms/get_crypto_keys.py +10 -4
  241. pulumi_gcp/kms/get_ekm_connection_iam_policy.py +13 -4
  242. pulumi_gcp/kms/get_key_ring_iam_policy.py +9 -4
  243. pulumi_gcp/kms/get_key_rings.py +12 -4
  244. pulumi_gcp/kms/get_kms_crypto_key.py +21 -4
  245. pulumi_gcp/kms/get_kms_crypto_key_version.py +14 -4
  246. pulumi_gcp/kms/get_kms_key_ring.py +11 -4
  247. pulumi_gcp/kms/get_kms_secret.py +12 -4
  248. pulumi_gcp/kms/get_kms_secret_asymmetric.py +12 -4
  249. pulumi_gcp/kms/get_kms_secret_ciphertext.py +10 -4
  250. pulumi_gcp/kms/key_handle.py +8 -8
  251. pulumi_gcp/logging/get_folder_settings.py +13 -4
  252. pulumi_gcp/logging/get_log_view_iam_policy.py +15 -4
  253. pulumi_gcp/logging/get_organization_settings.py +13 -4
  254. pulumi_gcp/logging/get_project_cmek_settings.py +12 -4
  255. pulumi_gcp/logging/get_project_settings.py +13 -4
  256. pulumi_gcp/logging/get_sink.py +14 -4
  257. pulumi_gcp/logging/log_scope.py +7 -7
  258. pulumi_gcp/monitoring/get_app_engine_service.py +14 -4
  259. pulumi_gcp/monitoring/get_cluster_istio_service.py +20 -4
  260. pulumi_gcp/monitoring/get_istio_canonical_service.py +18 -4
  261. pulumi_gcp/monitoring/get_mesh_istio_service.py +18 -4
  262. pulumi_gcp/monitoring/get_notification_channel.py +21 -4
  263. pulumi_gcp/monitoring/get_secret_version.py +16 -4
  264. pulumi_gcp/monitoring/get_uptime_check_i_ps.py +6 -4
  265. pulumi_gcp/netapp/backup_vault.py +2 -2
  266. pulumi_gcp/networkconnectivity/_inputs.py +70 -0
  267. pulumi_gcp/networkconnectivity/internal_range.py +117 -7
  268. pulumi_gcp/networkconnectivity/outputs.py +42 -0
  269. pulumi_gcp/networkconnectivity/spoke.py +10 -10
  270. pulumi_gcp/networksecurity/get_address_group_iam_policy.py +13 -4
  271. pulumi_gcp/networkservices/gateway.py +54 -0
  272. pulumi_gcp/notebooks/get_instance_iam_policy.py +13 -4
  273. pulumi_gcp/notebooks/get_runtime_iam_policy.py +13 -4
  274. pulumi_gcp/organizations/get_active_folder.py +12 -4
  275. pulumi_gcp/organizations/get_billing_account.py +15 -4
  276. pulumi_gcp/organizations/get_client_config.py +10 -4
  277. pulumi_gcp/organizations/get_client_open_id_user_info.py +6 -4
  278. pulumi_gcp/organizations/get_folder.py +17 -4
  279. pulumi_gcp/organizations/get_folders.py +8 -4
  280. pulumi_gcp/organizations/get_iam_policy.py +10 -4
  281. pulumi_gcp/organizations/get_organization.py +14 -4
  282. pulumi_gcp/organizations/get_project.py +18 -4
  283. pulumi_gcp/orgpolicy/policy.py +2 -2
  284. pulumi_gcp/privilegedaccessmanager/get_entitlement.py +22 -4
  285. pulumi_gcp/projects/get_iam_policy.py +9 -4
  286. pulumi_gcp/projects/get_organization_policy.py +15 -4
  287. pulumi_gcp/projects/get_project.py +8 -4
  288. pulumi_gcp/projects/get_project_service.py +12 -4
  289. pulumi_gcp/pubsub/_inputs.py +333 -1
  290. pulumi_gcp/pubsub/get_schema_iam_policy.py +11 -4
  291. pulumi_gcp/pubsub/get_subscription.py +25 -4
  292. pulumi_gcp/pubsub/get_subscription_iam_policy.py +11 -4
  293. pulumi_gcp/pubsub/get_topic.py +17 -4
  294. pulumi_gcp/pubsub/get_topic_iam_policy.py +11 -4
  295. pulumi_gcp/pubsub/outputs.py +410 -2
  296. pulumi_gcp/pubsub/subscription.py +6 -6
  297. pulumi_gcp/pubsub/topic.py +44 -0
  298. pulumi_gcp/pulumi-plugin.json +1 -1
  299. pulumi_gcp/redis/get_instance.py +44 -4
  300. pulumi_gcp/runtimeconfig/get_config.py +10 -4
  301. pulumi_gcp/runtimeconfig/get_config_iam_policy.py +11 -4
  302. pulumi_gcp/runtimeconfig/get_variable.py +14 -4
  303. pulumi_gcp/secretmanager/get_regional_secret.py +25 -4
  304. pulumi_gcp/secretmanager/get_regional_secret_iam_policy.py +13 -4
  305. pulumi_gcp/secretmanager/get_regional_secret_version.py +19 -4
  306. pulumi_gcp/secretmanager/get_regional_secret_version_access.py +15 -4
  307. pulumi_gcp/secretmanager/get_regional_secrets.py +12 -4
  308. pulumi_gcp/secretmanager/get_secret.py +23 -4
  309. pulumi_gcp/secretmanager/get_secret_iam_policy.py +11 -4
  310. pulumi_gcp/secretmanager/get_secret_version.py +16 -4
  311. pulumi_gcp/secretmanager/get_secret_version_access.py +13 -4
  312. pulumi_gcp/secretmanager/get_secrets.py +10 -4
  313. pulumi_gcp/secretmanager/outputs.py +2 -10
  314. pulumi_gcp/securesourcemanager/__init__.py +1 -0
  315. pulumi_gcp/securesourcemanager/branch_rule.py +975 -0
  316. pulumi_gcp/securesourcemanager/get_instance_iam_policy.py +13 -4
  317. pulumi_gcp/securesourcemanager/get_repository_iam_policy.py +13 -4
  318. pulumi_gcp/securesourcemanager/repository.py +0 -2
  319. pulumi_gcp/securitycenter/get_source_iam_policy.py +11 -4
  320. pulumi_gcp/securitycenter/get_v2_organization_source_iam_policy.py +11 -4
  321. pulumi_gcp/securityposture/posture.py +0 -2
  322. pulumi_gcp/securityposture/posture_deployment.py +0 -2
  323. pulumi_gcp/serviceaccount/get_account.py +14 -4
  324. pulumi_gcp/serviceaccount/get_account_access_token.py +14 -4
  325. pulumi_gcp/serviceaccount/get_account_id_token.py +14 -4
  326. pulumi_gcp/serviceaccount/get_account_jwt.py +14 -4
  327. pulumi_gcp/serviceaccount/get_account_key.py +13 -4
  328. pulumi_gcp/serviceaccount/get_iam_policy.py +9 -4
  329. pulumi_gcp/servicedirectory/get_namespace_iam_policy.py +9 -4
  330. pulumi_gcp/servicedirectory/get_service_iam_policy.py +9 -4
  331. pulumi_gcp/servicenetworking/get_peered_dns_domain.py +15 -4
  332. pulumi_gcp/serviceusage/consumer_quota_override.py +0 -2
  333. pulumi_gcp/siteverification/__init__.py +1 -0
  334. pulumi_gcp/siteverification/get_token.py +12 -4
  335. pulumi_gcp/siteverification/owner.py +398 -0
  336. pulumi_gcp/sourcerepo/get_repository.py +24 -5
  337. pulumi_gcp/sourcerepo/get_repository_iam_policy.py +11 -4
  338. pulumi_gcp/sourcerepo/repository.py +47 -0
  339. pulumi_gcp/spanner/backup_schedule.py +4 -2
  340. pulumi_gcp/spanner/get_database_iam_policy.py +13 -4
  341. pulumi_gcp/spanner/get_instance.py +22 -4
  342. pulumi_gcp/spanner/get_instance_iam_policy.py +11 -4
  343. pulumi_gcp/sql/get_backup_run.py +16 -4
  344. pulumi_gcp/sql/get_ca_certs.py +11 -4
  345. pulumi_gcp/sql/get_database.py +15 -4
  346. pulumi_gcp/sql/get_database_instance.py +32 -4
  347. pulumi_gcp/sql/get_database_instance_latest_recovery_time.py +10 -4
  348. pulumi_gcp/sql/get_database_instances.py +18 -4
  349. pulumi_gcp/sql/get_databases.py +10 -4
  350. pulumi_gcp/sql/get_tiers.py +8 -4
  351. pulumi_gcp/storage/get_bucket.py +34 -4
  352. pulumi_gcp/storage/get_bucket_iam_policy.py +9 -4
  353. pulumi_gcp/storage/get_bucket_object.py +30 -4
  354. pulumi_gcp/storage/get_bucket_object_content.py +31 -4
  355. pulumi_gcp/storage/get_bucket_objects.py +12 -4
  356. pulumi_gcp/storage/get_buckets.py +10 -4
  357. pulumi_gcp/storage/get_managed_folder_iam_policy.py +11 -4
  358. pulumi_gcp/storage/get_object_signed_url.py +22 -4
  359. pulumi_gcp/storage/get_project_service_account.py +11 -4
  360. pulumi_gcp/storage/get_transfer_project_service_account.py +10 -4
  361. pulumi_gcp/storage/get_transfer_project_servie_account.py +10 -4
  362. pulumi_gcp/tags/get_tag_key.py +14 -4
  363. pulumi_gcp/tags/get_tag_key_iam_policy.py +9 -4
  364. pulumi_gcp/tags/get_tag_keys.py +8 -4
  365. pulumi_gcp/tags/get_tag_value.py +14 -4
  366. pulumi_gcp/tags/get_tag_value_iam_policy.py +9 -4
  367. pulumi_gcp/tags/get_tag_values.py +8 -4
  368. pulumi_gcp/tags/tag_key.py +7 -7
  369. pulumi_gcp/tags/tag_value.py +7 -7
  370. pulumi_gcp/tpu/get_tensorflow_versions.py +10 -4
  371. pulumi_gcp/tpu/get_v2_accelerator_types.py +10 -4
  372. pulumi_gcp/tpu/get_v2_runtime_versions.py +10 -4
  373. pulumi_gcp/vertex/ai_feature_online_store_featureview.py +4 -4
  374. pulumi_gcp/vertex/get_ai_endpoint_iam_policy.py +13 -4
  375. pulumi_gcp/vertex/get_ai_featurestore_entitytype_iam_policy.py +11 -4
  376. pulumi_gcp/vertex/get_ai_featurestore_iam_policy.py +13 -4
  377. pulumi_gcp/vertex/get_ai_index.py +24 -4
  378. pulumi_gcp/vmwareengine/get_cluster.py +13 -4
  379. pulumi_gcp/vmwareengine/get_external_access_rule.py +21 -4
  380. pulumi_gcp/vmwareengine/get_external_address.py +16 -4
  381. pulumi_gcp/vmwareengine/get_network.py +16 -4
  382. pulumi_gcp/vmwareengine/get_network_peering.py +23 -4
  383. pulumi_gcp/vmwareengine/get_network_policy.py +20 -4
  384. pulumi_gcp/vmwareengine/get_nsx_credentials.py +9 -4
  385. pulumi_gcp/vmwareengine/get_private_cloud.py +22 -4
  386. pulumi_gcp/vmwareengine/get_subnet.py +20 -4
  387. pulumi_gcp/vmwareengine/get_vcenter_credentials.py +9 -4
  388. pulumi_gcp/vpcaccess/get_connector.py +22 -4
  389. pulumi_gcp/workbench/get_instance_iam_policy.py +13 -4
  390. pulumi_gcp/workstations/get_workstation_config_iam_policy.py +15 -4
  391. pulumi_gcp/workstations/get_workstation_iam_policy.py +17 -4
  392. {pulumi_gcp-8.5.0a1728368389.dist-info → pulumi_gcp-8.6.0.dist-info}/METADATA +2 -2
  393. {pulumi_gcp-8.5.0a1728368389.dist-info → pulumi_gcp-8.6.0.dist-info}/RECORD +395 -391
  394. {pulumi_gcp-8.5.0a1728368389.dist-info → pulumi_gcp-8.6.0.dist-info}/WHEEL +0 -0
  395. {pulumi_gcp-8.5.0a1728368389.dist-info → pulumi_gcp-8.6.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,1514 @@
1
+ # coding=utf-8
2
+ # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
3
+ # *** Do not edit by hand unless you're certain you know what you are doing! ***
4
+
5
+ import copy
6
+ import warnings
7
+ import sys
8
+ import pulumi
9
+ import pulumi.runtime
10
+ from typing import Any, Mapping, Optional, Sequence, Union, overload
11
+ if sys.version_info >= (3, 11):
12
+ from typing import NotRequired, TypedDict, TypeAlias
13
+ else:
14
+ from typing_extensions import NotRequired, TypedDict, TypeAlias
15
+ from .. import _utilities
16
+ from . import outputs
17
+ from ._inputs import *
18
+
19
+ __all__ = ['BatchArgs', 'Batch']
20
+
21
+ @pulumi.input_type
22
+ class BatchArgs:
23
+ def __init__(__self__, *,
24
+ batch_id: Optional[pulumi.Input[str]] = None,
25
+ environment_config: Optional[pulumi.Input['BatchEnvironmentConfigArgs']] = None,
26
+ labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
27
+ location: Optional[pulumi.Input[str]] = None,
28
+ project: Optional[pulumi.Input[str]] = None,
29
+ pyspark_batch: Optional[pulumi.Input['BatchPysparkBatchArgs']] = None,
30
+ runtime_config: Optional[pulumi.Input['BatchRuntimeConfigArgs']] = None,
31
+ spark_batch: Optional[pulumi.Input['BatchSparkBatchArgs']] = None,
32
+ spark_r_batch: Optional[pulumi.Input['BatchSparkRBatchArgs']] = None,
33
+ spark_sql_batch: Optional[pulumi.Input['BatchSparkSqlBatchArgs']] = None):
34
+ """
35
+ The set of arguments for constructing a Batch resource.
36
+ :param pulumi.Input[str] batch_id: The ID to use for the batch, which will become the final component of the batch's resource name.
37
+ This value must be 4-63 characters. Valid characters are /[a-z][0-9]-/.
38
+ :param pulumi.Input['BatchEnvironmentConfigArgs'] environment_config: Environment configuration for the batch execution.
39
+ Structure is documented below.
40
+ :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: The labels to associate with this batch.
41
+
42
+ **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
43
+ Please refer to the field `effective_labels` for all of the labels present on the resource.
44
+ :param pulumi.Input[str] location: The location in which the batch will be created in.
45
+ :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
46
+ If it is not provided, the provider project is used.
47
+ :param pulumi.Input['BatchPysparkBatchArgs'] pyspark_batch: PySpark batch config.
48
+ Structure is documented below.
49
+ :param pulumi.Input['BatchRuntimeConfigArgs'] runtime_config: Runtime configuration for the batch execution.
50
+ Structure is documented below.
51
+ :param pulumi.Input['BatchSparkBatchArgs'] spark_batch: Spark batch config.
52
+ Structure is documented below.
53
+ :param pulumi.Input['BatchSparkRBatchArgs'] spark_r_batch: SparkR batch config.
54
+ Structure is documented below.
55
+ :param pulumi.Input['BatchSparkSqlBatchArgs'] spark_sql_batch: Spark SQL batch config.
56
+ Structure is documented below.
57
+ """
58
+ if batch_id is not None:
59
+ pulumi.set(__self__, "batch_id", batch_id)
60
+ if environment_config is not None:
61
+ pulumi.set(__self__, "environment_config", environment_config)
62
+ if labels is not None:
63
+ pulumi.set(__self__, "labels", labels)
64
+ if location is not None:
65
+ pulumi.set(__self__, "location", location)
66
+ if project is not None:
67
+ pulumi.set(__self__, "project", project)
68
+ if pyspark_batch is not None:
69
+ pulumi.set(__self__, "pyspark_batch", pyspark_batch)
70
+ if runtime_config is not None:
71
+ pulumi.set(__self__, "runtime_config", runtime_config)
72
+ if spark_batch is not None:
73
+ pulumi.set(__self__, "spark_batch", spark_batch)
74
+ if spark_r_batch is not None:
75
+ pulumi.set(__self__, "spark_r_batch", spark_r_batch)
76
+ if spark_sql_batch is not None:
77
+ pulumi.set(__self__, "spark_sql_batch", spark_sql_batch)
78
+
79
+ @property
80
+ @pulumi.getter(name="batchId")
81
+ def batch_id(self) -> Optional[pulumi.Input[str]]:
82
+ """
83
+ The ID to use for the batch, which will become the final component of the batch's resource name.
84
+ This value must be 4-63 characters. Valid characters are /[a-z][0-9]-/.
85
+ """
86
+ return pulumi.get(self, "batch_id")
87
+
88
+ @batch_id.setter
89
+ def batch_id(self, value: Optional[pulumi.Input[str]]):
90
+ pulumi.set(self, "batch_id", value)
91
+
92
+ @property
93
+ @pulumi.getter(name="environmentConfig")
94
+ def environment_config(self) -> Optional[pulumi.Input['BatchEnvironmentConfigArgs']]:
95
+ """
96
+ Environment configuration for the batch execution.
97
+ Structure is documented below.
98
+ """
99
+ return pulumi.get(self, "environment_config")
100
+
101
+ @environment_config.setter
102
+ def environment_config(self, value: Optional[pulumi.Input['BatchEnvironmentConfigArgs']]):
103
+ pulumi.set(self, "environment_config", value)
104
+
105
+ @property
106
+ @pulumi.getter
107
+ def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
108
+ """
109
+ The labels to associate with this batch.
110
+
111
+ **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
112
+ Please refer to the field `effective_labels` for all of the labels present on the resource.
113
+ """
114
+ return pulumi.get(self, "labels")
115
+
116
+ @labels.setter
117
+ def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
118
+ pulumi.set(self, "labels", value)
119
+
120
+ @property
121
+ @pulumi.getter
122
+ def location(self) -> Optional[pulumi.Input[str]]:
123
+ """
124
+ The location in which the batch will be created in.
125
+ """
126
+ return pulumi.get(self, "location")
127
+
128
+ @location.setter
129
+ def location(self, value: Optional[pulumi.Input[str]]):
130
+ pulumi.set(self, "location", value)
131
+
132
+ @property
133
+ @pulumi.getter
134
+ def project(self) -> Optional[pulumi.Input[str]]:
135
+ """
136
+ The ID of the project in which the resource belongs.
137
+ If it is not provided, the provider project is used.
138
+ """
139
+ return pulumi.get(self, "project")
140
+
141
+ @project.setter
142
+ def project(self, value: Optional[pulumi.Input[str]]):
143
+ pulumi.set(self, "project", value)
144
+
145
+ @property
146
+ @pulumi.getter(name="pysparkBatch")
147
+ def pyspark_batch(self) -> Optional[pulumi.Input['BatchPysparkBatchArgs']]:
148
+ """
149
+ PySpark batch config.
150
+ Structure is documented below.
151
+ """
152
+ return pulumi.get(self, "pyspark_batch")
153
+
154
+ @pyspark_batch.setter
155
+ def pyspark_batch(self, value: Optional[pulumi.Input['BatchPysparkBatchArgs']]):
156
+ pulumi.set(self, "pyspark_batch", value)
157
+
158
+ @property
159
+ @pulumi.getter(name="runtimeConfig")
160
+ def runtime_config(self) -> Optional[pulumi.Input['BatchRuntimeConfigArgs']]:
161
+ """
162
+ Runtime configuration for the batch execution.
163
+ Structure is documented below.
164
+ """
165
+ return pulumi.get(self, "runtime_config")
166
+
167
+ @runtime_config.setter
168
+ def runtime_config(self, value: Optional[pulumi.Input['BatchRuntimeConfigArgs']]):
169
+ pulumi.set(self, "runtime_config", value)
170
+
171
+ @property
172
+ @pulumi.getter(name="sparkBatch")
173
+ def spark_batch(self) -> Optional[pulumi.Input['BatchSparkBatchArgs']]:
174
+ """
175
+ Spark batch config.
176
+ Structure is documented below.
177
+ """
178
+ return pulumi.get(self, "spark_batch")
179
+
180
+ @spark_batch.setter
181
+ def spark_batch(self, value: Optional[pulumi.Input['BatchSparkBatchArgs']]):
182
+ pulumi.set(self, "spark_batch", value)
183
+
184
+ @property
185
+ @pulumi.getter(name="sparkRBatch")
186
+ def spark_r_batch(self) -> Optional[pulumi.Input['BatchSparkRBatchArgs']]:
187
+ """
188
+ SparkR batch config.
189
+ Structure is documented below.
190
+ """
191
+ return pulumi.get(self, "spark_r_batch")
192
+
193
+ @spark_r_batch.setter
194
+ def spark_r_batch(self, value: Optional[pulumi.Input['BatchSparkRBatchArgs']]):
195
+ pulumi.set(self, "spark_r_batch", value)
196
+
197
+ @property
198
+ @pulumi.getter(name="sparkSqlBatch")
199
+ def spark_sql_batch(self) -> Optional[pulumi.Input['BatchSparkSqlBatchArgs']]:
200
+ """
201
+ Spark SQL batch config.
202
+ Structure is documented below.
203
+ """
204
+ return pulumi.get(self, "spark_sql_batch")
205
+
206
+ @spark_sql_batch.setter
207
+ def spark_sql_batch(self, value: Optional[pulumi.Input['BatchSparkSqlBatchArgs']]):
208
+ pulumi.set(self, "spark_sql_batch", value)
209
+
210
+
211
+ @pulumi.input_type
212
+ class _BatchState:
213
+ def __init__(__self__, *,
214
+ batch_id: Optional[pulumi.Input[str]] = None,
215
+ create_time: Optional[pulumi.Input[str]] = None,
216
+ creator: Optional[pulumi.Input[str]] = None,
217
+ effective_labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
218
+ environment_config: Optional[pulumi.Input['BatchEnvironmentConfigArgs']] = None,
219
+ labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
220
+ location: Optional[pulumi.Input[str]] = None,
221
+ name: Optional[pulumi.Input[str]] = None,
222
+ operation: Optional[pulumi.Input[str]] = None,
223
+ project: Optional[pulumi.Input[str]] = None,
224
+ pulumi_labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
225
+ pyspark_batch: Optional[pulumi.Input['BatchPysparkBatchArgs']] = None,
226
+ runtime_config: Optional[pulumi.Input['BatchRuntimeConfigArgs']] = None,
227
+ runtime_infos: Optional[pulumi.Input[Sequence[pulumi.Input['BatchRuntimeInfoArgs']]]] = None,
228
+ spark_batch: Optional[pulumi.Input['BatchSparkBatchArgs']] = None,
229
+ spark_r_batch: Optional[pulumi.Input['BatchSparkRBatchArgs']] = None,
230
+ spark_sql_batch: Optional[pulumi.Input['BatchSparkSqlBatchArgs']] = None,
231
+ state: Optional[pulumi.Input[str]] = None,
232
+ state_histories: Optional[pulumi.Input[Sequence[pulumi.Input['BatchStateHistoryArgs']]]] = None,
233
+ state_message: Optional[pulumi.Input[str]] = None,
234
+ state_time: Optional[pulumi.Input[str]] = None,
235
+ uuid: Optional[pulumi.Input[str]] = None):
236
+ """
237
+ Input properties used for looking up and filtering Batch resources.
238
+ :param pulumi.Input[str] batch_id: The ID to use for the batch, which will become the final component of the batch's resource name.
239
+ This value must be 4-63 characters. Valid characters are /[a-z][0-9]-/.
240
+ :param pulumi.Input[str] create_time: The time when the batch was created.
241
+ :param pulumi.Input[str] creator: The email address of the user who created the batch.
242
+ :param pulumi.Input[Mapping[str, pulumi.Input[str]]] effective_labels: All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
243
+ :param pulumi.Input['BatchEnvironmentConfigArgs'] environment_config: Environment configuration for the batch execution.
244
+ Structure is documented below.
245
+ :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: The labels to associate with this batch.
246
+
247
+ **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
248
+ Please refer to the field `effective_labels` for all of the labels present on the resource.
249
+ :param pulumi.Input[str] location: The location in which the batch will be created in.
250
+ :param pulumi.Input[str] name: The resource name of the batch.
251
+ :param pulumi.Input[str] operation: The resource name of the operation associated with this batch.
252
+ :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
253
+ If it is not provided, the provider project is used.
254
+ :param pulumi.Input[Mapping[str, pulumi.Input[str]]] pulumi_labels: The combination of labels configured directly on the resource
255
+ and default labels configured on the provider.
256
+ :param pulumi.Input['BatchPysparkBatchArgs'] pyspark_batch: PySpark batch config.
257
+ Structure is documented below.
258
+ :param pulumi.Input['BatchRuntimeConfigArgs'] runtime_config: Runtime configuration for the batch execution.
259
+ Structure is documented below.
260
+ :param pulumi.Input[Sequence[pulumi.Input['BatchRuntimeInfoArgs']]] runtime_infos: Runtime information about batch execution.
261
+ Structure is documented below.
262
+ :param pulumi.Input['BatchSparkBatchArgs'] spark_batch: Spark batch config.
263
+ Structure is documented below.
264
+ :param pulumi.Input['BatchSparkRBatchArgs'] spark_r_batch: SparkR batch config.
265
+ Structure is documented below.
266
+ :param pulumi.Input['BatchSparkSqlBatchArgs'] spark_sql_batch: Spark SQL batch config.
267
+ Structure is documented below.
268
+ :param pulumi.Input[str] state: (Output)
269
+ The state of the batch at this point in history. For possible values, see the [API documentation](https://cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.batches#State).
270
+ :param pulumi.Input[Sequence[pulumi.Input['BatchStateHistoryArgs']]] state_histories: Historical state information for the batch.
271
+ Structure is documented below.
272
+ :param pulumi.Input[str] state_message: (Output)
273
+ Details about the state at this point in history.
274
+ :param pulumi.Input[str] state_time: Batch state details, such as a failure description if the state is FAILED.
275
+ :param pulumi.Input[str] uuid: A batch UUID (Unique Universal Identifier). The service generates this value when it creates the batch.
276
+ """
277
+ if batch_id is not None:
278
+ pulumi.set(__self__, "batch_id", batch_id)
279
+ if create_time is not None:
280
+ pulumi.set(__self__, "create_time", create_time)
281
+ if creator is not None:
282
+ pulumi.set(__self__, "creator", creator)
283
+ if effective_labels is not None:
284
+ pulumi.set(__self__, "effective_labels", effective_labels)
285
+ if environment_config is not None:
286
+ pulumi.set(__self__, "environment_config", environment_config)
287
+ if labels is not None:
288
+ pulumi.set(__self__, "labels", labels)
289
+ if location is not None:
290
+ pulumi.set(__self__, "location", location)
291
+ if name is not None:
292
+ pulumi.set(__self__, "name", name)
293
+ if operation is not None:
294
+ pulumi.set(__self__, "operation", operation)
295
+ if project is not None:
296
+ pulumi.set(__self__, "project", project)
297
+ if pulumi_labels is not None:
298
+ pulumi.set(__self__, "pulumi_labels", pulumi_labels)
299
+ if pyspark_batch is not None:
300
+ pulumi.set(__self__, "pyspark_batch", pyspark_batch)
301
+ if runtime_config is not None:
302
+ pulumi.set(__self__, "runtime_config", runtime_config)
303
+ if runtime_infos is not None:
304
+ pulumi.set(__self__, "runtime_infos", runtime_infos)
305
+ if spark_batch is not None:
306
+ pulumi.set(__self__, "spark_batch", spark_batch)
307
+ if spark_r_batch is not None:
308
+ pulumi.set(__self__, "spark_r_batch", spark_r_batch)
309
+ if spark_sql_batch is not None:
310
+ pulumi.set(__self__, "spark_sql_batch", spark_sql_batch)
311
+ if state is not None:
312
+ pulumi.set(__self__, "state", state)
313
+ if state_histories is not None:
314
+ pulumi.set(__self__, "state_histories", state_histories)
315
+ if state_message is not None:
316
+ pulumi.set(__self__, "state_message", state_message)
317
+ if state_time is not None:
318
+ pulumi.set(__self__, "state_time", state_time)
319
+ if uuid is not None:
320
+ pulumi.set(__self__, "uuid", uuid)
321
+
322
+ @property
323
+ @pulumi.getter(name="batchId")
324
+ def batch_id(self) -> Optional[pulumi.Input[str]]:
325
+ """
326
+ The ID to use for the batch, which will become the final component of the batch's resource name.
327
+ This value must be 4-63 characters. Valid characters are /[a-z][0-9]-/.
328
+ """
329
+ return pulumi.get(self, "batch_id")
330
+
331
+ @batch_id.setter
332
+ def batch_id(self, value: Optional[pulumi.Input[str]]):
333
+ pulumi.set(self, "batch_id", value)
334
+
335
+ @property
336
+ @pulumi.getter(name="createTime")
337
+ def create_time(self) -> Optional[pulumi.Input[str]]:
338
+ """
339
+ The time when the batch was created.
340
+ """
341
+ return pulumi.get(self, "create_time")
342
+
343
+ @create_time.setter
344
+ def create_time(self, value: Optional[pulumi.Input[str]]):
345
+ pulumi.set(self, "create_time", value)
346
+
347
+ @property
348
+ @pulumi.getter
349
+ def creator(self) -> Optional[pulumi.Input[str]]:
350
+ """
351
+ The email address of the user who created the batch.
352
+ """
353
+ return pulumi.get(self, "creator")
354
+
355
+ @creator.setter
356
+ def creator(self, value: Optional[pulumi.Input[str]]):
357
+ pulumi.set(self, "creator", value)
358
+
359
+ @property
360
+ @pulumi.getter(name="effectiveLabels")
361
+ def effective_labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
362
+ """
363
+ All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
364
+ """
365
+ return pulumi.get(self, "effective_labels")
366
+
367
+ @effective_labels.setter
368
+ def effective_labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
369
+ pulumi.set(self, "effective_labels", value)
370
+
371
+ @property
372
+ @pulumi.getter(name="environmentConfig")
373
+ def environment_config(self) -> Optional[pulumi.Input['BatchEnvironmentConfigArgs']]:
374
+ """
375
+ Environment configuration for the batch execution.
376
+ Structure is documented below.
377
+ """
378
+ return pulumi.get(self, "environment_config")
379
+
380
+ @environment_config.setter
381
+ def environment_config(self, value: Optional[pulumi.Input['BatchEnvironmentConfigArgs']]):
382
+ pulumi.set(self, "environment_config", value)
383
+
384
+ @property
385
+ @pulumi.getter
386
+ def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
387
+ """
388
+ The labels to associate with this batch.
389
+
390
+ **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
391
+ Please refer to the field `effective_labels` for all of the labels present on the resource.
392
+ """
393
+ return pulumi.get(self, "labels")
394
+
395
+ @labels.setter
396
+ def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
397
+ pulumi.set(self, "labels", value)
398
+
399
+ @property
400
+ @pulumi.getter
401
+ def location(self) -> Optional[pulumi.Input[str]]:
402
+ """
403
+ The location in which the batch will be created in.
404
+ """
405
+ return pulumi.get(self, "location")
406
+
407
+ @location.setter
408
+ def location(self, value: Optional[pulumi.Input[str]]):
409
+ pulumi.set(self, "location", value)
410
+
411
+ @property
412
+ @pulumi.getter
413
+ def name(self) -> Optional[pulumi.Input[str]]:
414
+ """
415
+ The resource name of the batch.
416
+ """
417
+ return pulumi.get(self, "name")
418
+
419
+ @name.setter
420
+ def name(self, value: Optional[pulumi.Input[str]]):
421
+ pulumi.set(self, "name", value)
422
+
423
+ @property
424
+ @pulumi.getter
425
+ def operation(self) -> Optional[pulumi.Input[str]]:
426
+ """
427
+ The resource name of the operation associated with this batch.
428
+ """
429
+ return pulumi.get(self, "operation")
430
+
431
+ @operation.setter
432
+ def operation(self, value: Optional[pulumi.Input[str]]):
433
+ pulumi.set(self, "operation", value)
434
+
435
+ @property
436
+ @pulumi.getter
437
+ def project(self) -> Optional[pulumi.Input[str]]:
438
+ """
439
+ The ID of the project in which the resource belongs.
440
+ If it is not provided, the provider project is used.
441
+ """
442
+ return pulumi.get(self, "project")
443
+
444
+ @project.setter
445
+ def project(self, value: Optional[pulumi.Input[str]]):
446
+ pulumi.set(self, "project", value)
447
+
448
+ @property
449
+ @pulumi.getter(name="pulumiLabels")
450
+ def pulumi_labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
451
+ """
452
+ The combination of labels configured directly on the resource
453
+ and default labels configured on the provider.
454
+ """
455
+ return pulumi.get(self, "pulumi_labels")
456
+
457
+ @pulumi_labels.setter
458
+ def pulumi_labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
459
+ pulumi.set(self, "pulumi_labels", value)
460
+
461
+ @property
462
+ @pulumi.getter(name="pysparkBatch")
463
+ def pyspark_batch(self) -> Optional[pulumi.Input['BatchPysparkBatchArgs']]:
464
+ """
465
+ PySpark batch config.
466
+ Structure is documented below.
467
+ """
468
+ return pulumi.get(self, "pyspark_batch")
469
+
470
+ @pyspark_batch.setter
471
+ def pyspark_batch(self, value: Optional[pulumi.Input['BatchPysparkBatchArgs']]):
472
+ pulumi.set(self, "pyspark_batch", value)
473
+
474
+ @property
475
+ @pulumi.getter(name="runtimeConfig")
476
+ def runtime_config(self) -> Optional[pulumi.Input['BatchRuntimeConfigArgs']]:
477
+ """
478
+ Runtime configuration for the batch execution.
479
+ Structure is documented below.
480
+ """
481
+ return pulumi.get(self, "runtime_config")
482
+
483
+ @runtime_config.setter
484
+ def runtime_config(self, value: Optional[pulumi.Input['BatchRuntimeConfigArgs']]):
485
+ pulumi.set(self, "runtime_config", value)
486
+
487
+ @property
488
+ @pulumi.getter(name="runtimeInfos")
489
+ def runtime_infos(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BatchRuntimeInfoArgs']]]]:
490
+ """
491
+ Runtime information about batch execution.
492
+ Structure is documented below.
493
+ """
494
+ return pulumi.get(self, "runtime_infos")
495
+
496
+ @runtime_infos.setter
497
+ def runtime_infos(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['BatchRuntimeInfoArgs']]]]):
498
+ pulumi.set(self, "runtime_infos", value)
499
+
500
+ @property
501
+ @pulumi.getter(name="sparkBatch")
502
+ def spark_batch(self) -> Optional[pulumi.Input['BatchSparkBatchArgs']]:
503
+ """
504
+ Spark batch config.
505
+ Structure is documented below.
506
+ """
507
+ return pulumi.get(self, "spark_batch")
508
+
509
+ @spark_batch.setter
510
+ def spark_batch(self, value: Optional[pulumi.Input['BatchSparkBatchArgs']]):
511
+ pulumi.set(self, "spark_batch", value)
512
+
513
+ @property
514
+ @pulumi.getter(name="sparkRBatch")
515
+ def spark_r_batch(self) -> Optional[pulumi.Input['BatchSparkRBatchArgs']]:
516
+ """
517
+ SparkR batch config.
518
+ Structure is documented below.
519
+ """
520
+ return pulumi.get(self, "spark_r_batch")
521
+
522
+ @spark_r_batch.setter
523
+ def spark_r_batch(self, value: Optional[pulumi.Input['BatchSparkRBatchArgs']]):
524
+ pulumi.set(self, "spark_r_batch", value)
525
+
526
+ @property
527
+ @pulumi.getter(name="sparkSqlBatch")
528
+ def spark_sql_batch(self) -> Optional[pulumi.Input['BatchSparkSqlBatchArgs']]:
529
+ """
530
+ Spark SQL batch config.
531
+ Structure is documented below.
532
+ """
533
+ return pulumi.get(self, "spark_sql_batch")
534
+
535
+ @spark_sql_batch.setter
536
+ def spark_sql_batch(self, value: Optional[pulumi.Input['BatchSparkSqlBatchArgs']]):
537
+ pulumi.set(self, "spark_sql_batch", value)
538
+
539
+ @property
540
+ @pulumi.getter
541
+ def state(self) -> Optional[pulumi.Input[str]]:
542
+ """
543
+ (Output)
544
+ The state of the batch at this point in history. For possible values, see the [API documentation](https://cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.batches#State).
545
+ """
546
+ return pulumi.get(self, "state")
547
+
548
+ @state.setter
549
+ def state(self, value: Optional[pulumi.Input[str]]):
550
+ pulumi.set(self, "state", value)
551
+
552
+ @property
553
+ @pulumi.getter(name="stateHistories")
554
+ def state_histories(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BatchStateHistoryArgs']]]]:
555
+ """
556
+ Historical state information for the batch.
557
+ Structure is documented below.
558
+ """
559
+ return pulumi.get(self, "state_histories")
560
+
561
+ @state_histories.setter
562
+ def state_histories(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['BatchStateHistoryArgs']]]]):
563
+ pulumi.set(self, "state_histories", value)
564
+
565
+ @property
566
+ @pulumi.getter(name="stateMessage")
567
+ def state_message(self) -> Optional[pulumi.Input[str]]:
568
+ """
569
+ (Output)
570
+ Details about the state at this point in history.
571
+ """
572
+ return pulumi.get(self, "state_message")
573
+
574
+ @state_message.setter
575
+ def state_message(self, value: Optional[pulumi.Input[str]]):
576
+ pulumi.set(self, "state_message", value)
577
+
578
+ @property
579
+ @pulumi.getter(name="stateTime")
580
+ def state_time(self) -> Optional[pulumi.Input[str]]:
581
+ """
582
+ Batch state details, such as a failure description if the state is FAILED.
583
+ """
584
+ return pulumi.get(self, "state_time")
585
+
586
+ @state_time.setter
587
+ def state_time(self, value: Optional[pulumi.Input[str]]):
588
+ pulumi.set(self, "state_time", value)
589
+
590
+ @property
591
+ @pulumi.getter
592
+ def uuid(self) -> Optional[pulumi.Input[str]]:
593
+ """
594
+ A batch UUID (Unique Universal Identifier). The service generates this value when it creates the batch.
595
+ """
596
+ return pulumi.get(self, "uuid")
597
+
598
+ @uuid.setter
599
+ def uuid(self, value: Optional[pulumi.Input[str]]):
600
+ pulumi.set(self, "uuid", value)
601
+
602
+
603
+ class Batch(pulumi.CustomResource):
604
+ @overload
605
+ def __init__(__self__,
606
+ resource_name: str,
607
+ opts: Optional[pulumi.ResourceOptions] = None,
608
+ batch_id: Optional[pulumi.Input[str]] = None,
609
+ environment_config: Optional[pulumi.Input[Union['BatchEnvironmentConfigArgs', 'BatchEnvironmentConfigArgsDict']]] = None,
610
+ labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
611
+ location: Optional[pulumi.Input[str]] = None,
612
+ project: Optional[pulumi.Input[str]] = None,
613
+ pyspark_batch: Optional[pulumi.Input[Union['BatchPysparkBatchArgs', 'BatchPysparkBatchArgsDict']]] = None,
614
+ runtime_config: Optional[pulumi.Input[Union['BatchRuntimeConfigArgs', 'BatchRuntimeConfigArgsDict']]] = None,
615
+ spark_batch: Optional[pulumi.Input[Union['BatchSparkBatchArgs', 'BatchSparkBatchArgsDict']]] = None,
616
+ spark_r_batch: Optional[pulumi.Input[Union['BatchSparkRBatchArgs', 'BatchSparkRBatchArgsDict']]] = None,
617
+ spark_sql_batch: Optional[pulumi.Input[Union['BatchSparkSqlBatchArgs', 'BatchSparkSqlBatchArgsDict']]] = None,
618
+ __props__=None):
619
+ """
620
+ Dataproc Serverless Batches lets you run Spark workloads without requiring you to
621
+ provision and manage your own Dataproc cluster.
622
+
623
+ To get more information about Batch, see:
624
+
625
+ * [API documentation](https://cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.batches)
626
+ * How-to Guides
627
+ * [Dataproc Serverless Batches Intro](https://cloud.google.com/dataproc-serverless/docs/overview)
628
+
629
+ ## Example Usage
630
+
631
+ ### Dataproc Batch Spark
632
+
633
+ ```python
634
+ import pulumi
635
+ import pulumi_gcp as gcp
636
+
637
+ example_batch_spark = gcp.dataproc.Batch("example_batch_spark",
638
+ batch_id="tf-test-batch_75125",
639
+ location="us-central1",
640
+ labels={
641
+ "batch_test": "terraform",
642
+ },
643
+ runtime_config={
644
+ "properties": {
645
+ "spark_dynamic_allocation_enabled": "false",
646
+ "spark_executor_instances": "2",
647
+ },
648
+ },
649
+ environment_config={
650
+ "execution_config": {
651
+ "subnetwork_uri": "default",
652
+ "ttl": "3600s",
653
+ "network_tags": ["tag1"],
654
+ },
655
+ },
656
+ spark_batch={
657
+ "main_class": "org.apache.spark.examples.SparkPi",
658
+ "args": ["10"],
659
+ "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
660
+ })
661
+ ```
662
+ ### Dataproc Batch Spark Full
663
+
664
+ ```python
665
+ import pulumi
666
+ import pulumi_gcp as gcp
667
+
668
+ project = gcp.organizations.get_project()
669
+ gcs_account = gcp.storage.get_project_service_account()
670
+ bucket = gcp.storage.Bucket("bucket",
671
+ uniform_bucket_level_access=True,
672
+ name="dataproc-bucket",
673
+ location="US",
674
+ force_destroy=True)
675
+ key_ring = gcp.kms.KeyRing("key_ring",
676
+ name="example-keyring",
677
+ location="us-central1")
678
+ crypto_key = gcp.kms.CryptoKey("crypto_key",
679
+ name="example-key",
680
+ key_ring=key_ring.id,
681
+ purpose="ENCRYPT_DECRYPT")
682
+ crypto_key_member1 = gcp.kms.CryptoKeyIAMMember("crypto_key_member_1",
683
+ crypto_key_id=crypto_key.id,
684
+ role="roles/cloudkms.cryptoKeyEncrypterDecrypter",
685
+ member=f"serviceAccount:service-{project.number}@dataproc-accounts.iam.gserviceaccount.com")
686
+ ms = gcp.dataproc.MetastoreService("ms",
687
+ service_id="dataproc-batch",
688
+ location="us-central1",
689
+ port=9080,
690
+ tier="DEVELOPER",
691
+ maintenance_window={
692
+ "hour_of_day": 2,
693
+ "day_of_week": "SUNDAY",
694
+ },
695
+ hive_metastore_config={
696
+ "version": "3.1.2",
697
+ })
698
+ basic = gcp.dataproc.Cluster("basic",
699
+ name="dataproc-batch",
700
+ region="us-central1",
701
+ cluster_config={
702
+ "software_config": {
703
+ "override_properties": {
704
+ "dataproc_dataproc_allow_zero_workers": "true",
705
+ "spark_spark_history_fs_log_directory": bucket.name.apply(lambda name: f"gs://{name}/*/spark-job-history"),
706
+ },
707
+ },
708
+ "endpoint_config": {
709
+ "enable_http_port_access": True,
710
+ },
711
+ "master_config": {
712
+ "num_instances": 1,
713
+ "machine_type": "e2-standard-2",
714
+ "disk_config": {
715
+ "boot_disk_size_gb": 35,
716
+ },
717
+ },
718
+ "metastore_config": {
719
+ "dataproc_metastore_service": ms.name,
720
+ },
721
+ })
722
+ example_batch_spark = gcp.dataproc.Batch("example_batch_spark",
723
+ batch_id="dataproc-batch",
724
+ location="us-central1",
725
+ labels={
726
+ "batch_test": "terraform",
727
+ },
728
+ runtime_config={
729
+ "properties": {
730
+ "spark_dynamic_allocation_enabled": "false",
731
+ "spark_executor_instances": "2",
732
+ },
733
+ "version": "2.2",
734
+ },
735
+ environment_config={
736
+ "execution_config": {
737
+ "ttl": "3600s",
738
+ "network_tags": ["tag1"],
739
+ "kms_key": crypto_key.id,
740
+ "network_uri": "default",
741
+ "service_account": f"{project.number}-compute@developer.gserviceaccount.com",
742
+ "staging_bucket": bucket.name,
743
+ },
744
+ "peripherals_config": {
745
+ "metastore_service": ms.name,
746
+ "spark_history_server_config": {
747
+ "dataproc_cluster": basic.id,
748
+ },
749
+ },
750
+ },
751
+ spark_batch={
752
+ "main_class": "org.apache.spark.examples.SparkPi",
753
+ "args": ["10"],
754
+ "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
755
+ },
756
+ opts = pulumi.ResourceOptions(depends_on=[crypto_key_member1]))
757
+ ```
758
+ ### Dataproc Batch Sparksql
759
+
760
+ ```python
761
+ import pulumi
762
+ import pulumi_gcp as gcp
763
+
764
+ example_batch_sparsql = gcp.dataproc.Batch("example_batch_sparsql",
765
+ batch_id="tf-test-batch_88722",
766
+ location="us-central1",
767
+ runtime_config={
768
+ "properties": {
769
+ "spark_dynamic_allocation_enabled": "false",
770
+ "spark_executor_instances": "2",
771
+ },
772
+ },
773
+ environment_config={
774
+ "execution_config": {
775
+ "subnetwork_uri": "default",
776
+ },
777
+ },
778
+ spark_sql_batch={
779
+ "query_file_uri": "gs://dataproc-examples/spark-sql/natality/cigarette_correlations.sql",
780
+ "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
781
+ "query_variables": {
782
+ "name": "value",
783
+ },
784
+ })
785
+ ```
786
+ ### Dataproc Batch Pyspark
787
+
788
+ ```python
789
+ import pulumi
790
+ import pulumi_gcp as gcp
791
+
792
+ example_batch_pyspark = gcp.dataproc.Batch("example_batch_pyspark",
793
+ batch_id="tf-test-batch_39249",
794
+ location="us-central1",
795
+ runtime_config={
796
+ "properties": {
797
+ "spark_dynamic_allocation_enabled": "false",
798
+ "spark_executor_instances": "2",
799
+ },
800
+ },
801
+ environment_config={
802
+ "execution_config": {
803
+ "subnetwork_uri": "default",
804
+ },
805
+ },
806
+ pyspark_batch={
807
+ "main_python_file_uri": "https://storage.googleapis.com/terraform-batches/test_util.py",
808
+ "args": ["10"],
809
+ "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
810
+ "python_file_uris": ["gs://dataproc-examples/pyspark/hello-world/hello-world.py"],
811
+ "archive_uris": [
812
+ "https://storage.googleapis.com/terraform-batches/animals.txt.tar.gz#unpacked",
813
+ "https://storage.googleapis.com/terraform-batches/animals.txt.jar",
814
+ "https://storage.googleapis.com/terraform-batches/animals.txt",
815
+ ],
816
+ "file_uris": ["https://storage.googleapis.com/terraform-batches/people.txt"],
817
+ })
818
+ ```
819
+ ### Dataproc Batch Sparkr
820
+
821
+ ```python
822
+ import pulumi
823
+ import pulumi_gcp as gcp
824
+
825
+ example_batch_sparkr = gcp.dataproc.Batch("example_batch_sparkr",
826
+ batch_id="tf-test-batch_74391",
827
+ location="us-central1",
828
+ labels={
829
+ "batch_test": "terraform",
830
+ },
831
+ runtime_config={
832
+ "properties": {
833
+ "spark_dynamic_allocation_enabled": "false",
834
+ "spark_executor_instances": "2",
835
+ },
836
+ },
837
+ environment_config={
838
+ "execution_config": {
839
+ "subnetwork_uri": "default",
840
+ "ttl": "3600s",
841
+ "network_tags": ["tag1"],
842
+ },
843
+ },
844
+ spark_r_batch={
845
+ "main_r_file_uri": "https://storage.googleapis.com/terraform-batches/spark-r-flights.r",
846
+ "args": ["https://storage.googleapis.com/terraform-batches/flights.csv"],
847
+ })
848
+ ```
849
+
850
+ ## Import
851
+
852
+ Batch can be imported using any of these accepted formats:
853
+
854
+ * `projects/{{project}}/locations/{{location}}/batches/{{batch_id}}`
855
+
856
+ * `{{project}}/{{location}}/{{batch_id}}`
857
+
858
+ * `{{location}}/{{batch_id}}`
859
+
860
+ When using the `pulumi import` command, Batch can be imported using one of the formats above. For example:
861
+
862
+ ```sh
863
+ $ pulumi import gcp:dataproc/batch:Batch default projects/{{project}}/locations/{{location}}/batches/{{batch_id}}
864
+ ```
865
+
866
+ ```sh
867
+ $ pulumi import gcp:dataproc/batch:Batch default {{project}}/{{location}}/{{batch_id}}
868
+ ```
869
+
870
+ ```sh
871
+ $ pulumi import gcp:dataproc/batch:Batch default {{location}}/{{batch_id}}
872
+ ```
873
+
874
+ :param str resource_name: The name of the resource.
875
+ :param pulumi.ResourceOptions opts: Options for the resource.
876
+ :param pulumi.Input[str] batch_id: The ID to use for the batch, which will become the final component of the batch's resource name.
877
+ This value must be 4-63 characters. Valid characters are /[a-z][0-9]-/.
878
+ :param pulumi.Input[Union['BatchEnvironmentConfigArgs', 'BatchEnvironmentConfigArgsDict']] environment_config: Environment configuration for the batch execution.
879
+ Structure is documented below.
880
+ :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: The labels to associate with this batch.
881
+
882
+ **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
883
+ Please refer to the field `effective_labels` for all of the labels present on the resource.
884
+ :param pulumi.Input[str] location: The location in which the batch will be created in.
885
+ :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
886
+ If it is not provided, the provider project is used.
887
+ :param pulumi.Input[Union['BatchPysparkBatchArgs', 'BatchPysparkBatchArgsDict']] pyspark_batch: PySpark batch config.
888
+ Structure is documented below.
889
+ :param pulumi.Input[Union['BatchRuntimeConfigArgs', 'BatchRuntimeConfigArgsDict']] runtime_config: Runtime configuration for the batch execution.
890
+ Structure is documented below.
891
+ :param pulumi.Input[Union['BatchSparkBatchArgs', 'BatchSparkBatchArgsDict']] spark_batch: Spark batch config.
892
+ Structure is documented below.
893
+ :param pulumi.Input[Union['BatchSparkRBatchArgs', 'BatchSparkRBatchArgsDict']] spark_r_batch: SparkR batch config.
894
+ Structure is documented below.
895
+ :param pulumi.Input[Union['BatchSparkSqlBatchArgs', 'BatchSparkSqlBatchArgsDict']] spark_sql_batch: Spark SQL batch config.
896
+ Structure is documented below.
897
+ """
898
+ ...
899
+ @overload
900
+ def __init__(__self__,
901
+ resource_name: str,
902
+ args: Optional[BatchArgs] = None,
903
+ opts: Optional[pulumi.ResourceOptions] = None):
904
+ """
905
+ Dataproc Serverless Batches lets you run Spark workloads without requiring you to
906
+ provision and manage your own Dataproc cluster.
907
+
908
+ To get more information about Batch, see:
909
+
910
+ * [API documentation](https://cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.batches)
911
+ * How-to Guides
912
+ * [Dataproc Serverless Batches Intro](https://cloud.google.com/dataproc-serverless/docs/overview)
913
+
914
+ ## Example Usage
915
+
916
+ ### Dataproc Batch Spark
917
+
918
+ ```python
919
+ import pulumi
920
+ import pulumi_gcp as gcp
921
+
922
+ example_batch_spark = gcp.dataproc.Batch("example_batch_spark",
923
+ batch_id="tf-test-batch_75125",
924
+ location="us-central1",
925
+ labels={
926
+ "batch_test": "terraform",
927
+ },
928
+ runtime_config={
929
+ "properties": {
930
+ "spark_dynamic_allocation_enabled": "false",
931
+ "spark_executor_instances": "2",
932
+ },
933
+ },
934
+ environment_config={
935
+ "execution_config": {
936
+ "subnetwork_uri": "default",
937
+ "ttl": "3600s",
938
+ "network_tags": ["tag1"],
939
+ },
940
+ },
941
+ spark_batch={
942
+ "main_class": "org.apache.spark.examples.SparkPi",
943
+ "args": ["10"],
944
+ "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
945
+ })
946
+ ```
947
+ ### Dataproc Batch Spark Full
948
+
949
+ ```python
950
+ import pulumi
951
+ import pulumi_gcp as gcp
952
+
953
+ project = gcp.organizations.get_project()
954
+ gcs_account = gcp.storage.get_project_service_account()
955
+ bucket = gcp.storage.Bucket("bucket",
956
+ uniform_bucket_level_access=True,
957
+ name="dataproc-bucket",
958
+ location="US",
959
+ force_destroy=True)
960
+ key_ring = gcp.kms.KeyRing("key_ring",
961
+ name="example-keyring",
962
+ location="us-central1")
963
+ crypto_key = gcp.kms.CryptoKey("crypto_key",
964
+ name="example-key",
965
+ key_ring=key_ring.id,
966
+ purpose="ENCRYPT_DECRYPT")
967
+ crypto_key_member1 = gcp.kms.CryptoKeyIAMMember("crypto_key_member_1",
968
+ crypto_key_id=crypto_key.id,
969
+ role="roles/cloudkms.cryptoKeyEncrypterDecrypter",
970
+ member=f"serviceAccount:service-{project.number}@dataproc-accounts.iam.gserviceaccount.com")
971
+ ms = gcp.dataproc.MetastoreService("ms",
972
+ service_id="dataproc-batch",
973
+ location="us-central1",
974
+ port=9080,
975
+ tier="DEVELOPER",
976
+ maintenance_window={
977
+ "hour_of_day": 2,
978
+ "day_of_week": "SUNDAY",
979
+ },
980
+ hive_metastore_config={
981
+ "version": "3.1.2",
982
+ })
983
+ basic = gcp.dataproc.Cluster("basic",
984
+ name="dataproc-batch",
985
+ region="us-central1",
986
+ cluster_config={
987
+ "software_config": {
988
+ "override_properties": {
989
+ "dataproc_dataproc_allow_zero_workers": "true",
990
+ "spark_spark_history_fs_log_directory": bucket.name.apply(lambda name: f"gs://{name}/*/spark-job-history"),
991
+ },
992
+ },
993
+ "endpoint_config": {
994
+ "enable_http_port_access": True,
995
+ },
996
+ "master_config": {
997
+ "num_instances": 1,
998
+ "machine_type": "e2-standard-2",
999
+ "disk_config": {
1000
+ "boot_disk_size_gb": 35,
1001
+ },
1002
+ },
1003
+ "metastore_config": {
1004
+ "dataproc_metastore_service": ms.name,
1005
+ },
1006
+ })
1007
+ example_batch_spark = gcp.dataproc.Batch("example_batch_spark",
1008
+ batch_id="dataproc-batch",
1009
+ location="us-central1",
1010
+ labels={
1011
+ "batch_test": "terraform",
1012
+ },
1013
+ runtime_config={
1014
+ "properties": {
1015
+ "spark_dynamic_allocation_enabled": "false",
1016
+ "spark_executor_instances": "2",
1017
+ },
1018
+ "version": "2.2",
1019
+ },
1020
+ environment_config={
1021
+ "execution_config": {
1022
+ "ttl": "3600s",
1023
+ "network_tags": ["tag1"],
1024
+ "kms_key": crypto_key.id,
1025
+ "network_uri": "default",
1026
+ "service_account": f"{project.number}-compute@developer.gserviceaccount.com",
1027
+ "staging_bucket": bucket.name,
1028
+ },
1029
+ "peripherals_config": {
1030
+ "metastore_service": ms.name,
1031
+ "spark_history_server_config": {
1032
+ "dataproc_cluster": basic.id,
1033
+ },
1034
+ },
1035
+ },
1036
+ spark_batch={
1037
+ "main_class": "org.apache.spark.examples.SparkPi",
1038
+ "args": ["10"],
1039
+ "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
1040
+ },
1041
+ opts = pulumi.ResourceOptions(depends_on=[crypto_key_member1]))
1042
+ ```
1043
+ ### Dataproc Batch Sparksql
1044
+
1045
+ ```python
1046
+ import pulumi
1047
+ import pulumi_gcp as gcp
1048
+
1049
+ example_batch_sparsql = gcp.dataproc.Batch("example_batch_sparsql",
1050
+ batch_id="tf-test-batch_88722",
1051
+ location="us-central1",
1052
+ runtime_config={
1053
+ "properties": {
1054
+ "spark_dynamic_allocation_enabled": "false",
1055
+ "spark_executor_instances": "2",
1056
+ },
1057
+ },
1058
+ environment_config={
1059
+ "execution_config": {
1060
+ "subnetwork_uri": "default",
1061
+ },
1062
+ },
1063
+ spark_sql_batch={
1064
+ "query_file_uri": "gs://dataproc-examples/spark-sql/natality/cigarette_correlations.sql",
1065
+ "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
1066
+ "query_variables": {
1067
+ "name": "value",
1068
+ },
1069
+ })
1070
+ ```
1071
+ ### Dataproc Batch Pyspark
1072
+
1073
+ ```python
1074
+ import pulumi
1075
+ import pulumi_gcp as gcp
1076
+
1077
+ example_batch_pyspark = gcp.dataproc.Batch("example_batch_pyspark",
1078
+ batch_id="tf-test-batch_39249",
1079
+ location="us-central1",
1080
+ runtime_config={
1081
+ "properties": {
1082
+ "spark_dynamic_allocation_enabled": "false",
1083
+ "spark_executor_instances": "2",
1084
+ },
1085
+ },
1086
+ environment_config={
1087
+ "execution_config": {
1088
+ "subnetwork_uri": "default",
1089
+ },
1090
+ },
1091
+ pyspark_batch={
1092
+ "main_python_file_uri": "https://storage.googleapis.com/terraform-batches/test_util.py",
1093
+ "args": ["10"],
1094
+ "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
1095
+ "python_file_uris": ["gs://dataproc-examples/pyspark/hello-world/hello-world.py"],
1096
+ "archive_uris": [
1097
+ "https://storage.googleapis.com/terraform-batches/animals.txt.tar.gz#unpacked",
1098
+ "https://storage.googleapis.com/terraform-batches/animals.txt.jar",
1099
+ "https://storage.googleapis.com/terraform-batches/animals.txt",
1100
+ ],
1101
+ "file_uris": ["https://storage.googleapis.com/terraform-batches/people.txt"],
1102
+ })
1103
+ ```
1104
+ ### Dataproc Batch Sparkr
1105
+
1106
+ ```python
1107
+ import pulumi
1108
+ import pulumi_gcp as gcp
1109
+
1110
+ example_batch_sparkr = gcp.dataproc.Batch("example_batch_sparkr",
1111
+ batch_id="tf-test-batch_74391",
1112
+ location="us-central1",
1113
+ labels={
1114
+ "batch_test": "terraform",
1115
+ },
1116
+ runtime_config={
1117
+ "properties": {
1118
+ "spark_dynamic_allocation_enabled": "false",
1119
+ "spark_executor_instances": "2",
1120
+ },
1121
+ },
1122
+ environment_config={
1123
+ "execution_config": {
1124
+ "subnetwork_uri": "default",
1125
+ "ttl": "3600s",
1126
+ "network_tags": ["tag1"],
1127
+ },
1128
+ },
1129
+ spark_r_batch={
1130
+ "main_r_file_uri": "https://storage.googleapis.com/terraform-batches/spark-r-flights.r",
1131
+ "args": ["https://storage.googleapis.com/terraform-batches/flights.csv"],
1132
+ })
1133
+ ```
1134
+
1135
+ ## Import
1136
+
1137
+ Batch can be imported using any of these accepted formats:
1138
+
1139
+ * `projects/{{project}}/locations/{{location}}/batches/{{batch_id}}`
1140
+
1141
+ * `{{project}}/{{location}}/{{batch_id}}`
1142
+
1143
+ * `{{location}}/{{batch_id}}`
1144
+
1145
+ When using the `pulumi import` command, Batch can be imported using one of the formats above. For example:
1146
+
1147
+ ```sh
1148
+ $ pulumi import gcp:dataproc/batch:Batch default projects/{{project}}/locations/{{location}}/batches/{{batch_id}}
1149
+ ```
1150
+
1151
+ ```sh
1152
+ $ pulumi import gcp:dataproc/batch:Batch default {{project}}/{{location}}/{{batch_id}}
1153
+ ```
1154
+
1155
+ ```sh
1156
+ $ pulumi import gcp:dataproc/batch:Batch default {{location}}/{{batch_id}}
1157
+ ```
1158
+
1159
+ :param str resource_name: The name of the resource.
1160
+ :param BatchArgs args: The arguments to use to populate this resource's properties.
1161
+ :param pulumi.ResourceOptions opts: Options for the resource.
1162
+ """
1163
+ ...
1164
+ def __init__(__self__, resource_name: str, *args, **kwargs):
1165
+ resource_args, opts = _utilities.get_resource_args_opts(BatchArgs, pulumi.ResourceOptions, *args, **kwargs)
1166
+ if resource_args is not None:
1167
+ __self__._internal_init(resource_name, opts, **resource_args.__dict__)
1168
+ else:
1169
+ __self__._internal_init(resource_name, *args, **kwargs)
1170
+
1171
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 batch_id: Optional[pulumi.Input[str]] = None,
                 environment_config: Optional[pulumi.Input[Union['BatchEnvironmentConfigArgs', 'BatchEnvironmentConfigArgsDict']]] = None,
                 labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 pyspark_batch: Optional[pulumi.Input[Union['BatchPysparkBatchArgs', 'BatchPysparkBatchArgsDict']]] = None,
                 runtime_config: Optional[pulumi.Input[Union['BatchRuntimeConfigArgs', 'BatchRuntimeConfigArgsDict']]] = None,
                 spark_batch: Optional[pulumi.Input[Union['BatchSparkBatchArgs', 'BatchSparkBatchArgsDict']]] = None,
                 spark_r_batch: Optional[pulumi.Input[Union['BatchSparkRBatchArgs', 'BatchSparkRBatchArgsDict']]] = None,
                 spark_sql_batch: Optional[pulumi.Input[Union['BatchSparkSqlBatchArgs', 'BatchSparkSqlBatchArgsDict']]] = None,
                 __props__=None):
        """
        Register a ``gcp:dataproc/batch:Batch`` resource with the Pulumi engine.

        Builds the input property bag from the supplied arguments, declares
        the output-only properties, and hands everything to the base
        ``CustomResource`` initializer. ``__props__`` is reserved for the
        ``get()`` lookup path and must be ``None`` when creating a resource.
        """
        # Layer the caller's options on top of provider-level defaults.
        opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.id is None:
            # No opts.id => creating a new resource (not looking one up).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = BatchArgs.__new__(BatchArgs)

            # Input properties supplied by the user.
            __props__.__dict__["batch_id"] = batch_id
            __props__.__dict__["environment_config"] = environment_config
            __props__.__dict__["labels"] = labels
            __props__.__dict__["location"] = location
            __props__.__dict__["project"] = project
            __props__.__dict__["pyspark_batch"] = pyspark_batch
            __props__.__dict__["runtime_config"] = runtime_config
            __props__.__dict__["spark_batch"] = spark_batch
            __props__.__dict__["spark_r_batch"] = spark_r_batch
            __props__.__dict__["spark_sql_batch"] = spark_sql_batch
            # Output-only properties: initialized to None, filled in by the
            # provider once the batch exists.
            __props__.__dict__["create_time"] = None
            __props__.__dict__["creator"] = None
            __props__.__dict__["effective_labels"] = None
            __props__.__dict__["name"] = None
            __props__.__dict__["operation"] = None
            __props__.__dict__["pulumi_labels"] = None
            __props__.__dict__["runtime_infos"] = None
            __props__.__dict__["state"] = None
            __props__.__dict__["state_histories"] = None
            __props__.__dict__["state_message"] = None
            __props__.__dict__["state_time"] = None
            __props__.__dict__["uuid"] = None
        # Label outputs may carry provider-default labels; treat them as secret.
        secret_opts = pulumi.ResourceOptions(additional_secret_outputs=["effectiveLabels", "pulumiLabels"])
        opts = pulumi.ResourceOptions.merge(opts, secret_opts)
        super(Batch, __self__).__init__(
            'gcp:dataproc/batch:Batch',
            resource_name,
            __props__,
            opts)
1222
+
1223
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            batch_id: Optional[pulumi.Input[str]] = None,
            create_time: Optional[pulumi.Input[str]] = None,
            creator: Optional[pulumi.Input[str]] = None,
            effective_labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            environment_config: Optional[pulumi.Input[Union['BatchEnvironmentConfigArgs', 'BatchEnvironmentConfigArgsDict']]] = None,
            labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            location: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            operation: Optional[pulumi.Input[str]] = None,
            project: Optional[pulumi.Input[str]] = None,
            pulumi_labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            pyspark_batch: Optional[pulumi.Input[Union['BatchPysparkBatchArgs', 'BatchPysparkBatchArgsDict']]] = None,
            runtime_config: Optional[pulumi.Input[Union['BatchRuntimeConfigArgs', 'BatchRuntimeConfigArgsDict']]] = None,
            runtime_infos: Optional[pulumi.Input[Sequence[pulumi.Input[Union['BatchRuntimeInfoArgs', 'BatchRuntimeInfoArgsDict']]]]] = None,
            spark_batch: Optional[pulumi.Input[Union['BatchSparkBatchArgs', 'BatchSparkBatchArgsDict']]] = None,
            spark_r_batch: Optional[pulumi.Input[Union['BatchSparkRBatchArgs', 'BatchSparkRBatchArgsDict']]] = None,
            spark_sql_batch: Optional[pulumi.Input[Union['BatchSparkSqlBatchArgs', 'BatchSparkSqlBatchArgsDict']]] = None,
            state: Optional[pulumi.Input[str]] = None,
            state_histories: Optional[pulumi.Input[Sequence[pulumi.Input[Union['BatchStateHistoryArgs', 'BatchStateHistoryArgsDict']]]]] = None,
            state_message: Optional[pulumi.Input[str]] = None,
            state_time: Optional[pulumi.Input[str]] = None,
            uuid: Optional[pulumi.Input[str]] = None) -> 'Batch':
        """
        Get an existing Batch resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] batch_id: The ID to use for the batch, which will become the final component of the batch's resource name.
               This value must be 4-63 characters. Valid characters are /[a-z][0-9]-/.
        :param pulumi.Input[str] create_time: The time when the batch was created.
        :param pulumi.Input[str] creator: The email address of the user who created the batch.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] effective_labels: All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
        :param pulumi.Input[Union['BatchEnvironmentConfigArgs', 'BatchEnvironmentConfigArgsDict']] environment_config: Environment configuration for the batch execution.
               Structure is documented below.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: The labels to associate with this batch.

               **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
               Please refer to the field `effective_labels` for all of the labels present on the resource.
        :param pulumi.Input[str] location: The location in which the batch will be created in.
        :param pulumi.Input[str] name: The resource name of the batch.
        :param pulumi.Input[str] operation: The resource name of the operation associated with this batch.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] pulumi_labels: The combination of labels configured directly on the resource
               and default labels configured on the provider.
        :param pulumi.Input[Union['BatchPysparkBatchArgs', 'BatchPysparkBatchArgsDict']] pyspark_batch: PySpark batch config.
               Structure is documented below.
        :param pulumi.Input[Union['BatchRuntimeConfigArgs', 'BatchRuntimeConfigArgsDict']] runtime_config: Runtime configuration for the batch execution.
               Structure is documented below.
        :param pulumi.Input[Sequence[pulumi.Input[Union['BatchRuntimeInfoArgs', 'BatchRuntimeInfoArgsDict']]]] runtime_infos: Runtime information about batch execution.
               Structure is documented below.
        :param pulumi.Input[Union['BatchSparkBatchArgs', 'BatchSparkBatchArgsDict']] spark_batch: Spark batch config.
               Structure is documented below.
        :param pulumi.Input[Union['BatchSparkRBatchArgs', 'BatchSparkRBatchArgsDict']] spark_r_batch: SparkR batch config.
               Structure is documented below.
        :param pulumi.Input[Union['BatchSparkSqlBatchArgs', 'BatchSparkSqlBatchArgsDict']] spark_sql_batch: Spark SQL batch config.
               Structure is documented below.
        :param pulumi.Input[str] state: (Output)
               The state of the batch at this point in history. For possible values, see the [API documentation](https://cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.batches#State).
        :param pulumi.Input[str] state_histories: Historical state information for the batch.
               Structure is documented below.
        :param pulumi.Input[str] state_message: (Output)
               Details about the state at this point in history.
        :param pulumi.Input[str] state_time: (Output)
               The time when the batch entered its current state.
        :param pulumi.Input[str] uuid: A batch UUID (Unique Universal Identifier). The service generates this value when it creates the batch.
        """
        # Attach the provider ID so the engine performs a read, not a create.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _BatchState.__new__(_BatchState)

        # Seed the state bag with any caller-supplied qualifying values.
        __props__.__dict__["batch_id"] = batch_id
        __props__.__dict__["create_time"] = create_time
        __props__.__dict__["creator"] = creator
        __props__.__dict__["effective_labels"] = effective_labels
        __props__.__dict__["environment_config"] = environment_config
        __props__.__dict__["labels"] = labels
        __props__.__dict__["location"] = location
        __props__.__dict__["name"] = name
        __props__.__dict__["operation"] = operation
        __props__.__dict__["project"] = project
        __props__.__dict__["pulumi_labels"] = pulumi_labels
        __props__.__dict__["pyspark_batch"] = pyspark_batch
        __props__.__dict__["runtime_config"] = runtime_config
        __props__.__dict__["runtime_infos"] = runtime_infos
        __props__.__dict__["spark_batch"] = spark_batch
        __props__.__dict__["spark_r_batch"] = spark_r_batch
        __props__.__dict__["spark_sql_batch"] = spark_sql_batch
        __props__.__dict__["state"] = state
        __props__.__dict__["state_histories"] = state_histories
        __props__.__dict__["state_message"] = state_message
        __props__.__dict__["state_time"] = state_time
        __props__.__dict__["uuid"] = uuid
        return Batch(resource_name, opts=opts, __props__=__props__)
1322
+
1323
    @property
    @pulumi.getter(name="batchId")
    def batch_id(self) -> pulumi.Output[Optional[str]]:
        """
        The ID to use for the batch, which will become the final component of the batch's resource name.
        This value must be 4-63 characters. Valid characters are /[a-z][0-9]-/.
        Optional input; absent when not set at creation.
        """
        return pulumi.get(self, "batch_id")
1331
+
1332
    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> pulumi.Output[str]:
        """
        The time when the batch was created.
        Output-only: computed by the service, not settable at creation.
        """
        return pulumi.get(self, "create_time")
1339
+
1340
    @property
    @pulumi.getter
    def creator(self) -> pulumi.Output[str]:
        """
        The email address of the user who created the batch.
        Output-only: computed by the service, not settable at creation.
        """
        return pulumi.get(self, "creator")
1347
+
1348
    @property
    @pulumi.getter(name="effectiveLabels")
    def effective_labels(self) -> pulumi.Output[Mapping[str, str]]:
        """
        All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
        Output-only, and treated as a secret output by this resource.
        """
        return pulumi.get(self, "effective_labels")
1355
+
1356
    @property
    @pulumi.getter(name="environmentConfig")
    def environment_config(self) -> pulumi.Output[Optional['outputs.BatchEnvironmentConfig']]:
        """
        Environment configuration for the batch execution
        (execution and peripherals configuration).
        Structure is documented below.
        """
        return pulumi.get(self, "environment_config")
1364
+
1365
    @property
    @pulumi.getter
    def labels(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        The labels to associate with this batch.

        **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
        Please refer to the field `effective_labels` for all of the labels present on the resource.
        """
        return pulumi.get(self, "labels")
1375
+
1376
    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[Optional[str]]:
        """
        The location (region) in which the batch is created.
        Optional input; absent when not set at creation.
        """
        return pulumi.get(self, "location")
1383
+
1384
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The fully qualified resource name of the batch.
        Output-only: computed by the service, not settable at creation.
        """
        return pulumi.get(self, "name")
1391
+
1392
    @property
    @pulumi.getter
    def operation(self) -> pulumi.Output[str]:
        """
        The resource name of the operation associated with this batch.
        Output-only: computed by the service, not settable at creation.
        """
        return pulumi.get(self, "operation")
1399
+
1400
    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")
1408
+
1409
    @property
    @pulumi.getter(name="pulumiLabels")
    def pulumi_labels(self) -> pulumi.Output[Mapping[str, str]]:
        """
        The combination of labels configured directly on the resource
        and default labels configured on the provider.
        Output-only, and treated as a secret output by this resource.
        """
        return pulumi.get(self, "pulumi_labels")
1417
+
1418
    @property
    @pulumi.getter(name="pysparkBatch")
    def pyspark_batch(self) -> pulumi.Output[Optional['outputs.BatchPysparkBatch']]:
        """
        PySpark batch config (one of the mutually exclusive workload configs).
        Structure is documented below.
        """
        return pulumi.get(self, "pyspark_batch")
1426
+
1427
    @property
    @pulumi.getter(name="runtimeConfig")
    def runtime_config(self) -> pulumi.Output[Optional['outputs.BatchRuntimeConfig']]:
        """
        Runtime configuration for the batch execution
        (runtime version and Spark properties).
        Structure is documented below.
        """
        return pulumi.get(self, "runtime_config")
1435
+
1436
    @property
    @pulumi.getter(name="runtimeInfos")
    def runtime_infos(self) -> pulumi.Output[Sequence['outputs.BatchRuntimeInfo']]:
        """
        Runtime information about batch execution.
        Output-only: computed by the service, not settable at creation.
        Structure is documented below.
        """
        return pulumi.get(self, "runtime_infos")
1444
+
1445
    @property
    @pulumi.getter(name="sparkBatch")
    def spark_batch(self) -> pulumi.Output[Optional['outputs.BatchSparkBatch']]:
        """
        Spark batch config (one of the mutually exclusive workload configs).
        Structure is documented below.
        """
        return pulumi.get(self, "spark_batch")
1453
+
1454
    @property
    @pulumi.getter(name="sparkRBatch")
    def spark_r_batch(self) -> pulumi.Output[Optional['outputs.BatchSparkRBatch']]:
        """
        SparkR batch config (one of the mutually exclusive workload configs).
        Structure is documented below.
        """
        return pulumi.get(self, "spark_r_batch")
1462
+
1463
    @property
    @pulumi.getter(name="sparkSqlBatch")
    def spark_sql_batch(self) -> pulumi.Output[Optional['outputs.BatchSparkSqlBatch']]:
        """
        Spark SQL batch config (one of the mutually exclusive workload configs).
        Structure is documented below.
        """
        return pulumi.get(self, "spark_sql_batch")
1471
+
1472
    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[str]:
        """
        (Output)
        The current state of the batch. For possible values, see the [API documentation](https://cloud.google.com/dataproc-serverless/docs/reference/rest/v1/projects.locations.batches#State).
        """
        return pulumi.get(self, "state")
1480
+
1481
    @property
    @pulumi.getter(name="stateHistories")
    def state_histories(self) -> pulumi.Output[Sequence['outputs.BatchStateHistory']]:
        """
        Historical state information for the batch.
        Output-only: computed by the service, not settable at creation.
        Structure is documented below.
        """
        return pulumi.get(self, "state_histories")
1489
+
1490
    @property
    @pulumi.getter(name="stateMessage")
    def state_message(self) -> pulumi.Output[str]:
        """
        (Output)
        Details about the current state, such as a failure description if the state is FAILED.
        """
        return pulumi.get(self, "state_message")
1498
+
1499
    @property
    @pulumi.getter(name="stateTime")
    def state_time(self) -> pulumi.Output[str]:
        """
        (Output)
        The time when the batch entered its current state.
        NOTE(review): the generated docstring here described `state_message`
        ("Batch state details, such as a failure description…"); per the
        Dataproc Batches API, `stateTime` is a timestamp.
        """
        return pulumi.get(self, "state_time")
1506
+
1507
    @property
    @pulumi.getter
    def uuid(self) -> pulumi.Output[str]:
        """
        A batch UUID (Unique Universal Identifier). The service generates this value when it creates the batch.
        Output-only: computed by the service, not settable at creation.
        """
        return pulumi.get(self, "uuid")
1514
+