pulumi-gcp 8.10.0a1731950704__py3-none-any.whl → 8.10.0a1732125494__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. pulumi_gcp/__init__.py +24 -0
  2. pulumi_gcp/accesscontextmanager/__init__.py +1 -0
  3. pulumi_gcp/accesscontextmanager/_inputs.py +90 -54
  4. pulumi_gcp/accesscontextmanager/get_access_policy.py +158 -0
  5. pulumi_gcp/accesscontextmanager/outputs.py +60 -36
  6. pulumi_gcp/artifactregistry/_inputs.py +56 -0
  7. pulumi_gcp/artifactregistry/outputs.py +65 -0
  8. pulumi_gcp/artifactregistry/repository.py +48 -0
  9. pulumi_gcp/backupdisasterrecovery/__init__.py +1 -0
  10. pulumi_gcp/backupdisasterrecovery/backup_vault.py +63 -0
  11. pulumi_gcp/backupdisasterrecovery/get_data_source.py +263 -0
  12. pulumi_gcp/backupdisasterrecovery/outputs.py +402 -0
  13. pulumi_gcp/certificateauthority/authority.py +28 -138
  14. pulumi_gcp/cloudrun/service.py +0 -10
  15. pulumi_gcp/cloudrunv2/get_service.py +15 -4
  16. pulumi_gcp/cloudrunv2/service.py +30 -2
  17. pulumi_gcp/compute/_inputs.py +12 -12
  18. pulumi_gcp/compute/outputs.py +10 -10
  19. pulumi_gcp/dataproc/__init__.py +1 -0
  20. pulumi_gcp/dataproc/_inputs.py +490 -0
  21. pulumi_gcp/dataproc/gdc_spark_application.py +1658 -0
  22. pulumi_gcp/dataproc/outputs.py +385 -0
  23. pulumi_gcp/filestore/_inputs.py +152 -0
  24. pulumi_gcp/filestore/get_instance.py +12 -1
  25. pulumi_gcp/filestore/instance.py +47 -0
  26. pulumi_gcp/filestore/outputs.py +229 -0
  27. pulumi_gcp/iam/__init__.py +2 -0
  28. pulumi_gcp/iam/_inputs.py +274 -0
  29. pulumi_gcp/iam/folders_policy_binding.py +917 -0
  30. pulumi_gcp/iam/organizations_policy_binding.py +901 -0
  31. pulumi_gcp/iam/outputs.py +198 -0
  32. pulumi_gcp/managedkafka/cluster.py +4 -0
  33. pulumi_gcp/managedkafka/topic.py +4 -0
  34. pulumi_gcp/pubsub/subscription.py +8 -8
  35. pulumi_gcp/pulumi-plugin.json +1 -1
  36. pulumi_gcp/redis/_inputs.py +213 -0
  37. pulumi_gcp/redis/cluster.py +289 -0
  38. pulumi_gcp/redis/outputs.py +185 -0
  39. pulumi_gcp/securesourcemanager/_inputs.py +33 -0
  40. pulumi_gcp/securesourcemanager/instance.py +90 -3
  41. pulumi_gcp/securesourcemanager/outputs.py +19 -0
  42. pulumi_gcp/spanner/database.py +14 -14
  43. pulumi_gcp/sql/_inputs.py +73 -0
  44. pulumi_gcp/sql/database_instance.py +60 -0
  45. pulumi_gcp/sql/outputs.py +146 -0
  46. pulumi_gcp/sql/user.py +2 -2
  47. pulumi_gcp/tags/location_tag_binding.py +8 -8
  48. {pulumi_gcp-8.10.0a1731950704.dist-info → pulumi_gcp-8.10.0a1732125494.dist-info}/METADATA +1 -1
  49. {pulumi_gcp-8.10.0a1731950704.dist-info → pulumi_gcp-8.10.0a1732125494.dist-info}/RECORD +51 -46
  50. {pulumi_gcp-8.10.0a1731950704.dist-info → pulumi_gcp-8.10.0a1732125494.dist-info}/WHEEL +0 -0
  51. {pulumi_gcp-8.10.0a1731950704.dist-info → pulumi_gcp-8.10.0a1732125494.dist-info}/top_level.txt +0 -0
@@ -6957,7 +6957,7 @@ class ImageGuestOsFeature(dict):
  type: str):
  """
  :param str type: The type of supported feature. Read [Enabling guest operating system features](https://cloud.google.com/compute/docs/images/create-delete-deprecate-private-images#guest-os-features) to see a list of available options.
- Possible values are: `MULTI_IP_SUBNET`, `SECURE_BOOT`, `SEV_CAPABLE`, `UEFI_COMPATIBLE`, `VIRTIO_SCSI_MULTIQUEUE`, `WINDOWS`, `GVNIC`, `SEV_LIVE_MIGRATABLE`, `SEV_SNP_CAPABLE`, `SUSPEND_RESUME_COMPATIBLE`, `TDX_CAPABLE`, `SEV_LIVE_MIGRATABLE_V2`.
+ Possible values are: `MULTI_IP_SUBNET`, `SECURE_BOOT`, `SEV_CAPABLE`, `UEFI_COMPATIBLE`, `VIRTIO_SCSI_MULTIQUEUE`, `WINDOWS`, `GVNIC`, `IDPF`, `SEV_LIVE_MIGRATABLE`, `SEV_SNP_CAPABLE`, `SUSPEND_RESUME_COMPATIBLE`, `TDX_CAPABLE`, `SEV_LIVE_MIGRATABLE_V2`.
  """
  pulumi.set(__self__, "type", type)

@@ -6966,7 +6966,7 @@ class ImageGuestOsFeature(dict):
  def type(self) -> str:
  """
  The type of supported feature. Read [Enabling guest operating system features](https://cloud.google.com/compute/docs/images/create-delete-deprecate-private-images#guest-os-features) to see a list of available options.
- Possible values are: `MULTI_IP_SUBNET`, `SECURE_BOOT`, `SEV_CAPABLE`, `UEFI_COMPATIBLE`, `VIRTIO_SCSI_MULTIQUEUE`, `WINDOWS`, `GVNIC`, `SEV_LIVE_MIGRATABLE`, `SEV_SNP_CAPABLE`, `SUSPEND_RESUME_COMPATIBLE`, `TDX_CAPABLE`, `SEV_LIVE_MIGRATABLE_V2`.
+ Possible values are: `MULTI_IP_SUBNET`, `SECURE_BOOT`, `SEV_CAPABLE`, `UEFI_COMPATIBLE`, `VIRTIO_SCSI_MULTIQUEUE`, `WINDOWS`, `GVNIC`, `IDPF`, `SEV_LIVE_MIGRATABLE`, `SEV_SNP_CAPABLE`, `SUSPEND_RESUME_COMPATIBLE`, `TDX_CAPABLE`, `SEV_LIVE_MIGRATABLE_V2`.
  """
  return pulumi.get(self, "type")
 
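The two hunks above add `IDPF` to the feature types accepted by `ImageGuestOsFeature`. A minimal, hypothetical sketch of declaring the new value on a `gcp.compute.Image` (the project, zone, and disk names are placeholders, not taken from this diff):

```python
import pulumi_gcp as gcp

# Hypothetical image built from an existing disk; the source_disk path is a placeholder.
image = gcp.compute.Image(
    "idpf-enabled-image",
    source_disk="projects/my-project/zones/us-central1-a/disks/my-disk",
    guest_os_features=[
        gcp.compute.ImageGuestOsFeatureArgs(type="GVNIC"),
        gcp.compute.ImageGuestOsFeatureArgs(type="IDPF"),  # value newly documented in this release
    ],
)
```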
@@ -8461,7 +8461,7 @@ class InstanceFromMachineImageNetworkInterface(dict):
  :param str network: The name or self_link of the network attached to this interface.
  :param str network_attachment: The URL of the network attachment that this interface should connect to in the following format: projects/{projectNumber}/regions/{region_name}/networkAttachments/{network_attachment_name}.
  :param str network_ip: The private IP address assigned to the instance.
- :param str nic_type: The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET
+ :param str nic_type: The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET, IDPF
  :param int queue_count: The networking queue count that's specified by users for the network interface. Both Rx and Tx queues will be set to this number. It will be empty if not specified.
  :param str security_policy: A full or partial URL to a security policy to add to this instance. If this field is set to an empty string it will remove the associated security policy.
  :param str stack_type: The stack type for this network interface to identify whether the IPv6 feature is enabled or not. If not specified, IPV4_ONLY will be used.
@@ -8586,7 +8586,7 @@ class InstanceFromMachineImageNetworkInterface(dict):
  @pulumi.getter(name="nicType")
  def nic_type(self) -> Optional[str]:
  """
- The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET
+ The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET, IDPF
  """
  return pulumi.get(self, "nic_type")
 
@@ -10123,7 +10123,7 @@ class InstanceFromTemplateNetworkInterface(dict):
  :param str network: The name or self_link of the network attached to this interface.
  :param str network_attachment: The URL of the network attachment that this interface should connect to in the following format: projects/{projectNumber}/regions/{region_name}/networkAttachments/{network_attachment_name}.
  :param str network_ip: The private IP address assigned to the instance.
- :param str nic_type: The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET
+ :param str nic_type: The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET, IDPF
  :param int queue_count: The networking queue count that's specified by users for the network interface. Both Rx and Tx queues will be set to this number. It will be empty if not specified.
  :param str security_policy: A full or partial URL to a security policy to add to this instance. If this field is set to an empty string it will remove the associated security policy.
  :param str stack_type: The stack type for this network interface to identify whether the IPv6 feature is enabled or not. If not specified, IPV4_ONLY will be used.
@@ -10248,7 +10248,7 @@ class InstanceFromTemplateNetworkInterface(dict):
  @pulumi.getter(name="nicType")
  def nic_type(self) -> Optional[str]:
  """
- The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET
+ The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET, IDPF
  """
  return pulumi.get(self, "nic_type")
 
@@ -12290,7 +12290,7 @@ class InstanceNetworkInterface(dict):
  :param str network_attachment: The URL of the network attachment that this interface should connect to in the following format: `projects/{projectNumber}/regions/{region_name}/networkAttachments/{network_attachment_name}`.
  :param str network_ip: The private IP address to assign to the instance. If
  empty, the address will be automatically assigned.
- :param str nic_type: The type of vNIC to be used on this interface. Possible values: GVNIC, VIRTIO_NET.
+ :param str nic_type: The type of vNIC to be used on this interface. Possible values: GVNIC, VIRTIO_NET, IDPF.
  :param int queue_count: The networking queue count that's specified by users for the network interface. Both Rx and Tx queues will be set to this number. It will be empty if not specified.
  :param str security_policy: A full or partial URL to a security policy to add to this instance. If this field is set to an empty string it will remove the associated security policy.
  :param str stack_type: The stack type for this network interface to identify whether the IPv6 feature is enabled or not. Values are IPV4_IPV6 or IPV4_ONLY. If not specified, IPV4_ONLY will be used.
@@ -12432,7 +12432,7 @@ class InstanceNetworkInterface(dict):
  @pulumi.getter(name="nicType")
  def nic_type(self) -> Optional[str]:
  """
- The type of vNIC to be used on this interface. Possible values: GVNIC, VIRTIO_NET.
+ The type of vNIC to be used on this interface. Possible values: GVNIC, VIRTIO_NET, IDPF.
  """
  return pulumi.get(self, "nic_type")
 
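The hunks above make the same change to `InstanceFromMachineImageNetworkInterface`, `InstanceFromTemplateNetworkInterface`, and `InstanceNetworkInterface`: `IDPF` joins `GVNIC` and `VIRTIO_NET` as a documented `nic_type`. A minimal sketch, assuming a machine type and guest image that support the IDPF vNIC (all names below are placeholders):

```python
import pulumi_gcp as gcp

# Hypothetical VM; whether IDPF can actually be selected depends on the machine series and guest image.
instance = gcp.compute.Instance(
    "idpf-instance",
    machine_type="c4-standard-4",
    zone="us-central1-a",
    boot_disk=gcp.compute.InstanceBootDiskArgs(
        initialize_params=gcp.compute.InstanceBootDiskInitializeParamsArgs(
            image="debian-cloud/debian-12",
        ),
    ),
    network_interfaces=[
        gcp.compute.InstanceNetworkInterfaceArgs(
            network="default",
            nic_type="IDPF",  # previously only GVNIC and VIRTIO_NET were documented
        ),
    ],
)
```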
@@ -52588,7 +52588,7 @@ class GetInstanceNetworkInterfaceResult(dict):
  :param str network: The name or self_link of the network attached to this interface.
  :param str network_attachment: Beta The URL of the network attachment to this interface.
  :param str network_ip: The internal ip address of the instance, either manually or dynamically assigned.
- :param str nic_type: The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET
+ :param str nic_type: The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET, IDPF
  :param int queue_count: The networking queue count that's specified by users for the network interface. Both Rx and Tx queues will be set to this number. It will be empty if not specified.
  :param str security_policy: A full or partial URL to a security policy to add to this instance. If this field is set to an empty string it will remove the associated security policy.
  :param str stack_type: The stack type for this network interface to identify whether the IPv6 feature is enabled or not. If not specified, IPV4_ONLY will be used.
@@ -52697,7 +52697,7 @@ class GetInstanceNetworkInterfaceResult(dict):
  @pulumi.getter(name="nicType")
  def nic_type(self) -> str:
  """
- The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET
+ The type of vNIC to be used on this interface. Possible values:GVNIC, VIRTIO_NET, IDPF
  """
  return pulumi.get(self, "nic_type")
 
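The corresponding data source result documents the same value. A short sketch of reading it back through `gcp.compute.get_instance` (instance name and zone are placeholders):

```python
import pulumi
import pulumi_gcp as gcp

# Hypothetical lookup of an existing instance.
info = gcp.compute.get_instance(name="idpf-instance", zone="us-central1-a")
# nic_type may now report "IDPF" in addition to "GVNIC" or "VIRTIO_NET".
pulumi.export("nic_type", info.network_interfaces[0].nic_type)
```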
@@ -16,6 +16,7 @@ from .cluster_iam_member import *
  from .cluster_iam_policy import *
  from .gdc_application_environment import *
  from .gdc_service_instance import *
+ from .gdc_spark_application import *
  from .get_autoscaling_policy_iam_policy import *
  from .get_cluster_iam_policy import *
  from .get_job_iam_policy import *
@@ -149,6 +149,16 @@ __all__ = [
  'GdcServiceInstanceGdceClusterArgsDict',
  'GdcServiceInstanceSparkServiceInstanceConfigArgs',
  'GdcServiceInstanceSparkServiceInstanceConfigArgsDict',
+ 'GdcSparkApplicationPysparkApplicationConfigArgs',
+ 'GdcSparkApplicationPysparkApplicationConfigArgsDict',
+ 'GdcSparkApplicationSparkApplicationConfigArgs',
+ 'GdcSparkApplicationSparkApplicationConfigArgsDict',
+ 'GdcSparkApplicationSparkRApplicationConfigArgs',
+ 'GdcSparkApplicationSparkRApplicationConfigArgsDict',
+ 'GdcSparkApplicationSparkSqlApplicationConfigArgs',
+ 'GdcSparkApplicationSparkSqlApplicationConfigArgsDict',
+ 'GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs',
+ 'GdcSparkApplicationSparkSqlApplicationConfigQueryListArgsDict',
  'JobHadoopConfigArgs',
  'JobHadoopConfigArgsDict',
  'JobHadoopConfigLoggingConfigArgs',
@@ -6669,6 +6679,486 @@ class GdcServiceInstanceSparkServiceInstanceConfigArgs:
  pass


+ if not MYPY:
+ class GdcSparkApplicationPysparkApplicationConfigArgsDict(TypedDict):
+ main_python_file_uri: pulumi.Input[str]
+ """
+ The HCFS URI of the main Python file to use as the driver. Must be a .py file.
+ """
+ archive_uris: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
+ """
+ HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
+ """
+ args: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
+ """
+ The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+ """
+ file_uris: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
+ """
+ HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
+ """
+ jar_file_uris: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
+ """
+ HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
+ """
+ python_file_uris: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
+ """
+ HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
+ """
+ elif False:
+ GdcSparkApplicationPysparkApplicationConfigArgsDict: TypeAlias = Mapping[str, Any]
+
+ @pulumi.input_type
+ class GdcSparkApplicationPysparkApplicationConfigArgs:
+ def __init__(__self__, *,
+ main_python_file_uri: pulumi.Input[str],
+ archive_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
+ args: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
+ file_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
+ jar_file_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
+ python_file_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
+ """
+ :param pulumi.Input[str] main_python_file_uri: The HCFS URI of the main Python file to use as the driver. Must be a .py file.
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] archive_uris: HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] args: The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] file_uris: HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] jar_file_uris: HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] python_file_uris: HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
+ """
+ pulumi.set(__self__, "main_python_file_uri", main_python_file_uri)
+ if archive_uris is not None:
+ pulumi.set(__self__, "archive_uris", archive_uris)
+ if args is not None:
+ pulumi.set(__self__, "args", args)
+ if file_uris is not None:
+ pulumi.set(__self__, "file_uris", file_uris)
+ if jar_file_uris is not None:
+ pulumi.set(__self__, "jar_file_uris", jar_file_uris)
+ if python_file_uris is not None:
+ pulumi.set(__self__, "python_file_uris", python_file_uris)
+
+ @property
+ @pulumi.getter(name="mainPythonFileUri")
+ def main_python_file_uri(self) -> pulumi.Input[str]:
+ """
+ The HCFS URI of the main Python file to use as the driver. Must be a .py file.
+ """
+ return pulumi.get(self, "main_python_file_uri")
+
+ @main_python_file_uri.setter
+ def main_python_file_uri(self, value: pulumi.Input[str]):
+ pulumi.set(self, "main_python_file_uri", value)
+
+ @property
+ @pulumi.getter(name="archiveUris")
+ def archive_uris(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
+ """
+ HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
+ """
+ return pulumi.get(self, "archive_uris")
+
+ @archive_uris.setter
+ def archive_uris(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
+ pulumi.set(self, "archive_uris", value)
+
+ @property
+ @pulumi.getter
+ def args(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
+ """
+ The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+ """
+ return pulumi.get(self, "args")
+
+ @args.setter
+ def args(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
+ pulumi.set(self, "args", value)
+
+ @property
+ @pulumi.getter(name="fileUris")
+ def file_uris(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
+ """
+ HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
+ """
+ return pulumi.get(self, "file_uris")
+
+ @file_uris.setter
+ def file_uris(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
+ pulumi.set(self, "file_uris", value)
+
+ @property
+ @pulumi.getter(name="jarFileUris")
+ def jar_file_uris(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
+ """
+ HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
+ """
+ return pulumi.get(self, "jar_file_uris")
+
+ @jar_file_uris.setter
+ def jar_file_uris(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
+ pulumi.set(self, "jar_file_uris", value)
+
+ @property
+ @pulumi.getter(name="pythonFileUris")
+ def python_file_uris(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
+ """
+ HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
+ """
+ return pulumi.get(self, "python_file_uris")
+
+ @python_file_uris.setter
+ def python_file_uris(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
+ pulumi.set(self, "python_file_uris", value)
+
+
+ if not MYPY:
+ class GdcSparkApplicationSparkApplicationConfigArgsDict(TypedDict):
+ archive_uris: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
+ """
+ HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
+ """
+ args: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
+ """
+ The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as `--conf`, since a collision can occur that causes an incorrect application submission.
+ """
+ file_uris: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
+ """
+ HCFS URIs of files to be placed in the working directory of each executor.
+ """
+ jar_file_uris: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
+ """
+ HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
+ """
+ main_class: NotRequired[pulumi.Input[str]]
+ """
+ The name of the driver main class. The jar file that contains the class must be in the classpath or specified in `jar_file_uris`.
+ """
+ main_jar_file_uri: NotRequired[pulumi.Input[str]]
+ """
+ The HCFS URI of the jar file that contains the main class.
+ """
+ elif False:
+ GdcSparkApplicationSparkApplicationConfigArgsDict: TypeAlias = Mapping[str, Any]
+
+ @pulumi.input_type
+ class GdcSparkApplicationSparkApplicationConfigArgs:
+ def __init__(__self__, *,
+ archive_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
+ args: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
+ file_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
+ jar_file_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
+ main_class: Optional[pulumi.Input[str]] = None,
+ main_jar_file_uri: Optional[pulumi.Input[str]] = None):
+ """
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] archive_uris: HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] args: The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as `--conf`, since a collision can occur that causes an incorrect application submission.
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] file_uris: HCFS URIs of files to be placed in the working directory of each executor.
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] jar_file_uris: HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
+ :param pulumi.Input[str] main_class: The name of the driver main class. The jar file that contains the class must be in the classpath or specified in `jar_file_uris`.
+ :param pulumi.Input[str] main_jar_file_uri: The HCFS URI of the jar file that contains the main class.
+ """
+ if archive_uris is not None:
+ pulumi.set(__self__, "archive_uris", archive_uris)
+ if args is not None:
+ pulumi.set(__self__, "args", args)
+ if file_uris is not None:
+ pulumi.set(__self__, "file_uris", file_uris)
+ if jar_file_uris is not None:
+ pulumi.set(__self__, "jar_file_uris", jar_file_uris)
+ if main_class is not None:
+ pulumi.set(__self__, "main_class", main_class)
+ if main_jar_file_uri is not None:
+ pulumi.set(__self__, "main_jar_file_uri", main_jar_file_uri)
+
+ @property
+ @pulumi.getter(name="archiveUris")
+ def archive_uris(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
+ """
+ HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
+ """
+ return pulumi.get(self, "archive_uris")
+
+ @archive_uris.setter
+ def archive_uris(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
+ pulumi.set(self, "archive_uris", value)
+
+ @property
+ @pulumi.getter
+ def args(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
+ """
+ The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as `--conf`, since a collision can occur that causes an incorrect application submission.
+ """
+ return pulumi.get(self, "args")
+
+ @args.setter
+ def args(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
+ pulumi.set(self, "args", value)
+
+ @property
+ @pulumi.getter(name="fileUris")
+ def file_uris(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
+ """
+ HCFS URIs of files to be placed in the working directory of each executor.
+ """
+ return pulumi.get(self, "file_uris")
+
+ @file_uris.setter
+ def file_uris(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
+ pulumi.set(self, "file_uris", value)
+
+ @property
+ @pulumi.getter(name="jarFileUris")
+ def jar_file_uris(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
+ """
+ HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
+ """
+ return pulumi.get(self, "jar_file_uris")
+
+ @jar_file_uris.setter
+ def jar_file_uris(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
+ pulumi.set(self, "jar_file_uris", value)
+
+ @property
+ @pulumi.getter(name="mainClass")
+ def main_class(self) -> Optional[pulumi.Input[str]]:
+ """
+ The name of the driver main class. The jar file that contains the class must be in the classpath or specified in `jar_file_uris`.
+ """
+ return pulumi.get(self, "main_class")
+
+ @main_class.setter
+ def main_class(self, value: Optional[pulumi.Input[str]]):
+ pulumi.set(self, "main_class", value)
+
+ @property
+ @pulumi.getter(name="mainJarFileUri")
+ def main_jar_file_uri(self) -> Optional[pulumi.Input[str]]:
+ """
+ The HCFS URI of the jar file that contains the main class.
+ """
+ return pulumi.get(self, "main_jar_file_uri")
+
+ @main_jar_file_uri.setter
+ def main_jar_file_uri(self, value: Optional[pulumi.Input[str]]):
+ pulumi.set(self, "main_jar_file_uri", value)
+
+
+ if not MYPY:
+ class GdcSparkApplicationSparkRApplicationConfigArgsDict(TypedDict):
+ main_r_file_uri: pulumi.Input[str]
+ """
+ The HCFS URI of the main R file to use as the driver. Must be a .R file.
+ """
+ archive_uris: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
+ """
+ HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
+ """
+ args: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
+ """
+ The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+ """
+ file_uris: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
+ """
+ HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
+ """
+ elif False:
+ GdcSparkApplicationSparkRApplicationConfigArgsDict: TypeAlias = Mapping[str, Any]
+
+ @pulumi.input_type
+ class GdcSparkApplicationSparkRApplicationConfigArgs:
+ def __init__(__self__, *,
+ main_r_file_uri: pulumi.Input[str],
+ archive_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
+ args: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
+ file_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
+ """
+ :param pulumi.Input[str] main_r_file_uri: The HCFS URI of the main R file to use as the driver. Must be a .R file.
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] archive_uris: HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] args: The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] file_uris: HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
+ """
+ pulumi.set(__self__, "main_r_file_uri", main_r_file_uri)
+ if archive_uris is not None:
+ pulumi.set(__self__, "archive_uris", archive_uris)
+ if args is not None:
+ pulumi.set(__self__, "args", args)
+ if file_uris is not None:
+ pulumi.set(__self__, "file_uris", file_uris)
+
+ @property
+ @pulumi.getter(name="mainRFileUri")
+ def main_r_file_uri(self) -> pulumi.Input[str]:
+ """
+ The HCFS URI of the main R file to use as the driver. Must be a .R file.
+ """
+ return pulumi.get(self, "main_r_file_uri")
+
+ @main_r_file_uri.setter
+ def main_r_file_uri(self, value: pulumi.Input[str]):
+ pulumi.set(self, "main_r_file_uri", value)
+
+ @property
+ @pulumi.getter(name="archiveUris")
+ def archive_uris(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
+ """
+ HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
+ """
+ return pulumi.get(self, "archive_uris")
+
+ @archive_uris.setter
+ def archive_uris(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
+ pulumi.set(self, "archive_uris", value)
+
+ @property
+ @pulumi.getter
+ def args(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
+ """
+ The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+ """
+ return pulumi.get(self, "args")
+
+ @args.setter
+ def args(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
+ pulumi.set(self, "args", value)
+
+ @property
+ @pulumi.getter(name="fileUris")
+ def file_uris(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
+ """
+ HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
+ """
+ return pulumi.get(self, "file_uris")
+
+ @file_uris.setter
+ def file_uris(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
+ pulumi.set(self, "file_uris", value)
+
+
+ if not MYPY:
+ class GdcSparkApplicationSparkSqlApplicationConfigArgsDict(TypedDict):
+ jar_file_uris: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
+ """
+ HCFS URIs of jar files to be added to the Spark CLASSPATH.
+ """
+ query_file_uri: NotRequired[pulumi.Input[str]]
+ """
+ The HCFS URI of the script that contains SQL queries.
+ """
+ query_list: NotRequired[pulumi.Input['GdcSparkApplicationSparkSqlApplicationConfigQueryListArgsDict']]
+ """
+ Represents a list of queries.
+ Structure is documented below.
+ """
+ script_variables: NotRequired[pulumi.Input[Mapping[str, pulumi.Input[str]]]]
+ """
+ Mapping of query variable names to values (equivalent to the Spark SQL command: SET `name="value";`).
+ """
+ elif False:
+ GdcSparkApplicationSparkSqlApplicationConfigArgsDict: TypeAlias = Mapping[str, Any]
+
+ @pulumi.input_type
+ class GdcSparkApplicationSparkSqlApplicationConfigArgs:
+ def __init__(__self__, *,
+ jar_file_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
+ query_file_uri: Optional[pulumi.Input[str]] = None,
+ query_list: Optional[pulumi.Input['GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs']] = None,
+ script_variables: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
+ """
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] jar_file_uris: HCFS URIs of jar files to be added to the Spark CLASSPATH.
+ :param pulumi.Input[str] query_file_uri: The HCFS URI of the script that contains SQL queries.
+ :param pulumi.Input['GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs'] query_list: Represents a list of queries.
+ Structure is documented below.
+ :param pulumi.Input[Mapping[str, pulumi.Input[str]]] script_variables: Mapping of query variable names to values (equivalent to the Spark SQL command: SET `name="value";`).
+ """
+ if jar_file_uris is not None:
+ pulumi.set(__self__, "jar_file_uris", jar_file_uris)
+ if query_file_uri is not None:
+ pulumi.set(__self__, "query_file_uri", query_file_uri)
+ if query_list is not None:
+ pulumi.set(__self__, "query_list", query_list)
+ if script_variables is not None:
+ pulumi.set(__self__, "script_variables", script_variables)
+
+ @property
+ @pulumi.getter(name="jarFileUris")
+ def jar_file_uris(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
+ """
+ HCFS URIs of jar files to be added to the Spark CLASSPATH.
+ """
+ return pulumi.get(self, "jar_file_uris")
+
+ @jar_file_uris.setter
+ def jar_file_uris(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
+ pulumi.set(self, "jar_file_uris", value)
+
+ @property
+ @pulumi.getter(name="queryFileUri")
+ def query_file_uri(self) -> Optional[pulumi.Input[str]]:
+ """
+ The HCFS URI of the script that contains SQL queries.
+ """
+ return pulumi.get(self, "query_file_uri")
+
+ @query_file_uri.setter
+ def query_file_uri(self, value: Optional[pulumi.Input[str]]):
+ pulumi.set(self, "query_file_uri", value)
+
+ @property
+ @pulumi.getter(name="queryList")
+ def query_list(self) -> Optional[pulumi.Input['GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs']]:
+ """
+ Represents a list of queries.
+ Structure is documented below.
+ """
+ return pulumi.get(self, "query_list")
+
+ @query_list.setter
+ def query_list(self, value: Optional[pulumi.Input['GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs']]):
+ pulumi.set(self, "query_list", value)
+
+ @property
+ @pulumi.getter(name="scriptVariables")
+ def script_variables(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
+ """
+ Mapping of query variable names to values (equivalent to the Spark SQL command: SET `name="value";`).
+ """
+ return pulumi.get(self, "script_variables")
+
+ @script_variables.setter
+ def script_variables(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
+ pulumi.set(self, "script_variables", value)
+
+
+ if not MYPY:
+ class GdcSparkApplicationSparkSqlApplicationConfigQueryListArgsDict(TypedDict):
+ queries: pulumi.Input[Sequence[pulumi.Input[str]]]
+ """
+ The queries to run.
+ """
+ elif False:
+ GdcSparkApplicationSparkSqlApplicationConfigQueryListArgsDict: TypeAlias = Mapping[str, Any]
+
+ @pulumi.input_type
+ class GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs:
+ def __init__(__self__, *,
+ queries: pulumi.Input[Sequence[pulumi.Input[str]]]):
+ """
+ :param pulumi.Input[Sequence[pulumi.Input[str]]] queries: The queries to run.
+ """
+ pulumi.set(__self__, "queries", queries)
+
+ @property
+ @pulumi.getter
+ def queries(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
+ """
+ The queries to run.
+ """
+ return pulumi.get(self, "queries")
+
+ @queries.setter
+ def queries(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
+ pulumi.set(self, "queries", value)
+
+
  if not MYPY:
  class JobHadoopConfigArgsDict(TypedDict):
  archive_uris: NotRequired[pulumi.Input[Sequence[pulumi.Input[str]]]]
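The added block above defines the input types for the new `gcp.dataproc.GdcSparkApplication` resource (shipped in `pulumi_gcp/dataproc/gdc_spark_application.py` in this release). A hypothetical sketch of wiring them together; only the `GdcSparkApplication*ConfigArgs` classes come from the diff, while the resource-level argument names (`location`, `serviceinstance`, `spark_application_id`) are assumptions that may differ from the generated resource:

```python
import pulumi_gcp as gcp

# Hypothetical PySpark application on a Dataproc GDC service instance.
# Resource-level argument names are assumptions; the *ConfigArgs classes are from this diff.
app = gcp.dataproc.GdcSparkApplication(
    "example-app",
    spark_application_id="example-app",     # assumed argument name
    serviceinstance="my-service-instance",  # assumed argument name
    location="us-west2",                    # assumed argument name
    pyspark_application_config=gcp.dataproc.GdcSparkApplicationPysparkApplicationConfigArgs(
        main_python_file_uri="gs://my-bucket/jobs/main.py",
        args=["--input", "gs://my-bucket/data/"],
    ),
)
```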