pulumi-gcp 7.34.0__py3-none-any.whl → 7.34.0a1722421695__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of pulumi-gcp might be problematic.

Files changed (78)
  1. pulumi_gcp/__init__.py +0 -40
  2. pulumi_gcp/alloydb/_inputs.py +0 -174
  3. pulumi_gcp/alloydb/instance.py +0 -54
  4. pulumi_gcp/alloydb/outputs.py +0 -133
  5. pulumi_gcp/apigee/__init__.py +0 -2
  6. pulumi_gcp/apigee/instance.py +2 -2
  7. pulumi_gcp/apigee/nat_address.py +2 -2
  8. pulumi_gcp/apigee/organization.py +4 -4
  9. pulumi_gcp/apphub/service_project_attachment.py +11 -11
  10. pulumi_gcp/bigquery/_inputs.py +0 -36
  11. pulumi_gcp/bigquery/app_profile.py +0 -54
  12. pulumi_gcp/bigquery/outputs.py +0 -38
  13. pulumi_gcp/bigquery/reservation.py +4 -34
  14. pulumi_gcp/bigquery/table.py +21 -65
  15. pulumi_gcp/bigtable/table.py +26 -27
  16. pulumi_gcp/certificateauthority/authority.py +4 -4
  17. pulumi_gcp/cloudfunctions/function.py +0 -47
  18. pulumi_gcp/cloudfunctions/get_function.py +1 -11
  19. pulumi_gcp/cloudfunctionsv2/function.py +2 -2
  20. pulumi_gcp/cloudrun/_inputs.py +21 -24
  21. pulumi_gcp/cloudrun/outputs.py +24 -20
  22. pulumi_gcp/cloudrunv2/_inputs.py +0 -3
  23. pulumi_gcp/cloudrunv2/outputs.py +0 -4
  24. pulumi_gcp/compute/__init__.py +0 -2
  25. pulumi_gcp/compute/_inputs.py +328 -2333
  26. pulumi_gcp/compute/backend_service.py +0 -6
  27. pulumi_gcp/compute/outputs.py +7 -1466
  28. pulumi_gcp/compute/public_advertised_prefix.py +2 -30
  29. pulumi_gcp/compute/router_peer.py +0 -437
  30. pulumi_gcp/compute/service_attachment.py +14 -7
  31. pulumi_gcp/container/_inputs.py +18 -215
  32. pulumi_gcp/container/node_pool.py +14 -0
  33. pulumi_gcp/container/outputs.py +12 -226
  34. pulumi_gcp/databasemigrationservice/private_connection.py +6 -10
  35. pulumi_gcp/dataloss/_inputs.py +21 -707
  36. pulumi_gcp/dataloss/outputs.py +14 -588
  37. pulumi_gcp/datastore/data_store_index.py +12 -24
  38. pulumi_gcp/datastream/_inputs.py +3 -83
  39. pulumi_gcp/datastream/outputs.py +3 -51
  40. pulumi_gcp/datastream/stream.py +0 -170
  41. pulumi_gcp/firebase/database_instance.py +8 -8
  42. pulumi_gcp/firebase/hosting_site.py +8 -8
  43. pulumi_gcp/firestore/index.py +10 -10
  44. pulumi_gcp/gkeonprem/_inputs.py +78 -78
  45. pulumi_gcp/gkeonprem/outputs.py +52 -52
  46. pulumi_gcp/iap/client.py +4 -4
  47. pulumi_gcp/integrationconnectors/_inputs.py +30 -30
  48. pulumi_gcp/integrationconnectors/outputs.py +20 -20
  49. pulumi_gcp/netapp/volume.py +1 -1
  50. pulumi_gcp/networkconnectivity/_inputs.py +6 -3
  51. pulumi_gcp/networkconnectivity/hub.py +49 -60
  52. pulumi_gcp/networkconnectivity/outputs.py +4 -2
  53. pulumi_gcp/networkconnectivity/spoke.py +104 -159
  54. pulumi_gcp/networksecurity/tls_inspection_policy.py +2 -2
  55. pulumi_gcp/orgpolicy/policy.py +4 -4
  56. pulumi_gcp/projects/get_project_service.py +1 -11
  57. pulumi_gcp/projects/service.py +0 -68
  58. pulumi_gcp/projects/service_identity.py +2 -30
  59. pulumi_gcp/pubsub/subscription.py +6 -6
  60. pulumi_gcp/pulumi-plugin.json +1 -1
  61. pulumi_gcp/securesourcemanager/instance.py +4 -528
  62. pulumi_gcp/securitycenter/__init__.py +0 -1
  63. pulumi_gcp/sql/database_instance.py +2 -2
  64. pulumi_gcp/vertex/ai_feature_online_store_featureview.py +4 -4
  65. pulumi_gcp/vmwareengine/get_private_cloud.py +1 -21
  66. pulumi_gcp/vmwareengine/private_cloud.py +2 -121
  67. pulumi_gcp/workbench/_inputs.py +0 -77
  68. pulumi_gcp/workbench/instance.py +4 -18
  69. pulumi_gcp/workbench/outputs.py +1 -67
  70. {pulumi_gcp-7.34.0.dist-info → pulumi_gcp-7.34.0a1722421695.dist-info}/METADATA +1 -1
  71. {pulumi_gcp-7.34.0.dist-info → pulumi_gcp-7.34.0a1722421695.dist-info}/RECORD +73 -78
  72. pulumi_gcp/apigee/environment_keyvaluemaps.py +0 -370
  73. pulumi_gcp/apigee/environment_keyvaluemaps_entries.py +0 -440
  74. pulumi_gcp/compute/resize_request.py +0 -782
  75. pulumi_gcp/compute/router_route_policy.py +0 -616
  76. pulumi_gcp/securitycenter/v2_organization_mute_config.py +0 -673
  77. {pulumi_gcp-7.34.0.dist-info → pulumi_gcp-7.34.0a1722421695.dist-info}/WHEEL +0 -0
  78. {pulumi_gcp-7.34.0.dist-info → pulumi_gcp-7.34.0a1722421695.dist-info}/top_level.txt +0 -0

pulumi_gcp/datastore/data_store_index.py
@@ -216,9 +216,7 @@ class DataStoreIndex(pulumi.CustomResource):
  properties: Optional[pulumi.Input[Sequence[pulumi.Input[Union['DataStoreIndexPropertyArgs', 'DataStoreIndexPropertyArgsDict']]]]] = None,
  __props__=None):
  """
- > **Warning:** `datastore_index` is deprecated and will be removed in a future major release. Use `firestore_index` instead; this resource is deprecated because it only supports the (default) database. `firestore_index` supports both Firestore in Datastore Mode and Firestore Native indexes and supports both named and the (default) database.
-
- Describes a composite index for Firestore in Datastore Mode.
+ Describes a composite index for Cloud Datastore.

  To get more information about Index, see:

@@ -226,15 +224,11 @@ class DataStoreIndex(pulumi.CustomResource):
  * How-to Guides
  * [Official Documentation](https://cloud.google.com/datastore/docs/concepts/indexes)

- > **Warning:** `datastore_index` is deprecated and will be removed in a future major release.
- Use `firestore_index` instead; this resource is deprecated because it only supports the (default) database.
- This resource creates a Datastore Index on a project that has already
- enabled a Datastore-compatible database. If you haven't already created it, you may
- create a `firestore.Database` resource with `location_id` set
- to your chosen location, and `type` set to `"DATASTORE_MODE"`.
- If you wish to use App Engine, you may instead create a `appengine.Application` resource with
- `database_type` set to `"CLOUD_DATASTORE_COMPATIBILITY"`.
- Your Datastore location will be the same as the App Engine location specified.
+ > **Warning:** This resource creates a Datastore Index on a project that has already
+ enabled a Datastore-compatible database. If you haven't already enabled
+ one, you can create a `appengine.Application` resource with
+ `database_type` set to `"CLOUD_DATASTORE_COMPATIBILITY"` to do so. Your
+ Datastore location will be the same as the App Engine location specified.

  ## Example Usage

@@ -311,9 +305,7 @@ class DataStoreIndex(pulumi.CustomResource):
  args: DataStoreIndexArgs,
  opts: Optional[pulumi.ResourceOptions] = None):
  """
- > **Warning:** `datastore_index` is deprecated and will be removed in a future major release. Use `firestore_index` instead; this resource is deprecated because it only supports the (default) database. `firestore_index` supports both Firestore in Datastore Mode and Firestore Native indexes and supports both named and the (default) database.
-
- Describes a composite index for Firestore in Datastore Mode.
+ Describes a composite index for Cloud Datastore.

  To get more information about Index, see:

@@ -321,15 +313,11 @@ class DataStoreIndex(pulumi.CustomResource):
  * How-to Guides
  * [Official Documentation](https://cloud.google.com/datastore/docs/concepts/indexes)

- > **Warning:** `datastore_index` is deprecated and will be removed in a future major release.
- Use `firestore_index` instead; this resource is deprecated because it only supports the (default) database.
- This resource creates a Datastore Index on a project that has already
- enabled a Datastore-compatible database. If you haven't already created it, you may
- create a `firestore.Database` resource with `location_id` set
- to your chosen location, and `type` set to `"DATASTORE_MODE"`.
- If you wish to use App Engine, you may instead create a `appengine.Application` resource with
- `database_type` set to `"CLOUD_DATASTORE_COMPATIBILITY"`.
- Your Datastore location will be the same as the App Engine location specified.
+ > **Warning:** This resource creates a Datastore Index on a project that has already
+ enabled a Datastore-compatible database. If you haven't already enabled
+ one, you can create a `appengine.Application` resource with
+ `database_type` set to `"CLOUD_DATASTORE_COMPATIBILITY"` to do so. Your
+ Datastore location will be the same as the App Engine location specified.

  ## Example Usage
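
For reference, the workflow the rewritten warning describes — enabling a Datastore-compatible database through App Engine and then declaring the index — looks roughly like the sketch below. It relies on the `gcp.appengine.Application` and `gcp.datastore.DataStoreIndex` resources named in the docstring; the project ID, location, kind, and property names are placeholders.

```python
import pulumi
import pulumi_gcp as gcp

# Enable a Datastore-compatible database for the project, as the updated
# docstring suggests (only needed if the project has no App Engine app yet).
app = gcp.appengine.Application("app",
    project="my-project",  # placeholder project ID
    location_id="us-central",
    database_type="CLOUD_DATASTORE_COMPATIBILITY")

# Composite index for Cloud Datastore; kind and properties are illustrative.
index = gcp.datastore.DataStoreIndex("index",
    kind="foo",
    properties=[
        {"name": "property_a", "direction": "ASCENDING"},
        {"name": "property_b", "direction": "ASCENDING"},
    ],
    opts=pulumi.ResourceOptions(depends_on=[app]))
```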

pulumi_gcp/datastream/_inputs.py
@@ -77,10 +77,6 @@ __all__ = [
  'StreamDestinationConfigArgsDict',
  'StreamDestinationConfigBigqueryDestinationConfigArgs',
  'StreamDestinationConfigBigqueryDestinationConfigArgsDict',
- 'StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs',
- 'StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgsDict',
- 'StreamDestinationConfigBigqueryDestinationConfigMergeArgs',
- 'StreamDestinationConfigBigqueryDestinationConfigMergeArgsDict',
  'StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs',
  'StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgsDict',
  'StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs',
@@ -2575,7 +2571,7 @@ if not MYPY:
  """
  bigquery_destination_config: NotRequired[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigArgsDict']]
  """
- A configuration for how data should be loaded to Google BigQuery.
+ A configuration for how data should be loaded to Cloud Storage.
  Structure is documented below.
  """
  gcs_destination_config: NotRequired[pulumi.Input['StreamDestinationConfigGcsDestinationConfigArgsDict']]
@@ -2594,7 +2590,7 @@ class StreamDestinationConfigArgs:
  gcs_destination_config: Optional[pulumi.Input['StreamDestinationConfigGcsDestinationConfigArgs']] = None):
  """
  :param pulumi.Input[str] destination_connection_profile: Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- :param pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigArgs'] bigquery_destination_config: A configuration for how data should be loaded to Google BigQuery.
+ :param pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigArgs'] bigquery_destination_config: A configuration for how data should be loaded to Cloud Storage.
  Structure is documented below.
  :param pulumi.Input['StreamDestinationConfigGcsDestinationConfigArgs'] gcs_destination_config: A configuration for how data should be loaded to Cloud Storage.
  Structure is documented below.
@@ -2621,7 +2617,7 @@ class StreamDestinationConfigArgs:
  @pulumi.getter(name="bigqueryDestinationConfig")
  def bigquery_destination_config(self) -> Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigArgs']]:
  """
- A configuration for how data should be loaded to Google BigQuery.
+ A configuration for how data should be loaded to Cloud Storage.
  Structure is documented below.
  """
  return pulumi.get(self, "bigquery_destination_config")
@@ -2646,12 +2642,6 @@ class StreamDestinationConfigArgs:

  if not MYPY:
  class StreamDestinationConfigBigqueryDestinationConfigArgsDict(TypedDict):
- append_only: NotRequired[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgsDict']]
- """
- AppendOnly mode defines that the stream of changes (INSERT, UPDATE-INSERT, UPDATE-DELETE and DELETE
- events) to a source table will be written to the destination Google BigQuery table, retaining the
- historical state of the data.
- """
  data_freshness: NotRequired[pulumi.Input[str]]
  """
  The guaranteed data freshness (in seconds) when querying tables created by the stream.
@@ -2659,12 +2649,6 @@ if not MYPY:
  will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost.
  A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
  """
- merge: NotRequired[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigMergeArgsDict']]
- """
- Merge mode defines that all changes to a table will be merged at the destination Google BigQuery
- table. This is the default write mode. When selected, BigQuery reflects the way the data is stored
- in the source database. With Merge mode, no historical record of the change events is kept.
- """
  single_target_dataset: NotRequired[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgsDict']]
  """
  A single target dataset to which all data will be streamed.
@@ -2681,52 +2665,26 @@ elif False:
  @pulumi.input_type
  class StreamDestinationConfigBigqueryDestinationConfigArgs:
  def __init__(__self__, *,
- append_only: Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs']] = None,
  data_freshness: Optional[pulumi.Input[str]] = None,
- merge: Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigMergeArgs']] = None,
  single_target_dataset: Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs']] = None,
  source_hierarchy_datasets: Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs']] = None):
  """
- :param pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs'] append_only: AppendOnly mode defines that the stream of changes (INSERT, UPDATE-INSERT, UPDATE-DELETE and DELETE
- events) to a source table will be written to the destination Google BigQuery table, retaining the
- historical state of the data.
  :param pulumi.Input[str] data_freshness: The guaranteed data freshness (in seconds) when querying tables created by the stream.
  Editing this field will only affect new tables created in the future, but existing tables
  will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost.
  A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- :param pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigMergeArgs'] merge: Merge mode defines that all changes to a table will be merged at the destination Google BigQuery
- table. This is the default write mode. When selected, BigQuery reflects the way the data is stored
- in the source database. With Merge mode, no historical record of the change events is kept.
  :param pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs'] single_target_dataset: A single target dataset to which all data will be streamed.
  Structure is documented below.
  :param pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs'] source_hierarchy_datasets: Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy.
  Structure is documented below.
  """
- if append_only is not None:
- pulumi.set(__self__, "append_only", append_only)
  if data_freshness is not None:
  pulumi.set(__self__, "data_freshness", data_freshness)
- if merge is not None:
- pulumi.set(__self__, "merge", merge)
  if single_target_dataset is not None:
  pulumi.set(__self__, "single_target_dataset", single_target_dataset)
  if source_hierarchy_datasets is not None:
  pulumi.set(__self__, "source_hierarchy_datasets", source_hierarchy_datasets)

- @property
- @pulumi.getter(name="appendOnly")
- def append_only(self) -> Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs']]:
- """
- AppendOnly mode defines that the stream of changes (INSERT, UPDATE-INSERT, UPDATE-DELETE and DELETE
- events) to a source table will be written to the destination Google BigQuery table, retaining the
- historical state of the data.
- """
- return pulumi.get(self, "append_only")
-
- @append_only.setter
- def append_only(self, value: Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs']]):
- pulumi.set(self, "append_only", value)
-
  @property
  @pulumi.getter(name="dataFreshness")
  def data_freshness(self) -> Optional[pulumi.Input[str]]:
@@ -2742,20 +2700,6 @@ class StreamDestinationConfigBigqueryDestinationConfigArgs:
  def data_freshness(self, value: Optional[pulumi.Input[str]]):
  pulumi.set(self, "data_freshness", value)

- @property
- @pulumi.getter
- def merge(self) -> Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigMergeArgs']]:
- """
- Merge mode defines that all changes to a table will be merged at the destination Google BigQuery
- table. This is the default write mode. When selected, BigQuery reflects the way the data is stored
- in the source database. With Merge mode, no historical record of the change events is kept.
- """
- return pulumi.get(self, "merge")
-
- @merge.setter
- def merge(self, value: Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigMergeArgs']]):
- pulumi.set(self, "merge", value)
-
  @property
  @pulumi.getter(name="singleTargetDataset")
  def single_target_dataset(self) -> Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs']]:
@@ -2783,30 +2727,6 @@ class StreamDestinationConfigBigqueryDestinationConfigArgs:
  pulumi.set(self, "source_hierarchy_datasets", value)


- if not MYPY:
- class StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgsDict(TypedDict):
- pass
- elif False:
- StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgsDict: TypeAlias = Mapping[str, Any]
-
- @pulumi.input_type
- class StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs:
- def __init__(__self__):
- pass
-
-
- if not MYPY:
- class StreamDestinationConfigBigqueryDestinationConfigMergeArgsDict(TypedDict):
- pass
- elif False:
- StreamDestinationConfigBigqueryDestinationConfigMergeArgsDict: TypeAlias = Mapping[str, Any]
-
- @pulumi.input_type
- class StreamDestinationConfigBigqueryDestinationConfigMergeArgs:
- def __init__(__self__):
- pass
-
-
  if not MYPY:
  class StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgsDict(TypedDict):
  dataset_id: pulumi.Input[str]
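
With `append_only` and `merge` dropped from `StreamDestinationConfigBigqueryDestinationConfigArgs`, the only inputs left on the BigQuery destination block in this prerelease are `data_freshness`, `single_target_dataset`, and `source_hierarchy_datasets`. A minimal sketch of the remaining surface (the dataset ID is a placeholder); passing `append_only` or `merge` to this constructor is no longer accepted, whereas 7.34.0 allowed it:

```python
import pulumi_gcp as gcp

# BigQuery destination block as declared in 7.34.0a1722421695: no
# append_only/merge arguments, only the fields still listed above.
bq_config = gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigArgs(
    data_freshness="900s",
    single_target_dataset=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs(
        dataset_id="projects/my-project/datasets/my_dataset",  # placeholder
    ),
)
```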

pulumi_gcp/datastream/outputs.py
@@ -47,8 +47,6 @@ __all__ = [
  'StreamBackfillNone',
  'StreamDestinationConfig',
  'StreamDestinationConfigBigqueryDestinationConfig',
- 'StreamDestinationConfigBigqueryDestinationConfigAppendOnly',
- 'StreamDestinationConfigBigqueryDestinationConfigMerge',
  'StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset',
  'StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasets',
  'StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplate',
@@ -1925,7 +1923,7 @@ class StreamDestinationConfig(dict):
  gcs_destination_config: Optional['outputs.StreamDestinationConfigGcsDestinationConfig'] = None):
  """
  :param str destination_connection_profile: Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- :param 'StreamDestinationConfigBigqueryDestinationConfigArgs' bigquery_destination_config: A configuration for how data should be loaded to Google BigQuery.
+ :param 'StreamDestinationConfigBigqueryDestinationConfigArgs' bigquery_destination_config: A configuration for how data should be loaded to Cloud Storage.
  Structure is documented below.
  :param 'StreamDestinationConfigGcsDestinationConfigArgs' gcs_destination_config: A configuration for how data should be loaded to Cloud Storage.
  Structure is documented below.
@@ -1948,7 +1946,7 @@ class StreamDestinationConfig(dict):
  @pulumi.getter(name="bigqueryDestinationConfig")
  def bigquery_destination_config(self) -> Optional['outputs.StreamDestinationConfigBigqueryDestinationConfig']:
  """
- A configuration for how data should be loaded to Google BigQuery.
+ A configuration for how data should be loaded to Cloud Storage.
  Structure is documented below.
  """
  return pulumi.get(self, "bigquery_destination_config")
@@ -1968,9 +1966,7 @@ class StreamDestinationConfigBigqueryDestinationConfig(dict):
  @staticmethod
  def __key_warning(key: str):
  suggest = None
- if key == "appendOnly":
- suggest = "append_only"
- elif key == "dataFreshness":
+ if key == "dataFreshness":
  suggest = "data_freshness"
  elif key == "singleTargetDataset":
  suggest = "single_target_dataset"
@@ -1989,48 +1985,26 @@ class StreamDestinationConfigBigqueryDestinationConfig(dict):
  return super().get(key, default)

  def __init__(__self__, *,
- append_only: Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigAppendOnly'] = None,
  data_freshness: Optional[str] = None,
- merge: Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigMerge'] = None,
  single_target_dataset: Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset'] = None,
  source_hierarchy_datasets: Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasets'] = None):
  """
- :param 'StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs' append_only: AppendOnly mode defines that the stream of changes (INSERT, UPDATE-INSERT, UPDATE-DELETE and DELETE
- events) to a source table will be written to the destination Google BigQuery table, retaining the
- historical state of the data.
  :param str data_freshness: The guaranteed data freshness (in seconds) when querying tables created by the stream.
  Editing this field will only affect new tables created in the future, but existing tables
  will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost.
  A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- :param 'StreamDestinationConfigBigqueryDestinationConfigMergeArgs' merge: Merge mode defines that all changes to a table will be merged at the destination Google BigQuery
- table. This is the default write mode. When selected, BigQuery reflects the way the data is stored
- in the source database. With Merge mode, no historical record of the change events is kept.
  :param 'StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs' single_target_dataset: A single target dataset to which all data will be streamed.
  Structure is documented below.
  :param 'StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs' source_hierarchy_datasets: Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy.
  Structure is documented below.
  """
- if append_only is not None:
- pulumi.set(__self__, "append_only", append_only)
  if data_freshness is not None:
  pulumi.set(__self__, "data_freshness", data_freshness)
- if merge is not None:
- pulumi.set(__self__, "merge", merge)
  if single_target_dataset is not None:
  pulumi.set(__self__, "single_target_dataset", single_target_dataset)
  if source_hierarchy_datasets is not None:
  pulumi.set(__self__, "source_hierarchy_datasets", source_hierarchy_datasets)

- @property
- @pulumi.getter(name="appendOnly")
- def append_only(self) -> Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigAppendOnly']:
- """
- AppendOnly mode defines that the stream of changes (INSERT, UPDATE-INSERT, UPDATE-DELETE and DELETE
- events) to a source table will be written to the destination Google BigQuery table, retaining the
- historical state of the data.
- """
- return pulumi.get(self, "append_only")
-
  @property
  @pulumi.getter(name="dataFreshness")
  def data_freshness(self) -> Optional[str]:
@@ -2042,16 +2016,6 @@ class StreamDestinationConfigBigqueryDestinationConfig(dict):
  """
  return pulumi.get(self, "data_freshness")

- @property
- @pulumi.getter
- def merge(self) -> Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigMerge']:
- """
- Merge mode defines that all changes to a table will be merged at the destination Google BigQuery
- table. This is the default write mode. When selected, BigQuery reflects the way the data is stored
- in the source database. With Merge mode, no historical record of the change events is kept.
- """
- return pulumi.get(self, "merge")
-
  @property
  @pulumi.getter(name="singleTargetDataset")
  def single_target_dataset(self) -> Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset']:
@@ -2071,18 +2035,6 @@ class StreamDestinationConfigBigqueryDestinationConfig(dict):
  return pulumi.get(self, "source_hierarchy_datasets")


- @pulumi.output_type
- class StreamDestinationConfigBigqueryDestinationConfigAppendOnly(dict):
- def __init__(__self__):
- pass
-
-
- @pulumi.output_type
- class StreamDestinationConfigBigqueryDestinationConfigMerge(dict):
- def __init__(__self__):
- pass
-
-
  @pulumi.output_type
  class StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset(dict):
  @staticmethod
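
The output side mirrors the input change, so `append_only` and `merge` can no longer be read back from a stream's state either; only `data_freshness`, `single_target_dataset`, and `source_hierarchy_datasets` remain. A small sketch of inspecting the surviving fields (the helper name is arbitrary and it assumes an already-declared `gcp.datastream.Stream`):

```python
import pulumi
import pulumi_gcp as gcp

def export_bigquery_freshness(stream: gcp.datastream.Stream) -> None:
    # destination_config is a Pulumi output; apply() reaches into the nested
    # BigQuery block, which may be absent when the stream targets Cloud Storage.
    freshness = stream.destination_config.apply(
        lambda dc: dc.bigquery_destination_config.data_freshness
        if dc.bigquery_destination_config else None)
    pulumi.export("bigqueryDataFreshness", freshness)
```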

pulumi_gcp/datastream/stream.py
@@ -1117,91 +1117,6 @@ class Stream(pulumi.CustomResource):
  backfill_none={},
  opts = pulumi.ResourceOptions(depends_on=[bigquery_key_user]))
  ```
- ### Datastream Stream Bigquery Append Only
-
- ```python
- import pulumi
- import pulumi_gcp as gcp
- import pulumi_random as random
-
- project = gcp.organizations.get_project()
- instance = gcp.sql.DatabaseInstance("instance",
- name="my-instance",
- database_version="MYSQL_8_0",
- region="us-central1",
- settings={
- "tier": "db-f1-micro",
- "backup_configuration": {
- "enabled": True,
- "binary_log_enabled": True,
- },
- "ip_configuration": {
- "authorized_networks": [
- {
- "value": "34.71.242.81",
- },
- {
- "value": "34.72.28.29",
- },
- {
- "value": "34.67.6.157",
- },
- {
- "value": "34.67.234.134",
- },
- {
- "value": "34.72.239.218",
- },
- ],
- },
- },
- deletion_protection=True)
- db = gcp.sql.Database("db",
- instance=instance.name,
- name="db")
- pwd = random.RandomPassword("pwd",
- length=16,
- special=False)
- user = gcp.sql.User("user",
- name="user",
- instance=instance.name,
- host="%",
- password=pwd.result)
- source_connection_profile = gcp.datastream.ConnectionProfile("source_connection_profile",
- display_name="Source connection profile",
- location="us-central1",
- connection_profile_id="source-profile",
- mysql_profile={
- "hostname": instance.public_ip_address,
- "username": user.name,
- "password": user.password,
- })
- destination_connection_profile = gcp.datastream.ConnectionProfile("destination_connection_profile",
- display_name="Connection profile",
- location="us-central1",
- connection_profile_id="destination-profile",
- bigquery_profile={})
- default = gcp.datastream.Stream("default",
- stream_id="my-stream",
- location="us-central1",
- display_name="my stream",
- source_config={
- "source_connection_profile": source_connection_profile.id,
- "mysql_source_config": {},
- },
- destination_config={
- "destination_connection_profile": destination_connection_profile.id,
- "bigquery_destination_config": {
- "source_hierarchy_datasets": {
- "dataset_template": {
- "location": "us-central1",
- },
- },
- "append_only": {},
- },
- },
- backfill_none={})
- ```

  ## Import

@@ -1863,91 +1778,6 @@ class Stream(pulumi.CustomResource):
  backfill_none={},
  opts = pulumi.ResourceOptions(depends_on=[bigquery_key_user]))
  ```
- ### Datastream Stream Bigquery Append Only
-
- ```python
- import pulumi
- import pulumi_gcp as gcp
- import pulumi_random as random
-
- project = gcp.organizations.get_project()
- instance = gcp.sql.DatabaseInstance("instance",
- name="my-instance",
- database_version="MYSQL_8_0",
- region="us-central1",
- settings={
- "tier": "db-f1-micro",
- "backup_configuration": {
- "enabled": True,
- "binary_log_enabled": True,
- },
- "ip_configuration": {
- "authorized_networks": [
- {
- "value": "34.71.242.81",
- },
- {
- "value": "34.72.28.29",
- },
- {
- "value": "34.67.6.157",
- },
- {
- "value": "34.67.234.134",
- },
- {
- "value": "34.72.239.218",
- },
- ],
- },
- },
- deletion_protection=True)
- db = gcp.sql.Database("db",
- instance=instance.name,
- name="db")
- pwd = random.RandomPassword("pwd",
- length=16,
- special=False)
- user = gcp.sql.User("user",
- name="user",
- instance=instance.name,
- host="%",
- password=pwd.result)
- source_connection_profile = gcp.datastream.ConnectionProfile("source_connection_profile",
- display_name="Source connection profile",
- location="us-central1",
- connection_profile_id="source-profile",
- mysql_profile={
- "hostname": instance.public_ip_address,
- "username": user.name,
- "password": user.password,
- })
- destination_connection_profile = gcp.datastream.ConnectionProfile("destination_connection_profile",
- display_name="Connection profile",
- location="us-central1",
- connection_profile_id="destination-profile",
- bigquery_profile={})
- default = gcp.datastream.Stream("default",
- stream_id="my-stream",
- location="us-central1",
- display_name="my stream",
- source_config={
- "source_connection_profile": source_connection_profile.id,
- "mysql_source_config": {},
- },
- destination_config={
- "destination_connection_profile": destination_connection_profile.id,
- "bigquery_destination_config": {
- "source_hierarchy_datasets": {
- "dataset_template": {
- "location": "us-central1",
- },
- },
- "append_only": {},
- },
- },
- backfill_none={})
- ```

  ## Import
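
The deleted "Datastream Stream Bigquery Append Only" example has no direct replacement in the prerelease docs; against 7.34.0a1722421695 the same stream would simply omit the `append_only` block. A sketch derived from the removed example, with placeholder hostname and credentials standing in for the Cloud SQL setup:

```python
import pulumi_gcp as gcp

# Placeholder connection profiles; a real deployment would point the MySQL
# profile at a reachable database, as in the removed example above.
source_connection_profile = gcp.datastream.ConnectionProfile("source_connection_profile",
    display_name="Source connection profile",
    location="us-central1",
    connection_profile_id="source-profile",
    mysql_profile={
        "hostname": "10.0.0.5",   # placeholder host
        "username": "user",       # placeholder credentials
        "password": "secret",
    })
destination_connection_profile = gcp.datastream.ConnectionProfile("destination_connection_profile",
    display_name="Connection profile",
    location="us-central1",
    connection_profile_id="destination-profile",
    bigquery_profile={})
default = gcp.datastream.Stream("default",
    stream_id="my-stream",
    location="us-central1",
    display_name="my stream",
    source_config={
        "source_connection_profile": source_connection_profile.id,
        "mysql_source_config": {},
    },
    destination_config={
        "destination_connection_profile": destination_connection_profile.id,
        "bigquery_destination_config": {
            "source_hierarchy_datasets": {
                "dataset_template": {
                    "location": "us-central1",
                },
            },
            # no "append_only" block in this prerelease
        },
    },
    backfill_none={})
```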

pulumi_gcp/firebase/database_instance.py
@@ -145,8 +145,8 @@ class _DatabaseInstanceState:


  - - -
- :param pulumi.Input[str] name: The fully-qualified resource name of the Firebase Realtime Database, in
- the format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
+ :param pulumi.Input[str] name: The fully-qualified resource name of the Firebase Realtime Database, in the
+ format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
  PROJECT_NUMBER: The Firebase project's [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number)
  Learn more about using project identifiers in Google's [AIP 2510 standard](https://google.aip.dev/cloud/2510).
  :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
@@ -223,8 +223,8 @@ class _DatabaseInstanceState:
  @pulumi.getter
  def name(self) -> Optional[pulumi.Input[str]]:
  """
- The fully-qualified resource name of the Firebase Realtime Database, in
- the format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
+ The fully-qualified resource name of the Firebase Realtime Database, in the
+ format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
  PROJECT_NUMBER: The Firebase project's [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number)
  Learn more about using project identifiers in Google's [AIP 2510 standard](https://google.aip.dev/cloud/2510).
  """
@@ -564,8 +564,8 @@ class DatabaseInstance(pulumi.CustomResource):


  - - -
- :param pulumi.Input[str] name: The fully-qualified resource name of the Firebase Realtime Database, in
- the format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
+ :param pulumi.Input[str] name: The fully-qualified resource name of the Firebase Realtime Database, in the
+ format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
  PROJECT_NUMBER: The Firebase project's [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number)
  Learn more about using project identifiers in Google's [AIP 2510 standard](https://google.aip.dev/cloud/2510).
  :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
@@ -627,8 +627,8 @@ class DatabaseInstance(pulumi.CustomResource):
  @pulumi.getter
  def name(self) -> pulumi.Output[str]:
  """
- The fully-qualified resource name of the Firebase Realtime Database, in
- the format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
+ The fully-qualified resource name of the Firebase Realtime Database, in the
+ format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
  PROJECT_NUMBER: The Firebase project's [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number)
  Learn more about using project identifiers in Google's [AIP 2510 standard](https://google.aip.dev/cloud/2510).
  """

pulumi_gcp/firebase/hosting_site.py
@@ -93,8 +93,8 @@ class _HostingSiteState:
  :param pulumi.Input[str] app_id: Optional. The [ID of a Web App](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects.webApps#WebApp.FIELDS.app_id)
  associated with the Hosting site.
  :param pulumi.Input[str] default_url: The default URL for the site in the form of https://{name}.web.app
- :param pulumi.Input[str] name: Output only. The fully-qualified resource name of the Hosting site, in
- the format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
+ :param pulumi.Input[str] name: Output only. The fully-qualified resource name of the Hosting site, in the
+ format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
  Firebase project's
  [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number) ***(recommended)*** or its
  [`ProjectId`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_id).
@@ -146,8 +146,8 @@ class _HostingSiteState:
  @pulumi.getter
  def name(self) -> Optional[pulumi.Input[str]]:
  """
- Output only. The fully-qualified resource name of the Hosting site, in
- the format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
+ Output only. The fully-qualified resource name of the Hosting site, in the
+ format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
  Firebase project's
  [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number) ***(recommended)*** or its
  [`ProjectId`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_id).
@@ -386,8 +386,8 @@ class HostingSite(pulumi.CustomResource):
  :param pulumi.Input[str] app_id: Optional. The [ID of a Web App](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects.webApps#WebApp.FIELDS.app_id)
  associated with the Hosting site.
  :param pulumi.Input[str] default_url: The default URL for the site in the form of https://{name}.web.app
- :param pulumi.Input[str] name: Output only. The fully-qualified resource name of the Hosting site, in
- the format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
+ :param pulumi.Input[str] name: Output only. The fully-qualified resource name of the Hosting site, in the
+ format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
  Firebase project's
  [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number) ***(recommended)*** or its
  [`ProjectId`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_id).
@@ -431,8 +431,8 @@ class HostingSite(pulumi.CustomResource):
  @pulumi.getter
  def name(self) -> pulumi.Output[str]:
  """
- Output only. The fully-qualified resource name of the Hosting site, in
- the format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
+ Output only. The fully-qualified resource name of the Hosting site, in the
+ format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
  Firebase project's
  [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number) ***(recommended)*** or its
  [`ProjectId`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_id).
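
The Firebase hunks above only re-wrap the `name` documentation for `DatabaseInstance` and `HostingSite`; the values themselves are unchanged. A trivial sketch of surfacing those fully-qualified names from existing resources (the function name is arbitrary):

```python
import pulumi
import pulumi_gcp as gcp

def export_firebase_names(db: gcp.firebase.DatabaseInstance,
                          site: gcp.firebase.HostingSite) -> None:
    # projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
    pulumi.export("realtimeDatabaseName", db.name)
    # projects/PROJECT_IDENTIFIER/sites/SITE_ID
    pulumi.export("hostingSiteName", site.name)
```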