google-apis-dataplex_v1 0.68.0 → 0.70.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: c4c28c5e6461b0a27679a78aef047130cb7c37e54ec67d7b40bb60b713a58611
- data.tar.gz: e9a0503ffc779a3c88cf6e949bf2df398fd0a29c60970913cf1b3d99ea15e854
+ metadata.gz: eea6e567cc3ea8be01e5bfe698578a284eda9c87ab519b4a902e231059269098
+ data.tar.gz: 54756c25ccb87b57927179ab9ffa3320e1440a697402c060b7818d3547075640
  SHA512:
- metadata.gz: 43e7c84107313ad60bd99eb4bfdf4d3cf308d4b628af3ddbd59da2d869c7501d430e54297ada5490cf5748a48b7f6ace70c8ffabdcfe5e31fb94c2f07135d619
- data.tar.gz: e95594241315ea47ad3efe258b71a8f102ab8a90a9c594653c806a2035b45d5a9eceb83be2111c23e413ce1332016035e6f9916bcc7511e7ebca7c5d296f6572
+ metadata.gz: ed18bcefe321fb69396ff32d29f4ee753e22f7a12ba7f87dbd6e2e758185dac36184e8dc1566c511f76baf48598c81a3abbd360d4d5d2f4c72edacc751f15b5f
+ data.tar.gz: fbdb77ff63859ebe5a5cad1bb051386ef9df2918873956bf880da4ca48629eebdf621ce92a6470a4b0963e0e6d2eec8788702a5f4360b21f9ab83bf22f88d1c3
data/CHANGELOG.md CHANGED
@@ -1,5 +1,14 @@
  # Release history for google-apis-dataplex_v1
 
+ ### v0.70.0 (2025-03-16)
+
+ * Regenerated from discovery document revision 20250307
+
+ ### v0.69.0 (2025-03-02)
+
+ * Regenerated from discovery document revision 20250222
+ * Regenerated using generator version 0.16.0
+
  ### v0.68.0 (2024-12-15)
 
  * Regenerated from discovery document revision 20241206
@@ -1169,6 +1169,37 @@ module Google
  end
  end
 
+ # Payload associated with Business Glossary related log events.
+ class GoogleCloudDataplexV1BusinessGlossaryEvent
+ include Google::Apis::Core::Hashable
+
+ # The type of the event.
+ # Corresponds to the JSON property `eventType`
+ # @return [String]
+ attr_accessor :event_type
+
+ # The log message.
+ # Corresponds to the JSON property `message`
+ # @return [String]
+ attr_accessor :message
+
+ # Name of the resource.
+ # Corresponds to the JSON property `resource`
+ # @return [String]
+ attr_accessor :resource
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @event_type = args[:event_type] if args.key?(:event_type)
+ @message = args[:message] if args.key?(:message)
+ @resource = args[:resource] if args.key?(:resource)
+ end
+ end
+
  # Cancel task jobs.
  class GoogleCloudDataplexV1CancelJobRequest
  include Google::Apis::Core::Hashable
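
The new GoogleCloudDataplexV1BusinessGlossaryEvent model follows the gem's usual Hashable pattern (keyword-argument constructor plus plain accessors), so it can be built and read like any other generated class. A minimal sketch with placeholder field values (the allowed eventType strings come from the discovery document, not from this diff):

```ruby
require "google/apis/dataplex_v1"

# Placeholder values; only the field names are taken from the generated class.
event = Google::Apis::DataplexV1::GoogleCloudDataplexV1BusinessGlossaryEvent.new(
  event_type: "GLOSSARY_CREATE", # hypothetical event type string
  message:    "Created glossary 'sales-terms'",
  resource:   "projects/example-project/locations/us-central1/glossaries/sales-terms"
)

puts event.event_type # => "GLOSSARY_CREATE"
puts event.resource
```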
@@ -1584,9 +1615,7 @@ module Google
  class GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing
  include Google::Apis::Core::Hashable
 
- # Output only. The BigQuery dataset to publish to. It takes the form projects/`
- # project_id`/datasets/`dataset_id`. If not set, the service creates a default
- # publishing dataset.
+ # Output only. The BigQuery dataset the discovered tables are published to.
  # Corresponds to the JSON property `dataset`
  # @return [String]
  attr_accessor :dataset
@@ -1637,6 +1666,26 @@ module Google
  # @return [String]
  attr_accessor :connection
 
+ # Optional. The location of the BigQuery dataset to publish BigLake external or
+ # non-BigLake external tables to. 1. If the Cloud Storage bucket is located in a
+ # multi-region bucket, then BigQuery dataset can be in the same multi-region
+ # bucket or any single region that is included in the same multi-region bucket.
+ # The datascan can be created in any single region that is included in the same
+ # multi-region bucket 2. If the Cloud Storage bucket is located in a dual-region
+ # bucket, then BigQuery dataset can be located in regions that are included in
+ # the dual-region bucket, or in a multi-region that includes the dual-region.
+ # The datascan can be created in any single region that is included in the same
+ # dual-region bucket. 3. If the Cloud Storage bucket is located in a single
+ # region, then BigQuery dataset can be in the same single region or any multi-
+ # region bucket that includes the same single region. The datascan will be
+ # created in the same single region as the bucket. 4. If the BigQuery dataset is
+ # in single region, it must be in the same single region as the datascan.For
+ # supported values, refer to https://cloud.google.com/bigquery/docs/locations#
+ # supported_locations.
+ # Corresponds to the JSON property `location`
+ # @return [String]
+ attr_accessor :location
+
  # Optional. Determines whether to publish discovered tables as BigLake external
  # tables or non-BigLake external tables.
  # Corresponds to the JSON property `tableType`
@@ -1650,6 +1699,7 @@ module Google
  # Update properties of this object
  def update!(**args)
  @connection = args[:connection] if args.key?(:connection)
+ @location = args[:location] if args.key?(:location)
  @table_type = args[:table_type] if args.key?(:table_type)
  end
  end
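
The new location field sits next to the existing connection and table_type fields on the discovery-scan BigQuery publishing config. A hedged sketch of setting it (the GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig class name and the BIGLAKE table type value are assumptions based on the surrounding generated code, which this hunk does not show):

```ruby
require "google/apis/dataplex_v1"

publishing = Google::Apis::DataplexV1::GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig.new(
  connection: "projects/example-project/locations/us/connections/example-connection",
  # New in this revision: pin the publishing dataset's BigQuery location,
  # subject to the bucket/dataset region rules described in the comment above.
  location:   "us",
  table_type: "BIGLAKE" # assumed enum value for BigLake external tables
)
```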
@@ -2164,8 +2214,9 @@ module Google
  attr_accessor :post_scan_actions
 
  # Optional. A filter applied to all rows in a single DataScan job. The filter
- # needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL
- # syntax. Example: col1 >= 0 AND col2 < 10
+ # needs to be a valid SQL expression for a WHERE clause in GoogleSQL syntax (
+ # https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#
+ # where_clause).Example: col1 >= 0 AND col2 < 10
  # Corresponds to the JSON property `rowFilter`
  # @return [String]
  attr_accessor :row_filter
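
The rowFilter documentation now points to GoogleSQL WHERE-clause syntax, but the field itself is still a plain string on the scan spec. A sketch, assuming the field belongs to GoogleCloudDataplexV1DataQualitySpec as in the published API:

```ruby
require "google/apis/dataplex_v1"

# The filter is an ordinary GoogleSQL boolean expression, applied to every row.
quality_spec = Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualitySpec.new(
  row_filter: "col1 >= 0 AND col2 < 10" # example from the field documentation
)
```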
@@ -2503,8 +2554,9 @@ module Google
  attr_accessor :regex_expectation
 
  # Evaluates whether each row passes the specified condition.The SQL expression
- # needs to use BigQuery standard SQL syntax and should produce a boolean value
- # per row as the result.Example: col1 >= 0 AND col2 < 10
+ # needs to use GoogleSQL syntax (https://cloud.google.com/bigquery/docs/
+ # reference/standard-sql/query-syntax) and should produce a boolean value per
+ # row as the result.Example: col1 >= 0 AND col2 < 10
  # Corresponds to the JSON property `rowConditionExpectation`
  # @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualityRuleRowConditionExpectation]
  attr_accessor :row_condition_expectation
@@ -2516,10 +2568,11 @@ module Google
 
  # A SQL statement that is evaluated to return rows that match an invalid state.
  # If any rows are are returned, this rule fails.The SQL statement must use
- # BigQuery standard SQL syntax, and must not contain any semicolons.You can use
- # the data reference parameter $`data()` to reference the source table with all
- # of its precondition filters applied. Examples of precondition filters include
- # row filters, incremental data filters, and sampling. For more information, see
+ # GoogleSQL syntax (https://cloud.google.com/bigquery/docs/reference/standard-
+ # sql/query-syntax), and must not contain any semicolons.You can use the data
+ # reference parameter $`data()` to reference the source table with all of its
+ # precondition filters applied. Examples of precondition filters include row
+ # filters, incremental data filters, and sampling. For more information, see
  # Data reference parameter (https://cloud.google.com/dataplex/docs/auto-data-
  # quality-overview#data-reference-parameter).Example: SELECT * FROM $`data()`
  # WHERE price < 0
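
This comment documents the SQL statement carried by a SQL-assertion rule; in actual SQL the data reference parameter is written ${data()} (the backticks in the generated comment stand in for braces). A hedged sketch of such a rule (the GoogleCloudDataplexV1DataQualityRuleSqlAssertion class, its sql_statement field, and the rule's dimension value are assumed from the published model, not shown in this hunk):

```ruby
require "google/apis/dataplex_v1"

assertion = Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualityRuleSqlAssertion.new(
  # Any rows returned by this statement cause the rule to fail.
  sql_statement: "SELECT * FROM ${data()} WHERE price < 0"
)

rule = Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualityRule.new(
  name:          "no-negative-prices",
  dimension:     "VALIDITY", # assumed dimension value
  sql_assertion: assertion
)
```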
@@ -2540,8 +2593,9 @@ module Google
  alias_method :suspended?, :suspended
 
  # Evaluates whether the provided expression is true.The SQL expression needs to
- # use BigQuery standard SQL syntax and should produce a scalar boolean result.
- # Example: MIN(col1) >= 0
+ # use GoogleSQL syntax (https://cloud.google.com/bigquery/docs/reference/
+ # standard-sql/query-syntax) and should produce a scalar boolean result.Example:
+ # MIN(col1) >= 0
  # Corresponds to the JSON property `tableConditionExpectation`
  # @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1DataQualityRuleTableConditionExpectation]
  attr_accessor :table_condition_expectation
@@ -2674,6 +2728,7 @@ module Google
  # row-level type rules.Evaluated count can be configured to either include all
  # rows (default) - with null rows automatically failing rule evaluation, or
  # exclude null rows from the evaluated_count, by setting ignore_nulls = true.
+ # This field is not set for rule SqlAssertion.
  # Corresponds to the JSON property `evaluatedCount`
  # @return [Fixnum]
  attr_accessor :evaluated_count
@@ -2701,8 +2756,7 @@ module Google
  attr_accessor :passed
  alias_method :passed?, :passed
 
- # The number of rows which passed a rule evaluation.This field is only valid for
- # row-level type rules.
+ # This field is not set for rule SqlAssertion.
  # Corresponds to the JSON property `passedCount`
  # @return [Fixnum]
  attr_accessor :passed_count
@@ -2730,8 +2784,9 @@ module Google
  end
 
  # Evaluates whether each row passes the specified condition.The SQL expression
- # needs to use BigQuery standard SQL syntax and should produce a boolean value
- # per row as the result.Example: col1 >= 0 AND col2 < 10
+ # needs to use GoogleSQL syntax (https://cloud.google.com/bigquery/docs/
+ # reference/standard-sql/query-syntax) and should produce a boolean value per
+ # row as the result.Example: col1 >= 0 AND col2 < 10
  class GoogleCloudDataplexV1DataQualityRuleRowConditionExpectation
  include Google::Apis::Core::Hashable
 
@@ -2771,10 +2826,11 @@ module Google
 
  # A SQL statement that is evaluated to return rows that match an invalid state.
  # If any rows are are returned, this rule fails.The SQL statement must use
- # BigQuery standard SQL syntax, and must not contain any semicolons.You can use
- # the data reference parameter $`data()` to reference the source table with all
- # of its precondition filters applied. Examples of precondition filters include
- # row filters, incremental data filters, and sampling. For more information, see
+ # GoogleSQL syntax (https://cloud.google.com/bigquery/docs/reference/standard-
+ # sql/query-syntax), and must not contain any semicolons.You can use the data
+ # reference parameter $`data()` to reference the source table with all of its
+ # precondition filters applied. Examples of precondition filters include row
+ # filters, incremental data filters, and sampling. For more information, see
  # Data reference parameter (https://cloud.google.com/dataplex/docs/auto-data-
  # quality-overview#data-reference-parameter).Example: SELECT * FROM $`data()`
  # WHERE price < 0
@@ -2849,8 +2905,9 @@ module Google
  end
 
  # Evaluates whether the provided expression is true.The SQL expression needs to
- # use BigQuery standard SQL syntax and should produce a scalar boolean result.
- # Example: MIN(col1) >= 0
+ # use GoogleSQL syntax (https://cloud.google.com/bigquery/docs/reference/
+ # standard-sql/query-syntax) and should produce a scalar boolean result.Example:
+ # MIN(col1) >= 0
  class GoogleCloudDataplexV1DataQualityRuleTableConditionExpectation
  include Google::Apis::Core::Hashable
 
@@ -2987,8 +3044,9 @@ module Google
  attr_accessor :post_scan_actions
 
  # Optional. A filter applied to all rows in a single DataScan job. The filter
- # needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL
- # syntax. Example: col1 >= 0 AND col2 < 10
+ # needs to be a valid SQL expression for a WHERE clause in GoogleSQL syntax (
+ # https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#
+ # where_clause).Example: col1 >= 0 AND col2 < 10
  # Corresponds to the JSON property `rowFilter`
  # @return [String]
  attr_accessor :row_filter
@@ -3177,11 +3235,16 @@ module Google
  end
 
  # Represents a user-visible job which provides the insights for the related data
- # source.For example: Data Quality: generates queries based on the rules and
- # runs against the data to get data quality check results. Data Profile:
- # analyzes the data in table(s) and generates insights about the structure,
- # content and relationships (such as null percent, cardinality, min/max/mean,
- # etc).
+ # source.For example: Data quality: generates queries based on the rules and
+ # runs against the data to get data quality check results. For more information,
+ # see Auto data quality overview (https://cloud.google.com/dataplex/docs/auto-
+ # data-quality-overview). Data profile: analyzes the data in tables and
+ # generates insights about the structure, content and relationships (such as
+ # null percent, cardinality, min/max/mean, etc). For more information, see About
+ # data profiling (https://cloud.google.com/dataplex/docs/data-profiling-overview)
+ # . Data discovery: scans data in Cloud Storage buckets to extract and then
+ # catalog metadata. For more information, see Discover and catalog Cloud Storage
+ # data (https://cloud.google.com/bigquery/docs/automatic-discovery).
  class GoogleCloudDataplexV1DataScan
  include Google::Apis::Core::Hashable
 
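
The expanded class comment now names three scan kinds: data quality, data profile, and data discovery. A hedged sketch of creating a profile scan through the generated service (the CloudDataplexService class and the create_project_location_data_scan method name follow this gem's usual generated naming, but are not shown in this diff):

```ruby
require "google/apis/dataplex_v1"
require "googleauth"

service = Google::Apis::DataplexV1::CloudDataplexService.new
service.authorization = Google::Auth.get_application_default(
  ["https://www.googleapis.com/auth/cloud-platform"]
)

scan = Google::Apis::DataplexV1::GoogleCloudDataplexV1DataScan.new(
  data: Google::Apis::DataplexV1::GoogleCloudDataplexV1DataSource.new(
    resource: "//bigquery.googleapis.com/projects/example-project/datasets/example_ds/tables/example_table"
  ),
  data_profile_spec: Google::Apis::DataplexV1::GoogleCloudDataplexV1DataProfileSpec.new
)

# Assumed generated method for dataScans.create.
service.create_project_location_data_scan(
  "projects/example-project/locations/us-central1",
  scan,
  data_scan_id: "example-profile-scan"
)
```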
@@ -4327,7 +4390,9 @@ module Google
  # @return [String]
  attr_accessor :name
 
- # Optional. Immutable. The resource name of the parent entry.
+ # Optional. Immutable. The resource name of the parent entry, in the format
+ # projects/`project_id_or_number`/locations/`location_id`/entryGroups/`
+ # entry_group_id`/entries/`entry_id`.
  # Corresponds to the JSON property `parentEntry`
  # @return [String]
  attr_accessor :parent_entry
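
The parentEntry comment now spells out the expected resource-name format; in the generated model it remains a plain string. A minimal sketch, assuming the GoogleCloudDataplexV1Entry class from this gem:

```ruby
require "google/apis/dataplex_v1"

entry = Google::Apis::DataplexV1::GoogleCloudDataplexV1Entry.new(
  # Format documented above:
  # projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}
  parent_entry: "projects/example-project/locations/us-central1/entryGroups/example-group/entries/parent-entry"
)
```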
@@ -5044,13 +5109,14 @@ module Google
  # The aspects to modify. Supports the following syntaxes: `aspect_type_reference`
  # : matches aspects that belong to the specified aspect type and are attached
  # directly to the entry. `aspect_type_reference`@`path`: matches aspects that
- # belong to the specified aspect type and path. @* : matches aspects of the
- # given type for all paths. *@path : matches aspects of all types on the given
- # path. Replace `aspect_type_reference` with a reference to the aspect type, in
- # the format `project_id_or_number`.`location_id`.`aspect_type_id`.If you leave
- # this field empty, it is treated as specifying exactly those aspects that are
- # present within the specified entry.In FULL entry sync mode, Dataplex
- # implicitly adds the keys for all of the required aspects of an entry.
+ # belong to the specified aspect type and path. `aspect_type_reference`@* :
+ # matches aspects of the given type for all paths. *@path : matches aspects of
+ # all types on the given path.Replace `aspect_type_reference` with a reference
+ # to the aspect type, in the format `project_id_or_number`.`location_id`.`
+ # aspect_type_id`.In FULL entry sync mode, if you leave this field empty, it is
+ # treated as specifying exactly those aspects that are present within the
+ # specified entry. Dataplex implicitly adds the keys for all of the required
+ # aspects of an entry.
  # Corresponds to the JSON property `aspectKeys`
  # @return [Array<String>]
  attr_accessor :aspect_keys
@@ -5068,11 +5134,12 @@ module Google
  # in the metadata import file. All modifiable fields are updated, regardless of
  # the fields that are listed in the update mask, and regardless of whether a
  # field is present in the entry object.The update_mask field is ignored when an
- # entry is created or re-created.Dataplex also determines which entries and
- # aspects to modify by comparing the values and timestamps that you provide in
- # the metadata import file with the values and timestamps that exist in your
- # project. For more information, see Comparison logic (https://cloud.google.com/
- # dataplex/docs/import-metadata#data-modification-logic).
+ # entry is created or re-created.In an aspect-only metadata job (when entry sync
+ # mode is NONE), set this value to aspects.Dataplex also determines which
+ # entries and aspects to modify by comparing the values and timestamps that you
+ # provide in the metadata import file with the values and timestamps that exist
+ # in your project. For more information, see Comparison logic (https://cloud.
+ # google.com/dataplex/docs/import-metadata#data-modification-logic).
  # Corresponds to the JSON property `updateMask`
  # @return [String]
  attr_accessor :update_mask
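
Both aspectKeys and updateMask belong to an item in the metadata import file, and the reworded comments matter most for aspect-only jobs, where the update mask is set to aspects. A hedged sketch of one import item using the gem's model (the GoogleCloudDataplexV1ImportItem class name and the aspect-key path are assumptions for illustration):

```ruby
require "google/apis/dataplex_v1"

item = Google::Apis::DataplexV1::GoogleCloudDataplexV1ImportItem.new(
  entry: Google::Apis::DataplexV1::GoogleCloudDataplexV1Entry.new(
    name: "projects/example-project/locations/us-central1/entryGroups/example-group/entries/example-entry"
  ),
  # Aspect-only metadata job (entry sync mode NONE): update only the aspects.
  update_mask: "aspects",
  aspect_keys: [
    "example-project.us-central1.example-aspect-type",               # type attached directly to the entry
    "example-project.us-central1.example-aspect-type@Schema.column1" # hypothetical type@path key
  ]
)
```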
@@ -6024,7 +6091,12 @@ module Google
  # @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1MetadataJobImportJobResult]
  attr_accessor :import_result
 
- # Job specification for a metadata import job
+ # Job specification for a metadata import job.You can run the following kinds of
+ # metadata import jobs: Full sync of entries with incremental import of their
+ # aspects. Supported for custom entries. Incremental import of aspects only.
+ # Supported for aspects that belong to custom entries and system entries. For
+ # custom entries, you can modify both optional aspects and required aspects. For
+ # system entries, you can modify optional aspects.
  # Corresponds to the JSON property `importSpec`
  # @return [Google::Apis::DataplexV1::GoogleCloudDataplexV1MetadataJobImportJobSpec]
  attr_accessor :import_spec
@@ -6130,21 +6202,21 @@ module Google
  end
  end
 
- # Job specification for a metadata import job
+ # Job specification for a metadata import job.You can run the following kinds of
+ # metadata import jobs: Full sync of entries with incremental import of their
+ # aspects. Supported for custom entries. Incremental import of aspects only.
+ # Supported for aspects that belong to custom entries and system entries. For
+ # custom entries, you can modify both optional aspects and required aspects. For
+ # system entries, you can modify optional aspects.
  class GoogleCloudDataplexV1MetadataJobImportJobSpec
  include Google::Apis::Core::Hashable
 
- # Required. The sync mode for aspects. Only INCREMENTAL mode is supported for
- # aspects. An aspect is modified only if the metadata import file includes a
- # reference to the aspect in the update_mask field and the aspect_keys field.
+ # Required. The sync mode for aspects.
  # Corresponds to the JSON property `aspectSyncMode`
  # @return [String]
  attr_accessor :aspect_sync_mode
 
- # Required. The sync mode for entries. Only FULL mode is supported for entries.
- # All entries in the job's scope are modified. If an entry exists in Dataplex
- # but isn't included in the metadata import file, the entry is deleted when you
- # run the metadata job.
+ # Required. The sync mode for entries.
  # Corresponds to the JSON property `entrySyncMode`
  # @return [String]
  attr_accessor :entry_sync_mode
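
With the loosened wording, the spec now admits the aspect-only combination described elsewhere in this revision. A sketch of the two configurations, using the enum strings (FULL, INCREMENTAL, NONE) that appear in the surrounding documentation; the source_storage_uri field name is an assumption:

```ruby
require "google/apis/dataplex_v1"

# Full sync of entries with incremental import of their aspects.
full_spec = Google::Apis::DataplexV1::GoogleCloudDataplexV1MetadataJobImportJobSpec.new(
  entry_sync_mode:    "FULL",
  aspect_sync_mode:   "INCREMENTAL",
  source_storage_uri: "gs://example-bucket/metadata-import/"
)

# Aspect-only import: entries themselves are left untouched.
aspect_only_spec = Google::Apis::DataplexV1::GoogleCloudDataplexV1MetadataJobImportJobSpec.new(
  entry_sync_mode:    "NONE",
  aspect_sync_mode:   "INCREMENTAL",
  source_storage_uri: "gs://example-bucket/metadata-import/"
)
```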
@@ -6207,20 +6279,20 @@ module Google
  # Optional. The aspect types that are in scope for the import job, specified as
  # relative resource names in the format projects/`project_number_or_id`/
  # locations/`location_id`/aspectTypes/`aspect_type_id`. The job modifies only
- # the aspects that belong to these aspect types.If the metadata import file
- # attempts to modify an aspect whose type isn't included in this list, the
- # import job is halted before modifying any entries or aspects.The location of
- # an aspect type must either match the location of the job, or the aspect type
- # must be global.
+ # the aspects that belong to these aspect types.This field is required when
+ # creating an aspect-only import job.If the metadata import file attempts to
+ # modify an aspect whose type isn't included in this list, the import job is
+ # halted before modifying any entries or aspects.The location of an aspect type
+ # must either match the location of the job, or the aspect type must be global.
  # Corresponds to the JSON property `aspectTypes`
  # @return [Array<String>]
  attr_accessor :aspect_types
 
  # Required. The entry group that is in scope for the import job, specified as a
  # relative resource name in the format projects/`project_number_or_id`/locations/
- # `location_id`/entryGroups/`entry_group_id`. Only entries that belong to the
- # specified entry group are affected by the job.Must contain exactly one element.
- # The entry group and the job must be in the same location.
+ # `location_id`/entryGroups/`entry_group_id`. Only entries and aspects that
+ # belong to the specified entry group are affected by the job.Must contain
+ # exactly one element. The entry group and the job must be in the same location.
  # Corresponds to the JSON property `entryGroups`
  # @return [Array<String>]
  attr_accessor :entry_groups
@@ -6228,10 +6300,11 @@ module Google
  # Required. The entry types that are in scope for the import job, specified as
  # relative resource names in the format projects/`project_number_or_id`/
  # locations/`location_id`/entryTypes/`entry_type_id`. The job modifies only the
- # entries that belong to these entry types.If the metadata import file attempts
- # to modify an entry whose type isn't included in this list, the import job is
- # halted before modifying any entries or aspects.The location of an entry type
- # must either match the location of the job, or the entry type must be global.
+ # entries and aspects that belong to these entry types.If the metadata import
+ # file attempts to modify an entry whose type isn't included in this list, the
+ # import job is halted before modifying any entries or aspects.The location of
+ # an entry type must either match the location of the job, or the entry type
+ # must be global.
  # Corresponds to the JSON property `entryTypes`
  # @return [Array<String>]
  attr_accessor :entry_types
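
The scope comments now state that aspectTypes is required for aspect-only jobs and that aspects, not just entries, inside the single entry group are affected. A hedged sketch of a scope object (the GoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope class name is assumed from the gem's naming convention):

```ruby
require "google/apis/dataplex_v1"

scope = Google::Apis::DataplexV1::GoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope.new(
  # Exactly one entry group, in the same location as the job.
  entry_groups: ["projects/example-project/locations/us-central1/entryGroups/example-group"],
  entry_types:  ["projects/example-project/locations/global/entryTypes/example-entry-type"],
  # Required when creating an aspect-only import job.
  aspect_types: ["projects/example-project/locations/global/aspectTypes/example-aspect-type"]
)
```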
@@ -16,13 +16,13 @@ module Google
  module Apis
  module DataplexV1
  # Version of the google-apis-dataplex_v1 gem
- GEM_VERSION = "0.68.0"
+ GEM_VERSION = "0.70.0"
 
  # Version of the code generator used to generate this client
- GENERATOR_VERSION = "0.15.1"
+ GENERATOR_VERSION = "0.16.0"
 
  # Revision of the discovery document this client was generated from
- REVISION = "20241206"
+ REVISION = "20250307"
  end
  end
  end
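
For consumers, the visible change is the version bump and the newer discovery revision. A short sketch of pinning the gem and reading the constants shown above (standard RubyGems/Bundler usage, nothing specific to this diff):

```ruby
# Gemfile: gem "google-apis-dataplex_v1", "~> 0.70"
require "google/apis/dataplex_v1"

puts Google::Apis::DataplexV1::GEM_VERSION # => "0.70.0"
puts Google::Apis::DataplexV1::REVISION    # => "20250307"
```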
@@ -190,6 +190,12 @@ module Google
  include Google::Apis::Core::JsonObjectSupport
  end
 
+ class GoogleCloudDataplexV1BusinessGlossaryEvent
+ class Representation < Google::Apis::Core::JsonRepresentation; end
+
+ include Google::Apis::Core::JsonObjectSupport
+ end
+
  class GoogleCloudDataplexV1CancelJobRequest
  class Representation < Google::Apis::Core::JsonRepresentation; end
 
@@ -1597,6 +1603,15 @@ module Google
  end
  end
 
+ class GoogleCloudDataplexV1BusinessGlossaryEvent
+ # @private
+ class Representation < Google::Apis::Core::JsonRepresentation
+ property :event_type, as: 'eventType'
+ property :message, as: 'message'
+ property :resource, as: 'resource'
+ end
+ end
+
  class GoogleCloudDataplexV1CancelJobRequest
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
@@ -1723,6 +1738,7 @@ module Google
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
  property :connection, as: 'connection'
+ property :location, as: 'location'
  property :table_type, as: 'tableType'
  end
  end
@@ -389,10 +389,7 @@ module Google
  execute_or_queue_command(command, &block)
  end
 
- # Looks up a single Entry by name using the permission on the source system.
- # Caution: The BigQuery metadata that is stored in Dataplex Catalog is changing.
- # For more information, see Changes to BigQuery metadata stored in Dataplex
- # Catalog (https://cloud.google.com/dataplex/docs/biqquery-metadata-changes).
+ # Looks up an entry by name using the permission on the source system.
  # @param [String] name
  # Required. The project to which the request should be attributed in the
  # following form: projects/`project`/locations/`location`.
@@ -443,8 +440,8 @@ module Google
  # Required. The project to which the request should be attributed in the
  # following form: projects/`project`/locations/`location`.
  # @param [String] order_by
- # Optional. Specifies the ordering of results. Supported values are: * relevance
- # (default) * last_modified_timestamp * last_modified_timestamp asc
+ # Optional. Specifies the ordering of results. Supported values are: relevance (
+ # default) last_modified_timestamp last_modified_timestamp asc
  # @param [Fixnum] page_size
  # Optional. Number of results in the search page. If <=0, then defaults to 10.
  # Max limit for page_size is 1000. Throws an invalid argument for page_size >
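
The order_by values are unchanged; only the bullet markers were dropped from the generated comment. A hedged sketch of passing one of them (the search_project_location_entries method name follows this gem's generated naming for the searchEntries call, but is not shown in this diff):

```ruby
require "google/apis/dataplex_v1"
require "googleauth"

service = Google::Apis::DataplexV1::CloudDataplexService.new
service.authorization = Google::Auth.get_application_default(
  ["https://www.googleapis.com/auth/cloud-platform"]
)

# Supported order_by values per the docs above:
# "relevance" (default), "last_modified_timestamp", "last_modified_timestamp asc".
results = service.search_project_location_entries(
  "projects/example-project/locations/global",
  query:     "example",
  order_by:  "last_modified_timestamp",
  page_size: 10
)
```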
@@ -1673,9 +1670,6 @@ module Google
 
  # Create a DataTaxonomy resource.
  # @param [String] parent
- # Required. The resource name of the data taxonomy location, of the form:
- # projects/`project_number`/locations/`location_id` where location_id refers to
- # a GCP region.
  # @param [Google::Apis::DataplexV1::GoogleCloudDataplexV1DataTaxonomy] google_cloud_dataplex_v1_data_taxonomy_object
  # @param [String] data_taxonomy_id
  # Required. DataTaxonomy identifier. * Must contain only lowercase letters,
@@ -1754,8 +1748,6 @@ module Google
 
  # Retrieves a DataTaxonomy resource.
  # @param [String] name
- # Required. The resource name of the DataTaxonomy: projects/`project_number`/
- # locations/`location_id`/dataTaxonomies/`data_taxonomy_id`
  # @param [String] fields
  # Selector specifying which fields to include in a partial response.
  # @param [String] quota_user
@@ -2718,10 +2710,7 @@ module Google
  execute_or_queue_command(command, &block)
  end
 
- # Gets an Entry.Caution: The BigQuery metadata that is stored in Dataplex
- # Catalog is changing. For more information, see Changes to BigQuery metadata
- # stored in Dataplex Catalog (https://cloud.google.com/dataplex/docs/biqquery-
- # metadata-changes).
+ # Gets an Entry.
  # @param [String] name
  # Required. The resource name of the Entry: projects/`project`/locations/`
  # location`/entryGroups/`entry_group`/entries/`entry`.
metadata CHANGED
@@ -1,14 +1,13 @@
  --- !ruby/object:Gem::Specification
  name: google-apis-dataplex_v1
  version: !ruby/object:Gem::Version
- version: 0.68.0
+ version: 0.70.0
  platform: ruby
  authors:
  - Google LLC
- autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-12-15 00:00:00.000000000 Z
+ date: 2025-03-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: google-apis-core
@@ -58,9 +57,8 @@ licenses:
  metadata:
  bug_tracker_uri: https://github.com/googleapis/google-api-ruby-client/issues
  changelog_uri: https://github.com/googleapis/google-api-ruby-client/tree/main/generated/google-apis-dataplex_v1/CHANGELOG.md
- documentation_uri: https://googleapis.dev/ruby/google-apis-dataplex_v1/v0.68.0
+ documentation_uri: https://googleapis.dev/ruby/google-apis-dataplex_v1/v0.70.0
  source_code_uri: https://github.com/googleapis/google-api-ruby-client/tree/main/generated/google-apis-dataplex_v1
- post_install_message:
  rdoc_options: []
  require_paths:
  - lib
@@ -75,8 +73,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.5.23
- signing_key:
+ rubygems_version: 3.6.5
  specification_version: 4
  summary: Simple REST client for Cloud Dataplex API V1
  test_files: []