google-cloud-bigquery 1.21.1 → 1.25.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -153,13 +153,21 @@ module Google
153
153
  # is 1,024 characters. If `job_id` is provided, then `prefix` will not
154
154
  # be used.
155
155
  # @param [Hash] labels A hash of user-provided labels associated with
156
- # the job. You can use these to organize and group your jobs. Label
157
- # keys and values can be no longer than 63 characters, can only
158
- # contain lowercase letters, numeric characters, underscores and
159
- # dashes. International characters are allowed. Label values are
160
- # optional. Label keys must start with a letter and each label in the
161
- # list must have a different key. See [Requirements for
162
- # labels](https://cloud.google.com/bigquery/docs/creating-managing-labels#requirements).
156
+ # the job. You can use these to organize and group your jobs.
157
+ #
158
+ # The labels applied to a resource must meet the following requirements:
159
+ #
160
+ # * Each resource can have multiple labels, up to a maximum of 64.
161
+ # * Each label must be a key-value pair.
162
+ # * Keys have a minimum length of 1 character and a maximum length of
163
+ # 63 characters, and cannot be empty. Values can be empty, and have
164
+ # a maximum length of 63 characters.
165
+ # * Keys and values can contain only lowercase letters, numeric characters,
166
+ # underscores, and dashes. All characters must use UTF-8 encoding, and
167
+ # international characters are allowed.
168
+ # * The key portion of a label must be unique. However, you can use the
169
+ # same key with multiple resources.
170
+ # * Keys must start with a lowercase letter or international character.
163
171
  # @yield [job] a job configuration object
164
172
  # @yieldparam [Google::Cloud::Bigquery::CopyJob::Updater] job a job
165
173
  # configuration object for setting additional options.
@@ -411,13 +419,21 @@ module Google
411
419
  # See [Generating a job
412
420
  # ID](https://cloud.google.com/bigquery/docs/managing-jobs#generate-jobid).
413
421
  # @param [Hash] labels A hash of user-provided labels associated with
414
- # the job. You can use these to organize and group your jobs. Label
415
- # keys and values can be no longer than 63 characters, can only
416
- # contain lowercase letters, numeric characters, underscores and
417
- # dashes. International characters are allowed. Label values are
418
- # optional. Label keys must start with a letter and each label in the
419
- # list must have a different key. See [Requirements for
420
- # labels](https://cloud.google.com/bigquery/docs/creating-managing-labels#requirements).
422
+ # the job. You can use these to organize and group your jobs.
423
+ #
424
+ # The labels applied to a resource must meet the following requirements:
425
+ #
426
+ # * Each resource can have multiple labels, up to a maximum of 64.
427
+ # * Each label must be a key-value pair.
428
+ # * Keys have a minimum length of 1 character and a maximum length of
429
+ # 63 characters, and cannot be empty. Values can be empty, and have
430
+ # a maximum length of 63 characters.
431
+ # * Keys and values can contain only lowercase letters, numeric characters,
432
+ # underscores, and dashes. All characters must use UTF-8 encoding, and
433
+ # international characters are allowed.
434
+ # * The key portion of a label must be unique. However, you can use the
435
+ # same key with multiple resources.
436
+ # * Keys must start with a lowercase letter or international character.
421
437
  # @param [Array<String>, String] udfs User-defined function resources
422
438
  # used in a legacy SQL query. May be either a code resource to load from
423
439
  # a Google Cloud Storage URI (`gs://bucket/path`), or an inline resource
@@ -717,9 +733,12 @@ module Google
717
733
  # sql = "SELECT name FROM `my_project.my_dataset.my_table`"
718
734
  # data = bigquery.query sql
719
735
  #
736
+ # # Iterate over the first page of results
720
737
  # data.each do |row|
721
738
  # puts row[:name]
722
739
  # end
740
+ # # Retrieve the next page of results
741
+ # data = data.next if data.next?
723
742
  #
724
743
  # @example Query using legacy SQL:
725
744
  # require "google/cloud/bigquery"
@@ -729,9 +748,12 @@ module Google
729
748
  # sql = "SELECT name FROM [my_project:my_dataset.my_table]"
730
749
  # data = bigquery.query sql, legacy_sql: true
731
750
  #
751
+ # # Iterate over the first page of results
732
752
  # data.each do |row|
733
753
  # puts row[:name]
734
754
  # end
755
+ # # Retrieve the next page of results
756
+ # data = data.next if data.next?
735
757
  #
736
758
  # @example Retrieve all rows: (See {Data#all})
737
759
  # require "google/cloud/bigquery"
@@ -754,9 +776,12 @@ module Google
754
776
  # "WHERE id = ?",
755
777
  # params: [1]
756
778
  #
779
+ # # Iterate over the first page of results
757
780
  # data.each do |row|
758
781
  # puts row[:name]
759
782
  # end
783
+ # # Retrieve the next page of results
784
+ # data = data.next if data.next?
760
785
  #
761
786
  # @example Query using named query parameters:
762
787
  # require "google/cloud/bigquery"
@@ -768,9 +793,12 @@ module Google
768
793
  # "WHERE id = @id",
769
794
  # params: { id: 1 }
770
795
  #
796
+ # # Iterate over the first page of results
771
797
  # data.each do |row|
772
798
  # puts row[:name]
773
799
  # end
800
+ # # Retrieve the next page of results
801
+ # data = data.next if data.next?
774
802
  #
775
803
  # @example Query using named query parameters with types:
776
804
  # require "google/cloud/bigquery"
@@ -783,9 +811,12 @@ module Google
783
811
  # params: { ids: [] },
784
812
  # types: { ids: [:INT64] }
785
813
  #
814
+ # # Iterate over the first page of results
786
815
  # data.each do |row|
787
816
  # puts row[:name]
788
817
  # end
818
+ # # Retrieve the next page of results
819
+ # data = data.next if data.next?
789
820
  #
790
821
  # @example Execute a DDL statement:
791
822
  # require "google/cloud/bigquery"
@@ -824,9 +855,12 @@ module Google
824
855
  # query.table = dataset.table "my_table", skip_lookup: true
825
856
  # end
826
857
  #
858
+ # # Iterate over the first page of results
827
859
  # data.each do |row|
828
860
  # puts row[:name]
829
861
  # end
862
+ # # Retrieve the next page of results
863
+ # data = data.next if data.next?
830
864
  #
831
865
  def query query, params: nil, types: nil, external: nil, max: nil, cache: true, dataset: nil, project: nil,
832
866
  standard_sql: nil, legacy_sql: nil, &block
@@ -888,9 +922,12 @@ module Google
888
922
  # data = bigquery.query "SELECT * FROM my_ext_table",
889
923
  # external: { my_ext_table: csv_table }
890
924
  #
925
+ # # Iterate over the first page of results
891
926
  # data.each do |row|
892
927
  # puts row[:name]
893
928
  # end
929
+ # # Retrieve the next page of results
930
+ # data = data.next if data.next?
894
931
  #
895
932
  def external url, format: nil
896
933
  ext = External.from_urls url, format
@@ -1284,9 +1321,12 @@ module Google
1284
1321
  # "WHERE time_of_date = @time",
1285
1322
  # params: { time: fourpm }
1286
1323
  #
1324
+ # # Iterate over the first page of results
1287
1325
  # data.each do |row|
1288
1326
  # puts row[:name]
1289
1327
  # end
1328
+ # # Retrieve the next page of results
1329
+ # data = data.next if data.next?
1290
1330
  #
1291
1331
  # @example Create Time with fractional seconds:
1292
1332
  # require "google/cloud/bigquery"
@@ -1299,9 +1339,12 @@ module Google
1299
1339
  # "WHERE time_of_date >= @time",
1300
1340
  # params: { time: precise_time }
1301
1341
  #
1342
+ # # Iterate over the first page of results
1302
1343
  # data.each do |row|
1303
1344
  # puts row[:name]
1304
1345
  # end
1346
+ # # Retrieve the next page of results
1347
+ # data = data.next if data.next?
1305
1348
  #
1306
1349
  def time hour, minute, second
1307
1350
  Bigquery::Time.new "#{hour}:#{minute}:#{second}"
@@ -1418,46 +1461,58 @@ module Google
1418
1461
  end
1419
1462
 
1420
1463
  ##
1421
- # Extracts the data from the provided table to a Google Cloud Storage
1422
- # file using an asynchronous method. In this method, an {ExtractJob} is
1423
- # immediately returned. The caller may poll the service by repeatedly
1424
- # calling {Job#reload!} and {Job#done?} to detect when the job is done,
1425
- # or simply block until the job is done by calling
1464
+ # Extracts the data from a table or exports a model to Google Cloud Storage
1465
+ # asynchronously, immediately returning an {ExtractJob} that can be used to
1466
+ # track the progress of the export job. The caller may poll the service by
1467
+ # repeatedly calling {Job#reload!} and {Job#done?} to detect when the job
1468
+ # is done, or simply block until the job is done by calling
1426
1469
  # {Job#wait_until_done!}. See {#extract} for the synchronous version.
1427
- # Use this method instead of {Table#extract_job} to extract data from
1428
- # source tables in other projects.
1470
+ #
1471
+ # Use this method instead of {Table#extract_job} or {Model#extract_job} to
1472
+ # extract data from source tables or models in other projects.
1429
1473
  #
1430
1474
  # The geographic location for the job ("US", "EU", etc.) can be set via
1431
1475
  # {ExtractJob::Updater#location=} in a block passed to this method.
1432
1476
  #
1433
- # @see https://cloud.google.com/bigquery/exporting-data-from-bigquery
1434
- # Exporting Data From BigQuery
1477
+ # @see https://cloud.google.com/bigquery/docs/exporting-data
1478
+ # Exporting table data
1479
+ # @see https://cloud.google.com/bigquery-ml/docs/exporting-models
1480
+ # Exporting models
1435
1481
  #
1436
- # @param [String, Table] table The source table from which to extract
1437
- # data. This can be a table object; or a string ID as specified by the
1438
- # [Standard SQL Query
1482
+ # @param [Table, Model, String] source The source table or model for
1483
+ # the extract operation. This can be a table or model object; or a
1484
+ # table ID string as specified by the [Standard SQL Query
1439
1485
  # Reference](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#from-clause)
1440
1486
  # (`project-name.dataset_id.table_id`) or the [Legacy SQL Query
1441
1487
  # Reference](https://cloud.google.com/bigquery/query-reference#from)
1442
1488
  # (`project-name:dataset_id.table_id`).
1443
1489
  # @param [Google::Cloud::Storage::File, String, Array<String>]
1444
1490
  # extract_url The Google Storage file or file URI pattern(s) to which
1445
- # BigQuery should extract the table data.
1446
- # @param [String] format The exported file format. The default value is
1447
- # `csv`.
1491
+ # BigQuery should extract. For a model export this value should be a
1492
+ # string ending in an object name prefix, since multiple objects will
1493
+ # be exported.
1494
+ # @param [String] format The exported file format. The default value for
1495
+ # tables is `csv`. Tables with nested or repeated fields cannot be
1496
+ # exported as CSV. The default value for models is `ml_tf_saved_model`.
1448
1497
  #
1449
- # The following values are supported:
1498
+ # Supported values for tables:
1450
1499
  #
1451
1500
  # * `csv` - CSV
1452
1501
  # * `json` - [Newline-delimited JSON](http://jsonlines.org/)
1453
1502
  # * `avro` - [Avro](http://avro.apache.org/)
1503
+ #
1504
+ # Supported values for models:
1505
+ #
1506
+ # * `ml_tf_saved_model` - TensorFlow SavedModel
1507
+ # * `ml_xgboost_booster` - XGBoost Booster
1454
1508
  # @param [String] compression The compression type to use for exported
1455
1509
  # files. Possible values include `GZIP` and `NONE`. The default value
1456
- # is `NONE`.
1510
+ # is `NONE`. Not applicable when extracting models.
1457
1511
  # @param [String] delimiter Delimiter to use between fields in the
1458
- # exported data. Default is <code>,</code>.
1459
- # @param [Boolean] header Whether to print out a header row in the
1460
- # results. Default is `true`.
1512
+ # exported table data. Default is `,`. Not applicable when extracting
1513
+ # models.
1514
+ # @param [Boolean] header Whether to print out a header row in table
1515
+ # exports. Default is `true`. Not applicable when extracting models.
1461
1516
  # @param [String] job_id A user-defined ID for the extract job. The ID
1462
1517
  # must contain only letters (a-z, A-Z), numbers (0-9), underscores
1463
1518
  # (_), or dashes (-). The maximum length is 1,024 characters. If
@@ -1474,40 +1529,60 @@ module Google
1474
1529
  # is 1,024 characters. If `job_id` is provided, then `prefix` will not
1475
1530
  # be used.
1476
1531
  # @param [Hash] labels A hash of user-provided labels associated with
1477
- # the job. You can use these to organize and group your jobs. Label
1478
- # keys and values can be no longer than 63 characters, can only
1479
- # contain lowercase letters, numeric characters, underscores and
1480
- # dashes. International characters are allowed. Label values are
1481
- # optional. Label keys must start with a letter and each label in the
1482
- # list must have a different key. See [Requirements for
1483
- # labels](https://cloud.google.com/bigquery/docs/creating-managing-labels#requirements).
1532
+ # the job. You can use these to organize and group your jobs.
1533
+ #
1534
+ # The labels applied to a resource must meet the following requirements:
1535
+ #
1536
+ # * Each resource can have multiple labels, up to a maximum of 64.
1537
+ # * Each label must be a key-value pair.
1538
+ # * Keys have a minimum length of 1 character and a maximum length of
1539
+ # 63 characters, and cannot be empty. Values can be empty, and have
1540
+ # a maximum length of 63 characters.
1541
+ # * Keys and values can contain only lowercase letters, numeric characters,
1542
+ # underscores, and dashes. All characters must use UTF-8 encoding, and
1543
+ # international characters are allowed.
1544
+ # * The key portion of a label must be unique. However, you can use the
1545
+ # same key with multiple resources.
1546
+ # * Keys must start with a lowercase letter or international character.
1484
1547
  # @yield [job] a job configuration object
1485
1548
  # @yieldparam [Google::Cloud::Bigquery::ExtractJob::Updater] job a job
1486
1549
  # configuration object for setting additional options.
1487
1550
  #
1488
1551
  # @return [Google::Cloud::Bigquery::ExtractJob]
1489
1552
  #
1490
- # @example
1553
+ # @example Export table data
1491
1554
  # require "google/cloud/bigquery"
1492
1555
  #
1493
1556
  # bigquery = Google::Cloud::Bigquery.new
1494
1557
  #
1495
1558
  # table_id = "bigquery-public-data.samples.shakespeare"
1496
- # extract_job = bigquery.extract_job table_id,
1497
- # "gs://my-bucket/shakespeare.csv"
1559
+ # extract_job = bigquery.extract_job table_id, "gs://my-bucket/shakespeare.csv"
1498
1560
  # extract_job.wait_until_done!
1499
1561
  # extract_job.done? #=> true
1500
1562
  #
1563
+ # @example Export a model
1564
+ # require "google/cloud/bigquery"
1565
+ #
1566
+ # bigquery = Google::Cloud::Bigquery.new
1567
+ # dataset = bigquery.dataset "my_dataset"
1568
+ # model = dataset.model "my_model"
1569
+ #
1570
+ # extract_job = bigquery.extract_job model, "gs://my-bucket/#{model.model_id}"
1571
+ #
1501
1572
  # @!group Data
1502
1573
  #
1503
- def extract_job table, extract_url, format: nil, compression: nil, delimiter: nil, header: nil, job_id: nil,
1574
+ def extract_job source, extract_url, format: nil, compression: nil, delimiter: nil, header: nil, job_id: nil,
1504
1575
  prefix: nil, labels: nil
1505
1576
  ensure_service!
1506
1577
  options = { format: format, compression: compression, delimiter: delimiter, header: header, job_id: job_id,
1507
1578
  prefix: prefix, labels: labels }
1579
+ source_ref = if source.respond_to? :model_ref
1580
+ source.model_ref
1581
+ else
1582
+ Service.get_table_ref source, default_ref: project_ref
1583
+ end
1508
1584
 
1509
- table_ref = Service.get_table_ref table, default_ref: project_ref
1510
- updater = ExtractJob::Updater.from_options service, table_ref, extract_url, options
1585
+ updater = ExtractJob::Updater.from_options service, source_ref, extract_url, options
1511
1586
 
1512
1587
  yield updater if block_given?
1513
1588
 
@@ -1517,51 +1592,63 @@ module Google
1517
1592
  end
1518
1593
 
1519
1594
  ##
1520
- # Extracts the data from the provided table to a Google Cloud Storage
1521
- # file using a synchronous method that blocks for a response. Timeouts
1595
+ # Extracts the data from a table or exports a model to Google Cloud Storage
1596
+ # using a synchronous method that blocks for a response. Timeouts
1522
1597
  # and transient errors are generally handled as needed to complete the
1523
- # job. See {#extract_job} for the asynchronous version. Use this method
1524
- # instead of {Table#extract} to extract data from source tables in other
1525
- # projects.
1598
+ # job. See {#extract_job} for the asynchronous version.
1599
+ #
1600
+ # Use this method instead of {Table#extract} or {Model#extract} to
1601
+ # extract data from source tables or models in other projects.
1526
1602
  #
1527
1603
  # The geographic location for the job ("US", "EU", etc.) can be set via
1528
1604
  # {ExtractJob::Updater#location=} in a block passed to this method.
1529
1605
  #
1530
- # @see https://cloud.google.com/bigquery/exporting-data-from-bigquery
1531
- # Exporting Data From BigQuery
1606
+ # @see https://cloud.google.com/bigquery/docs/exporting-data
1607
+ # Exporting table data
1608
+ # @see https://cloud.google.com/bigquery-ml/docs/exporting-models
1609
+ # Exporting models
1532
1610
  #
1533
- # @param [String, Table] table The source table from which to extract
1534
- # data. This can be a table object; or a string ID as specified by the
1535
- # [Standard SQL Query
1611
+ # @param [Table, Model, String] source The source table or model for
1612
+ # the extract operation. This can be a table or model object; or a
1613
+ # table ID string as specified by the [Standard SQL Query
1536
1614
  # Reference](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#from-clause)
1537
1615
  # (`project-name.dataset_id.table_id`) or the [Legacy SQL Query
1538
1616
  # Reference](https://cloud.google.com/bigquery/query-reference#from)
1539
1617
  # (`project-name:dataset_id.table_id`).
1540
1618
  # @param [Google::Cloud::Storage::File, String, Array<String>]
1541
1619
  # extract_url The Google Storage file or file URI pattern(s) to which
1542
- # BigQuery should extract the table data.
1543
- # @param [String] format The exported file format. The default value is
1544
- # `csv`.
1620
+ # BigQuery should extract. For a model export this value should be a
1621
+ # string ending in an object name prefix, since multiple objects will
1622
+ # be exported.
1623
+ # @param [String] format The exported file format. The default value for
1624
+ # tables is `csv`. Tables with nested or repeated fields cannot be
1625
+ # exported as CSV. The default value for models is `ml_tf_saved_model`.
1545
1626
  #
1546
- # The following values are supported:
1627
+ # Supported values for tables:
1547
1628
  #
1548
1629
  # * `csv` - CSV
1549
1630
  # * `json` - [Newline-delimited JSON](http://jsonlines.org/)
1550
1631
  # * `avro` - [Avro](http://avro.apache.org/)
1632
+ #
1633
+ # Supported values for models:
1634
+ #
1635
+ # * `ml_tf_saved_model` - TensorFlow SavedModel
1636
+ # * `ml_xgboost_booster` - XGBoost Booster
1551
1637
  # @param [String] compression The compression type to use for exported
1552
1638
  # files. Possible values include `GZIP` and `NONE`. The default value
1553
- # is `NONE`.
1639
+ # is `NONE`. Not applicable when extracting models.
1554
1640
  # @param [String] delimiter Delimiter to use between fields in the
1555
- # exported data. Default is <code>,</code>.
1556
- # @param [Boolean] header Whether to print out a header row in the
1557
- # results. Default is `true`.
1641
+ # exported table data. Default is `,`. Not applicable when extracting
1642
+ # models.
1643
+ # @param [Boolean] header Whether to print out a header row in table
1644
+ # exports. Default is `true`. Not applicable when extracting models.
1558
1645
  # @yield [job] a job configuration object
1559
1646
  # @yieldparam [Google::Cloud::Bigquery::ExtractJob::Updater] job a job
1560
1647
  # configuration object for setting additional options.
1561
1648
  #
1562
1649
  # @return [Boolean] Returns `true` if the extract operation succeeded.
1563
1650
  #
1564
- # @example
1651
+ # @example Export table data
1565
1652
  # require "google/cloud/bigquery"
1566
1653
  #
1567
1654
  # bigquery = Google::Cloud::Bigquery.new
@@ -1569,10 +1656,19 @@ module Google
1569
1656
  # bigquery.extract "bigquery-public-data.samples.shakespeare",
1570
1657
  # "gs://my-bucket/shakespeare.csv"
1571
1658
  #
1659
+ # @example Export a model
1660
+ # require "google/cloud/bigquery"
1661
+ #
1662
+ # bigquery = Google::Cloud::Bigquery.new
1663
+ # dataset = bigquery.dataset "my_dataset"
1664
+ # model = dataset.model "my_model"
1665
+ #
1666
+ # bigquery.extract model, "gs://my-bucket/#{model.model_id}"
1667
+ #
1572
1668
  # @!group Data
1573
1669
  #
1574
- def extract table, extract_url, format: nil, compression: nil, delimiter: nil, header: nil, &block
1575
- job = extract_job table, extract_url,
1670
+ def extract source, extract_url, format: nil, compression: nil, delimiter: nil, header: nil, &block
1671
+ job = extract_job source, extract_url,
1576
1672
  format: format,
1577
1673
  compression: compression,
1578
1674
  delimiter: delimiter,
@@ -528,8 +528,9 @@ module Google
528
528
  # The period for which the destination table will be partitioned, if
529
529
  # any. See [Partitioned Tables](https://cloud.google.com/bigquery/docs/partitioned-tables).
530
530
  #
531
- # @return [String, nil] The partition type. Currently the only supported
532
- # value is "DAY", or `nil` if not present.
531
+ # @return [String, nil] The partition type. The supported types are `DAY`,
532
+ # `HOUR`, `MONTH`, and `YEAR`, which will generate one partition per day,
533
+ # hour, month, and year, respectively; or `nil` if not present.
533
534
  #
534
535
  # @!group Attributes
535
536
  #
@@ -675,9 +676,12 @@ module Google
675
676
  #
676
677
  # job.wait_until_done!
677
678
  # data = job.data
679
+ #
680
+ # # Iterate over the first page of results
678
681
  # data.each do |row|
679
682
  # puts row[:word]
680
683
  # end
684
+ # # Retrieve the next page of results
681
685
  # data = data.next if data.next?
682
686
  #
683
687
  def data token: nil, max: nil, start: nil
@@ -1028,12 +1032,21 @@ module Google
1028
1032
  # Sets the labels to use for the job.
1029
1033
  #
1030
1034
  # @param [Hash] value A hash of user-provided labels associated with
1031
- # the job. You can use these to organize and group your jobs. Label
1032
- # keys and values can be no longer than 63 characters, can only
1033
- # contain lowercase letters, numeric characters, underscores and
1034
- # dashes. International characters are allowed. Label values are
1035
- # optional. Label keys must start with a letter and each label in
1036
- # the list must have a different key.
1035
+ # the job. You can use these to organize and group your jobs.
1036
+ #
1037
+ # The labels applied to a resource must meet the following requirements:
1038
+ #
1039
+ # * Each resource can have multiple labels, up to a maximum of 64.
1040
+ # * Each label must be a key-value pair.
1041
+ # * Keys have a minimum length of 1 character and a maximum length of
1042
+ # 63 characters, and cannot be empty. Values can be empty, and have
1043
+ # a maximum length of 63 characters.
1044
+ # * Keys and values can contain only lowercase letters, numeric characters,
1045
+ # underscores, and dashes. All characters must use UTF-8 encoding, and
1046
+ # international characters are allowed.
1047
+ # * The key portion of a label must be unique. However, you can use the
1048
+ # same key with multiple resources.
1049
+ # * Keys must start with a lowercase letter or international character.
1037
1050
  #
1038
1051
  # @!group Attributes
1039
1052
  #
@@ -1290,13 +1303,16 @@ module Google
1290
1303
  ##
1291
1304
  # Sets the partitioning for the destination table. See [Partitioned
1292
1305
  # Tables](https://cloud.google.com/bigquery/docs/partitioned-tables).
1306
+ # The supported types are `DAY`, `HOUR`, `MONTH`, and `YEAR`, which will
1307
+ # generate one partition per day, hour, month, and year, respectively.
1293
1308
  #
1294
1309
  # You can only set the partitioning field while creating a table.
1295
1310
  # BigQuery does not allow you to change partitioning on an existing
1296
1311
  # table.
1297
1312
  #
1298
- # @param [String] type The partition type. Currently the only
1299
- # supported value is "DAY".
1313
+ # @param [String] type The partition type. The supported types are `DAY`,
1314
+ # `HOUR`, `MONTH`, and `YEAR`, which will generate one partition per day,
1315
+ # hour, month, and year, respectively.
1300
1316
  #
1301
1317
  # @example
1302
1318
  # require "google/cloud/bigquery"