gcloud 0.6.3 → 0.7.0

Files changed (175)
  1. checksums.yaml +8 -8
  2. data/AUTHENTICATION.md +13 -9
  3. data/CHANGELOG.md +8 -3
  4. data/OVERVIEW.md +46 -8
  5. data/lib/gcloud.rb +123 -117
  6. data/lib/gcloud/backoff.rb +43 -15
  7. data/lib/gcloud/bigquery.rb +211 -195
  8. data/lib/gcloud/bigquery/connection.rb +9 -9
  9. data/lib/gcloud/bigquery/copy_job.rb +15 -16
  10. data/lib/gcloud/bigquery/credentials.rb +3 -3
  11. data/lib/gcloud/bigquery/data.rb +12 -11
  12. data/lib/gcloud/bigquery/dataset.rb +162 -216
  13. data/lib/gcloud/bigquery/dataset/access.rb +59 -43
  14. data/lib/gcloud/bigquery/dataset/list.rb +3 -3
  15. data/lib/gcloud/bigquery/errors.rb +9 -5
  16. data/lib/gcloud/bigquery/extract_job.rb +18 -18
  17. data/lib/gcloud/bigquery/insert_response.rb +7 -4
  18. data/lib/gcloud/bigquery/job.rb +48 -44
  19. data/lib/gcloud/bigquery/job/list.rb +3 -3
  20. data/lib/gcloud/bigquery/load_job.rb +24 -25
  21. data/lib/gcloud/bigquery/project.rb +145 -204
  22. data/lib/gcloud/bigquery/query_data.rb +10 -9
  23. data/lib/gcloud/bigquery/query_job.rb +23 -32
  24. data/lib/gcloud/bigquery/table.rb +238 -280
  25. data/lib/gcloud/bigquery/table/list.rb +3 -3
  26. data/lib/gcloud/bigquery/table/schema.rb +79 -87
  27. data/lib/gcloud/bigquery/view.rb +69 -82
  28. data/lib/gcloud/credentials.rb +3 -9
  29. data/lib/gcloud/datastore.rb +194 -170
  30. data/lib/gcloud/datastore/connection.rb +12 -8
  31. data/lib/gcloud/datastore/credentials.rb +6 -4
  32. data/lib/gcloud/datastore/dataset.rb +74 -141
  33. data/lib/gcloud/datastore/dataset/lookup_results.rb +6 -4
  34. data/lib/gcloud/datastore/dataset/query_results.rb +6 -4
  35. data/lib/gcloud/datastore/entity.rb +81 -76
  36. data/lib/gcloud/datastore/errors.rb +10 -8
  37. data/lib/gcloud/datastore/key.rb +41 -77
  38. data/lib/gcloud/datastore/properties.rb +3 -3
  39. data/lib/gcloud/datastore/proto.rb +7 -4
  40. data/lib/gcloud/datastore/query.rb +26 -3
  41. data/lib/gcloud/datastore/transaction.rb +12 -8
  42. data/lib/gcloud/dns.rb +180 -152
  43. data/lib/gcloud/dns/change.rb +16 -16
  44. data/lib/gcloud/dns/change/list.rb +3 -3
  45. data/lib/gcloud/dns/connection.rb +9 -10
  46. data/lib/gcloud/dns/credentials.rb +3 -3
  47. data/lib/gcloud/dns/errors.rb +9 -5
  48. data/lib/gcloud/dns/importer.rb +17 -23
  49. data/lib/gcloud/dns/project.rb +42 -64
  50. data/lib/gcloud/dns/record.rb +58 -46
  51. data/lib/gcloud/dns/record/list.rb +6 -7
  52. data/lib/gcloud/dns/zone.rb +198 -289
  53. data/lib/gcloud/dns/zone/list.rb +3 -3
  54. data/lib/gcloud/dns/zone/transaction.rb +56 -72
  55. data/lib/gcloud/errors.rb +174 -3
  56. data/lib/gcloud/gce.rb +3 -4
  57. data/lib/gcloud/grpc_utils.rb +76 -0
  58. data/lib/gcloud/logging.rb +308 -0
  59. data/lib/gcloud/logging/credentials.rb +29 -0
  60. data/lib/gcloud/logging/entry.rb +303 -0
  61. data/lib/gcloud/logging/entry/http_request.rb +141 -0
  62. data/lib/gcloud/logging/entry/list.rb +111 -0
  63. data/lib/gcloud/logging/entry/operation.rb +90 -0
  64. data/lib/gcloud/logging/logger.rb +307 -0
  65. data/lib/gcloud/logging/metric.rb +175 -0
  66. data/lib/gcloud/logging/metric/list.rb +98 -0
  67. data/lib/gcloud/logging/project.rb +650 -0
  68. data/lib/gcloud/logging/resource.rb +95 -0
  69. data/lib/gcloud/logging/resource_descriptor.rb +140 -0
  70. data/lib/gcloud/logging/resource_descriptor/list.rb +78 -0
  71. data/lib/gcloud/logging/service.rb +258 -0
  72. data/lib/gcloud/logging/sink.rb +233 -0
  73. data/lib/gcloud/logging/sink/list.rb +97 -0
  74. data/lib/gcloud/pubsub.rb +241 -199
  75. data/lib/gcloud/pubsub/credentials.rb +3 -3
  76. data/lib/gcloud/pubsub/message.rb +26 -20
  77. data/lib/gcloud/pubsub/project.rb +166 -233
  78. data/lib/gcloud/pubsub/received_message.rb +28 -38
  79. data/lib/gcloud/pubsub/service.rb +323 -0
  80. data/lib/gcloud/pubsub/subscription.rb +172 -242
  81. data/lib/gcloud/pubsub/subscription/list.rb +11 -9
  82. data/lib/gcloud/pubsub/topic.rb +152 -271
  83. data/lib/gcloud/pubsub/topic/batch.rb +66 -0
  84. data/lib/gcloud/pubsub/topic/list.rb +9 -7
  85. data/lib/gcloud/resource_manager.rb +158 -138
  86. data/lib/gcloud/resource_manager/connection.rb +6 -5
  87. data/lib/gcloud/resource_manager/credentials.rb +3 -3
  88. data/lib/gcloud/resource_manager/errors.rb +9 -5
  89. data/lib/gcloud/resource_manager/manager.rb +54 -86
  90. data/lib/gcloud/resource_manager/project.rb +69 -88
  91. data/lib/gcloud/resource_manager/project/list.rb +4 -5
  92. data/lib/gcloud/resource_manager/project/updater.rb +12 -14
  93. data/lib/gcloud/search.rb +158 -135
  94. data/lib/gcloud/search/api_client.rb +7 -7
  95. data/lib/gcloud/search/connection.rb +8 -8
  96. data/lib/gcloud/search/credentials.rb +3 -3
  97. data/lib/gcloud/search/document.rb +64 -87
  98. data/lib/gcloud/search/document/list.rb +5 -5
  99. data/lib/gcloud/search/errors.rb +9 -5
  100. data/lib/gcloud/search/field_value.rb +32 -38
  101. data/lib/gcloud/search/field_values.rb +50 -80
  102. data/lib/gcloud/search/fields.rb +44 -65
  103. data/lib/gcloud/search/index.rb +163 -204
  104. data/lib/gcloud/search/index/list.rb +5 -5
  105. data/lib/gcloud/search/project.rb +31 -47
  106. data/lib/gcloud/search/result.rb +27 -31
  107. data/lib/gcloud/search/result/list.rb +6 -6
  108. data/lib/gcloud/storage.rb +224 -190
  109. data/lib/gcloud/storage/bucket.rb +202 -227
  110. data/lib/gcloud/storage/bucket/acl.rb +83 -170
  111. data/lib/gcloud/storage/bucket/cors.rb +31 -34
  112. data/lib/gcloud/storage/bucket/list.rb +3 -3
  113. data/lib/gcloud/storage/connection.rb +11 -7
  114. data/lib/gcloud/storage/credentials.rb +3 -3
  115. data/lib/gcloud/storage/errors.rb +11 -8
  116. data/lib/gcloud/storage/file.rb +129 -171
  117. data/lib/gcloud/storage/file/acl.rb +51 -99
  118. data/lib/gcloud/storage/file/list.rb +3 -3
  119. data/lib/gcloud/storage/file/verifier.rb +3 -2
  120. data/lib/gcloud/storage/project.rb +111 -132
  121. data/lib/gcloud/upload.rb +4 -7
  122. data/lib/gcloud/version.rb +2 -4
  123. data/lib/google/api/annotations.rb +14 -0
  124. data/lib/google/api/http.rb +30 -0
  125. data/lib/google/api/label.rb +24 -0
  126. data/lib/google/api/monitored_resource.rb +25 -0
  127. data/lib/google/datastore/v1beta3/datastore.rb +115 -0
  128. data/lib/google/datastore/v1beta3/datastore_services.rb +33 -0
  129. data/lib/google/datastore/v1beta3/entity.rb +63 -0
  130. data/lib/google/datastore/v1beta3/query.rb +128 -0
  131. data/lib/google/devtools/cloudtrace/v1/trace.rb +78 -0
  132. data/lib/google/devtools/cloudtrace/v1/trace_services.rb +32 -0
  133. data/lib/google/example/library/v1/library.rb +91 -0
  134. data/lib/google/example/library/v1/library_services.rb +40 -0
  135. data/lib/google/iam/v1/iam_policy.rb +33 -0
  136. data/lib/google/iam/v1/iam_policy_services.rb +30 -0
  137. data/lib/google/iam/v1/policy.rb +25 -0
  138. data/lib/google/logging/type/http_request.rb +28 -0
  139. data/lib/google/logging/type/log_severity.rb +27 -0
  140. data/lib/google/logging/v2/log_entry.rb +44 -0
  141. data/lib/google/logging/v2/logging.rb +56 -0
  142. data/lib/google/logging/v2/logging_config.rb +59 -0
  143. data/lib/google/logging/v2/logging_config_services.rb +32 -0
  144. data/lib/google/logging/v2/logging_metrics.rb +51 -0
  145. data/lib/google/logging/v2/logging_metrics_services.rb +32 -0
  146. data/lib/google/logging/v2/logging_services.rb +31 -0
  147. data/lib/google/longrunning/operations.rb +50 -0
  148. data/lib/google/longrunning/operations_services.rb +29 -0
  149. data/lib/google/protobuf/any.rb +17 -0
  150. data/lib/google/protobuf/api.rb +31 -0
  151. data/lib/google/protobuf/descriptor.rb +0 -0
  152. data/lib/google/protobuf/duration.rb +17 -0
  153. data/lib/google/protobuf/empty.rb +15 -0
  154. data/lib/google/protobuf/field_mask.rb +16 -0
  155. data/lib/google/protobuf/source_context.rb +16 -0
  156. data/lib/google/protobuf/struct.rb +35 -0
  157. data/lib/google/protobuf/timestamp.rb +17 -0
  158. data/lib/google/protobuf/type.rb +79 -0
  159. data/lib/google/protobuf/wrappers.rb +48 -0
  160. data/lib/google/pubsub/v1/pubsub.rb +129 -0
  161. data/lib/google/pubsub/v1/pubsub_services.rb +56 -0
  162. data/lib/google/pubsub/v1beta2/pubsub.rb +126 -0
  163. data/lib/google/pubsub/v1beta2/pubsub_services.rb +56 -0
  164. data/lib/google/rpc/code.rb +32 -0
  165. data/lib/google/rpc/error_details.rb +61 -0
  166. data/lib/google/rpc/status.rb +19 -0
  167. data/lib/google/type/color.rb +20 -0
  168. data/lib/google/type/date.rb +18 -0
  169. data/lib/google/type/dayofweek.rb +23 -0
  170. data/lib/google/type/latlng.rb +17 -0
  171. data/lib/google/type/money.rb +18 -0
  172. data/lib/google/type/timeofday.rb +19 -0
  173. metadata +101 -4
  174. data/lib/gcloud/pubsub/connection.rb +0 -295
  175. data/lib/gcloud/pubsub/errors.rb +0 -93
data/lib/gcloud/bigquery/query_data.rb
@@ -1,4 +1,3 @@
- #--
  # Copyright 2015 Google Inc. All rights reserved.
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,20 +12,22 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

+
  require "gcloud/bigquery/data"

  module Gcloud
  module Bigquery
  ##
- # = QueryData
+ # # QueryData
  #
  # Represents Data returned from a query a a list of name/value pairs.
  class QueryData < Data
  ##
- # The Connection object.
- attr_accessor :connection #:nodoc:
+ # @private The Connection object.
+ attr_accessor :connection

- def initialize arr = [] #:nodoc:
+ # @private
+ def initialize arr = []
  @job = nil
  super
  end
@@ -37,7 +38,7 @@ module Gcloud
  end

  # Whether the query has completed or not. When data is present this will
- # always be +true+. When +false+, +total+ will not be available.
+ # always be `true`. When `false`, `total` will not be available.
  def complete?
  @gapi["jobComplete"]
  end
@@ -89,7 +90,7 @@ module Gcloud
  end

  ##
- # The BigQuery Job that was created to run the query.
+ # The BigQuery {Job} that was created to run the query.
  def job
  return @job if @job
  return nil unless job?
@@ -104,8 +105,8 @@ module Gcloud
  end

  ##
- # New Data from a response object.
- def self.from_gapi gapi, connection #:nodoc:
+ # @private New Data from a response object.
+ def self.from_gapi gapi, connection
  if gapi["schema"].nil?
  formatted_rows = []
  else
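The QueryData docs above describe `complete?`, `total`, and `job`. A minimal sketch (not part of the diff) of how they read in practice, assuming a synchronous `Project#query` call and `Job#job_id` as used elsewhere in this gem's examples:

```ruby
require "gcloud"

gcloud = Gcloud.new
bigquery = gcloud.bigquery

data = bigquery.query "SELECT word FROM [publicdata:samples.shakespeare] LIMIT 10"
if data.complete?
  puts "total rows: #{data.total}"      # total is only available once the query completes
  puts "ran as job: #{data.job.job_id}" if data.job
end
```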
data/lib/gcloud/bigquery/query_job.rb
@@ -1,4 +1,3 @@
- #--
  # Copyright 2015 Google Inc. All rights reserved.
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,30 +12,30 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

+
  module Gcloud
  module Bigquery
  ##
- # = QueryJob
+ # # QueryJob
  #
- # A Job subclass representing a query operation that may be performed
- # on a Table. A QueryJob instance is created when you call
- # Project#query_job, Dataset#query_job, or View#data.
+ # A {Job} subclass representing a query operation that may be performed
+ # on a {Table}. A QueryJob instance is created when you call
+ # {Project#query_job}, {Dataset#query_job}, or {View#data}.
  #
- # See {Querying Data}[https://cloud.google.com/bigquery/querying-data]
- # and the {Jobs API
- # reference}[https://cloud.google.com/bigquery/docs/reference/v2/jobs]
- # for details.
+ # @see https://cloud.google.com/bigquery/querying-data Querying Data
+ # @see https://cloud.google.com/bigquery/docs/reference/v2/jobs Jobs API
+ #   reference
  #
  class QueryJob < Job
  ##
- # Checks if the priority for the query is +BATCH+.
+ # Checks if the priority for the query is `BATCH`.
  def batch?
  val = config["query"]["priority"]
  val == "BATCH"
  end

  ##
- # Checks if the priority for the query is +INTERACTIVE+.
+ # Checks if the priority for the query is `INTERACTIVE`.
  def interactive?
  val = config["query"]["priority"]
  return true if val.nil?
@@ -54,8 +53,8 @@ module Gcloud

  ##
  # Checks if the query job looks for an existing result in the query cache.
- # For more information, see {Query
- # Caching}[https://cloud.google.com/bigquery/querying-data#querycaching].
+ # For more information, see [Query
+ # Caching](https://cloud.google.com/bigquery/querying-data#querycaching).
  def cache?
  val = config["query"]["useQueryCache"]
  return false if val.nil?
@@ -64,8 +63,8 @@ module Gcloud

  ##
  # Checks if the query job flattens nested and repeated fields in the query
- # results. The default is +true+. If the value is +false+, #large_results?
- # should return +true+.
+ # results. The default is `true`. If the value is `false`, #large_results?
+ # should return `true`.
  def flatten?
  val = config["query"]["flattenResults"]
  return true if val.nil?
@@ -97,25 +96,17 @@ module Gcloud
  ##
  # Retrieves the query results for the job.
  #
- # === Parameters
- #
- # +token+::
- #   Page token, returned by a previous call, identifying the result set.
- #   (+String+)
- # +max+::
- #   Maximum number of results to return. (+Integer+)
- # +start+::
- #   Zero-based index of the starting row to read. (+Integer+)
- # +timeout+::
- #   How long to wait for the query to complete, in milliseconds, before
- #   returning. Default is 10,000 milliseconds (10 seconds). (+Integer+)
- #
- # === Returns
- #
- # Gcloud::Bigquery::QueryData
+ # @param [String] token Page token, returned by a previous call,
+ #   identifying the result set.
+ # @param [Integer] max Maximum number of results to return.
+ # @param [Integer] start Zero-based index of the starting row to read.
+ # @param [Integer] timeout How long to wait for the query to complete, in
+ #   milliseconds, before returning. Default is 10,000 milliseconds (10
+ #   seconds).
  #
- # === Example
+ # @return [Gcloud::Bigquery::QueryData]
  #
+ # @example
  # require "gcloud"
  #
  # gcloud = Gcloud.new
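To make the new `@param` and `@return` tags above concrete, here is a hedged sketch of calling `query_results` with the documented options; it assumes `Job#wait_until_done!` and `Data#token`, which the gem's other examples rely on:

```ruby
require "gcloud"

gcloud = Gcloud.new
bigquery = gcloud.bigquery

job = bigquery.query_job "SELECT word FROM [publicdata:samples.shakespeare]"
job.wait_until_done!

# token:, max:, start:, and timeout: map to the @param tags documented above.
data = job.query_results max: 100, timeout: 10_000
data.each { |row| puts row["word"] }
data = job.query_results token: data.token if data.token
```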
data/lib/gcloud/bigquery/table.rb
@@ -1,4 +1,3 @@
- #--
  # Copyright 2015 Google Inc. All rights reserved.
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,6 +12,7 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

+
  require "gcloud/bigquery/view"
  require "gcloud/bigquery/data"
  require "gcloud/bigquery/table/list"
@@ -24,14 +24,16 @@ require "gcloud/upload"
  module Gcloud
  module Bigquery
  ##
- # = Table
+ # # Table
  #
  # A named resource representing a BigQuery table that holds zero or more
- # records. Every table is defined by a schema
- # that may contain nested and repeated fields. (For more information
- # about nested and repeated fields, see {Preparing Data for
- # BigQuery}[https://cloud.google.com/bigquery/preparing-data-for-bigquery].)
+ # records. Every table is defined by a schema that may contain nested and
+ # repeated fields.
+ #
+ # @see https://cloud.google.com/bigquery/preparing-data-for-bigquery
+ #   Preparing Data for BigQuery
  #
+ # @example
  # require "gcloud"
  #
  # gcloud = Gcloud.new
@@ -63,16 +65,16 @@ module Gcloud
  #
  class Table
  ##
- # The Connection object.
- attr_accessor :connection #:nodoc:
+ # @private The Connection object.
+ attr_accessor :connection

  ##
- # The Google API Client object.
- attr_accessor :gapi #:nodoc:
+ # @private The Google API Client object.
+ attr_accessor :gapi

  ##
- # Create an empty Table object.
- def initialize #:nodoc:
+ # @private Create an empty Table object.
+ def initialize
  @connection = nil
  @gapi = {}
  end
@@ -82,34 +84,35 @@ module Gcloud
  # The ID must contain only letters (a-z, A-Z), numbers (0-9),
  # or underscores (_). The maximum length is 1,024 characters.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def table_id
  @gapi["tableReference"]["tableId"]
  end

  ##
- # The ID of the +Dataset+ containing this table.
+ # The ID of the `Dataset` containing this table.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def dataset_id
  @gapi["tableReference"]["datasetId"]
  end

  ##
- # The ID of the +Project+ containing this table.
+ # The ID of the `Project` containing this table.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def project_id
  @gapi["tableReference"]["projectId"]
  end

  ##
+ # @private
  # The gapi fragment containing the Project ID, Dataset ID, and Table ID as
  # a camel-cased hash.
- def table_ref #:nodoc:
+ def table_ref
  table_ref = @gapi["tableReference"]
  table_ref = table_ref.to_hash if table_ref.respond_to? :to_hash
  table_ref
@@ -117,25 +120,24 @@ module Gcloud

  ##
  # The combined Project ID, Dataset ID, and Table ID for this table, in the
- # format specified by the {Query
- # Reference}[https://cloud.google.com/bigquery/query-reference#from]:
- # +project_name:datasetId.tableId+. To use this value in queries see
- # #query_id.
+ # format specified by the [Query
+ # Reference](https://cloud.google.com/bigquery/query-reference#from):
+ # `project_name:datasetId.tableId`. To use this value in queries see
+ # {#query_id}.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def id
  @gapi["id"]
  end

  ##
- # The value returned by #id, wrapped in square brackets if the Project ID
- # contains dashes, as specified by the {Query
- # Reference}[https://cloud.google.com/bigquery/query-reference#from].
+ # The value returned by {#id}, wrapped in square brackets if the Project
+ # ID contains dashes, as specified by the [Query
+ # Reference](https://cloud.google.com/bigquery/query-reference#from).
  # Useful in queries.
  #
- # === Example
- #
+ # @example
  # require "gcloud"
  #
  # gcloud = Gcloud.new
@@ -145,7 +147,7 @@ module Gcloud
  #
  # data = bigquery.query "SELECT name FROM #{table.query_id}"
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def query_id
  project_id["-"] ? "[#{id}]" : id
@@ -154,7 +156,7 @@ module Gcloud
  ##
  # The name of the table.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def name
  @gapi["friendlyName"]
@@ -163,7 +165,7 @@ module Gcloud
  ##
  # Updates the name of the table.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def name= new_name
  patch_gapi! name: new_name
@@ -172,7 +174,7 @@ module Gcloud
  ##
  # A string hash of the dataset.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def etag
  ensure_full_data!
@@ -182,7 +184,7 @@ module Gcloud
  ##
  # A URL that can be used to access the dataset using the REST API.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def api_url
  ensure_full_data!
@@ -192,7 +194,7 @@ module Gcloud
  ##
  # The description of the table.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def description
  ensure_full_data!
@@ -202,7 +204,7 @@ module Gcloud
  ##
  # Updates the description of the table.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def description= new_description
  patch_gapi! description: new_description
@@ -211,7 +213,7 @@ module Gcloud
  ##
  # The number of bytes in the table.
  #
- # :category: Data
+ # @!group Data
  #
  def bytes_count
  ensure_full_data!
@@ -221,7 +223,7 @@ module Gcloud
  ##
  # The number of rows in the table.
  #
- # :category: Data
+ # @!group Data
  #
  def rows_count
  ensure_full_data!
@@ -231,7 +233,7 @@ module Gcloud
  ##
  # The time when this table was created.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def created_at
  ensure_full_data!
@@ -243,7 +245,7 @@ module Gcloud
  # If not present, the table will persist indefinitely.
  # Expired tables will be deleted and their storage reclaimed.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def expires_at
  ensure_full_data!
@@ -254,7 +256,7 @@ module Gcloud
  ##
  # The date when this table was last modified.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def modified_at
  ensure_full_data!
@@ -264,7 +266,7 @@ module Gcloud
  ##
  # Checks if the table's type is "TABLE".
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def table?
  @gapi["type"] == "TABLE"
@@ -273,7 +275,7 @@ module Gcloud
  ##
  # Checks if the table's type is "VIEW".
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def view?
  @gapi["type"] == "VIEW"
@@ -283,7 +285,7 @@ module Gcloud
  # The geographic location where the table should reside. Possible
  # values include EU and US. The default value is US.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def location
  ensure_full_data!
@@ -292,23 +294,21 @@ module Gcloud

  ##
  # Returns the table's schema as hash containing the keys and values
- # returned by the Google Cloud BigQuery {Rest API
- # }[https://cloud.google.com/bigquery/docs/reference/v2/tables#resource].
+ # returned by the Google Cloud BigQuery [Rest API
+ # ](https://cloud.google.com/bigquery/docs/reference/v2/tables#resource).
  # This method can also be used to set, replace, or add to the schema by
- # passing a block. See Table::Schema for available methods. To set the
- # schema by passing a hash instead, use #schema=.
+ # passing a block. See {Table::Schema} for available methods. To set the
+ # schema by passing a hash instead, use {#schema=}.
  #
- # === Parameters
- #
- # +replace+::
- #   Whether to replace the existing schema with the new schema. If
- #   +true+, the fields will replace the existing schema. If
- #   +false+, the fields will be added to the existing schema. When a table
+ # @param [Boolean] replace Whether to replace the existing schema with the
+ #   new schema. If `true`, the fields will replace the existing schema. If
+ #   `false`, the fields will be added to the existing schema. When a table
  #   already contains data, schema changes must be additive. Thus, the
- #   default value is +false+. (+Boolean+)
- #
- # === Examples
+ #   default value is `false`.
+ # @yield [schema] a block for setting the schema
+ # @yieldparam [Table::Schema] schema the object accepting the schema
  #
+ # @example
  # require "gcloud"
  #
  # gcloud = Gcloud.new
@@ -324,7 +324,7 @@ module Gcloud
  # end
  # end
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def schema replace: false
  ensure_full_data!
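The body of the `#schema` example falls in unchanged context and is not shown in the hunks above. A hedged sketch of the block form the new docs describe, assuming the `Table::Schema` field helpers (`string`, `integer`, etc.) referenced there:

```ruby
require "gcloud"

gcloud = Gcloud.new
bigquery = gcloud.bigquery
dataset = bigquery.dataset "my_dataset"
table = dataset.table "my_table"

# Replace the existing schema by passing replace: true, per the @param above.
table.schema replace: true do |schema|
  schema.string "first_name", mode: :required
  schema.integer "age", description: "Age in years"
end
```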
@@ -342,16 +342,12 @@ module Gcloud
  # Updates the schema of the table.
  # To update the schema using a block instead, use #schema.
  #
- # === Parameters
- #
- # +schema+::
- #   A hash containing keys and values as specified by the Google Cloud
- #   BigQuery {Rest API
- #   }[https://cloud.google.com/bigquery/docs/reference/v2/tables#resource]
- #   . (+Hash+)
- #
- # === Example
+ # @param [Hash] new_schema A hash containing keys and values as specified
+ #   by the Google Cloud BigQuery [Rest API
+ #   ](https://cloud.google.com/bigquery/docs/reference/v2/tables#resource)
+ #   .
  #
+ # @example
  # require "gcloud"
  #
  # gcloud = Gcloud.new
@@ -375,7 +371,7 @@ module Gcloud
  # }
  # table.schema = schema
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def schema= new_schema
  patch_gapi! schema: new_schema
@@ -384,7 +380,7 @@ module Gcloud
  ##
  # The fields of the table.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def fields
  f = schema["fields"]
@@ -396,7 +392,7 @@ module Gcloud
  ##
  # The names of the columns in the table.
  #
- # :category: Attributes
+ # @!group Attributes
  #
  def headers
  fields.map { |f| f["name"] }
@@ -405,22 +401,15 @@ module Gcloud
  ##
  # Retrieves data from the table.
  #
- # === Parameters
- #
- # +token+::
- #   Page token, returned by a previous call, identifying the result set.
- #   (+String+)
- # +max+::
- #   Maximum number of results to return. (+Integer+)
- # +start+::
- #   Zero-based index of the starting row to read. (+Integer+)
- #
- # === Returns
+ # @param [String] token Page token, returned by a previous call,
+ #   identifying the result set.
  #
- # Gcloud::Bigquery::Data
+ # @param [Integer] max Maximum number of results to return.
+ # @param [Integer] start Zero-based index of the starting row to read.
  #
- # === Example
+ # @return [Gcloud::Bigquery::Data]
  #
+ # @example
  # require "gcloud"
  #
  # gcloud = Gcloud.new
@@ -434,7 +423,7 @@ module Gcloud
  # end
  # more_data = table.data token: data.token
  #
- # :category: Data
+ # @!group Data
  #
  def data token: nil, max: nil, start: nil
  ensure_connection!
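A short sketch of paging through table data with the `token`, `max`, and `start` options documented above; method names follow the example fragment visible in the hunk:

```ruby
require "gcloud"

gcloud = Gcloud.new
bigquery = gcloud.bigquery
table = bigquery.dataset("my_dataset").table("my_table")

data = table.data max: 50
data.each { |row| puts row["first_name"] }

# Fetch the next page only when a continuation token is present.
data = table.data token: data.token if data.token
```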
@@ -449,34 +438,35 @@ module Gcloud

  ##
  # Copies the data from the table to another table.
+ # The destination table argument can also be a string identifier as
+ # specified by the [Query
+ # Reference](https://cloud.google.com/bigquery/query-reference#from):
+ # `project_name:datasetId.tableId`. This is useful for referencing tables
+ # in other projects and datasets.
  #
- # === Parameters
- #
- # +destination_table+::
- #   The destination for the copied data. (+Table+ or +String+)
- # +create+::
- #   Specifies whether the job is allowed to create new tables. (+String+)
+ # @param [Table, String] destination_table The destination for the copied
+ #   data.
+ # @param [String] create Specifies whether the job is allowed to create
+ #   new tables.
  #
  #   The following values are supported:
- #   * +needed+ - Create the table if it does not exist.
- #   * +never+ - The table must already exist. A 'notFound' error is
+ #
+ #   * `needed` - Create the table if it does not exist.
+ #   * `never` - The table must already exist. A 'notFound' error is
  #     raised if the table does not exist.
- # +write+::
- #   Specifies how to handle data already present in the destination table.
- #   The default value is +empty+. (+String+)
+ # @param [String] write Specifies how to handle data already present in
+ #   the destination table. The default value is `empty`.
  #
  #   The following values are supported:
- #   * +truncate+ - BigQuery overwrites the table data.
- #   * +append+ - BigQuery appends the data to the table.
- #   * +empty+ - An error will be returned if the destination table already
- #     contains data.
- #
- # === Returns
  #
- # Gcloud::Bigquery::CopyJob
+ #   * `truncate` - BigQuery overwrites the table data.
+ #   * `append` - BigQuery appends the data to the table.
+ #   * `empty` - An error will be returned if the destination table already
+ #     contains data.
  #
- # === Examples
+ # @return [Gcloud::Bigquery::CopyJob]
  #
+ # @example
  # require "gcloud"
  #
  # gcloud = Gcloud.new
@@ -487,12 +477,7 @@ module Gcloud
  #
  # copy_job = table.copy destination_table
  #
- # The destination table argument can also be a string identifier as
- # specified by the {Query
- # Reference}[https://cloud.google.com/bigquery/query-reference#from]:
- # +project_name:datasetId.tableId+. This is useful for referencing tables
- # in other projects and datasets.
- #
+ # @example Passing a string identifier for the destination table:
  # require "gcloud"
  #
  # gcloud = Gcloud.new
@@ -502,7 +487,7 @@ module Gcloud
  #
  # copy_job = table.copy "other-project:other_dataset.other_table"
  #
- # :category: Data
+ # @!group Data
  #
  def copy destination_table, create: nil, write: nil, dryrun: nil
  ensure_connection!
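A hedged sketch combining the `create` and `write` dispositions documented above with the string-identifier form of the destination:

```ruby
require "gcloud"

gcloud = Gcloud.new
bigquery = gcloud.bigquery
dataset = bigquery.dataset "my_dataset"
table = dataset.table "my_table"

# Create the destination if needed and overwrite any existing data there.
copy_job = table.copy "other-project:other_dataset.other_table",
                      create: "needed",
                      write: "truncate"
```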
@@ -518,36 +503,33 @@ module Gcloud
  end

  ##
+ # @private
  # Links the table to a source table identified by a URI.
  #
- # === Parameters
- #
- # +source_url+::
- #   The URI of source table to link. (+String+)
- # +create+::
- #   Specifies whether the job is allowed to create new tables. (+String+)
+ # @param [String] source_url The URI of source table to link.
+ # @param [String] create Specifies whether the job is allowed to create
+ #   new tables.
  #
  #   The following values are supported:
- #   * +needed+ - Create the table if it does not exist.
- #   * +never+ - The table must already exist. A 'notFound' error is
+ #
+ #   * `needed` - Create the table if it does not exist.
+ #   * `never` - The table must already exist. A 'notFound' error is
  #     raised if the table does not exist.
- # +write+::
- #   Specifies how to handle data already present in the table.
- #   The default value is +empty+. (+String+)
+ # @param [String] write Specifies how to handle data already present in
+ #   the destination table. The default value is `empty`.
  #
  #   The following values are supported:
- #   * +truncate+ - BigQuery overwrites the table data.
- #   * +append+ - BigQuery appends the data to the table.
- #   * +empty+ - An error will be returned if the table already contains
- #     data.
  #
- # === Returns
+ #   * `truncate` - BigQuery overwrites the table data.
+ #   * `append` - BigQuery appends the data to the table.
+ #   * `empty` - An error will be returned if the destination table already
+ #     contains data.
  #
- # Gcloud::Bigquery::Job
+ # @return [Gcloud::Bigquery::Job]
  #
- # :category: Data
+ # @!group Data
  #
- def link source_url, create: nil, write: nil, dryrun: nil #:nodoc:
+ def link source_url, create: nil, write: nil, dryrun: nil
  ensure_connection!
  options = { create: create, write: write, dryrun: dryrun }
  resp = connection.link_table table_ref, source_url, options
@@ -559,39 +541,34 @@ module Gcloud
  end

  ##
- # Extract the data from the table to a Google Cloud Storage file. For
- # more information, see {Exporting Data From BigQuery
- # }[https://cloud.google.com/bigquery/exporting-data-from-bigquery].
+ # Extract the data from the table to a Google Cloud Storage file.
  #
- # === Parameters
+ # @see https://cloud.google.com/bigquery/exporting-data-from-bigquery
+ #   Exporting Data From BigQuery
  #
- # +extract_url+::
- #   The Google Storage file or file URI pattern(s) to which BigQuery
- #   should extract the table data.
- #   (+Gcloud::Storage::File+ or +String+ or +Array+)
- # +format+::
- #   The exported file format. The default value is +csv+. (+String+)
+ # @param [Gcloud::Storage::File, String, Array<String>] extract_url The
+ #   Google Storage file or file URI pattern(s) to which BigQuery should
+ #   extract the table data.
+ # @param [String] format The exported file format. The default value is
+ #   `csv`.
  #
  #   The following values are supported:
- #   * +csv+ - CSV
- #   * +json+ - {Newline-delimited JSON}[http://jsonlines.org/]
- #   * +avro+ - {Avro}[http://avro.apache.org/]
- # +compression+::
- #   The compression type to use for exported files. Possible values
- #   include +GZIP+ and +NONE+. The default value is +NONE+. (+String+)
- # +delimiter+::
- #   Delimiter to use between fields in the exported data. Default is
- #   <code>,</code>. (+String+)
- # +header+::
- #   Whether to print out a header row in the results. Default is +true+.
- #   (+Boolean+)
  #
- # === Returns
+ #   * `csv` - CSV
+ #   * `json` - [Newline-delimited JSON](http://jsonlines.org/)
+ #   * `avro` - [Avro](http://avro.apache.org/)
+ # @param [String] compression The compression type to use for exported
+ #   files. Possible values include `GZIP` and `NONE`. The default value is
+ #   `NONE`.
+ # @param [String] delimiter Delimiter to use between fields in the
+ #   exported data. Default is <code>,</code>.
+ # @param [Boolean] header Whether to print out a header row in the
+ #   results. Default is `true`.
  #
- # Gcloud::Bigquery::ExtractJob
  #
- # === Example
+ # @return [Gcloud::Bigquery::ExtractJob]
  #
+ # @example
  # require "gcloud"
  #
  # gcloud = Gcloud.new
@@ -602,7 +579,7 @@ module Gcloud
  # extract_job = table.extract "gs://my-bucket/file-name.json",
  #   format: "json"
  #
- # :category: Data
+ # @!group Data
  #
  def extract extract_url, format: nil, compression: nil, delimiter: nil,
  header: nil, dryrun: nil
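A sketch of an extract call using the documented `format`, `compression`, and `header` options; the bucket name and wildcard pattern are illustrative only:

```ruby
require "gcloud"

gcloud = Gcloud.new
bigquery = gcloud.bigquery
table = bigquery.dataset("my_dataset").table("my_table")

# Export gzipped CSV without a header row; the wildcard lets BigQuery shard
# large exports across multiple files.
extract_job = table.extract "gs://my-bucket/export-*.csv",
                            format: "csv",
                            compression: "GZIP",
                            header: false
```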
@@ -618,100 +595,93 @@ module Gcloud
  end

  ##
- # Loads data into the table.
- #
- # === Parameters
+ # Loads data into the table. You can pass a gcloud storage file path or
+ # a gcloud storage file instance. Or, you can upload a file directly.
+ # See [Loading Data with a POST Request](
+ # https://cloud.google.com/bigquery/loading-data-post-request#multipart).
  #
- # +file+::
- #   A file or the URI of a Google Cloud Storage file containing
- #   data to load into the table.
- #   (+File+ or +Gcloud::Storage::File+ or +String+)
- # +format+::
- #   The exported file format. The default value is +csv+. (+String+)
+ # @param [File, Gcloud::Storage::File, String] file A file or the URI of a
+ #   Google Cloud Storage file containing data to load into the table.
+ # @param [String] format The exported file format. The default value is
+ #   `csv`.
  #
  #   The following values are supported:
- #   * +csv+ - CSV
- #   * +json+ - {Newline-delimited JSON}[http://jsonlines.org/]
- #   * +avro+ - {Avro}[http://avro.apache.org/]
- #   * +datastore_backup+ - Cloud Datastore backup
- # +create+::
- #   Specifies whether the job is allowed to create new tables. (+String+)
+ #
+ #   * `csv` - CSV
+ #   * `json` - [Newline-delimited JSON](http://jsonlines.org/)
+ #   * `avro` - [Avro](http://avro.apache.org/)
+ #   * `datastore_backup` - Cloud Datastore backup
+ # @param [String] create Specifies whether the job is allowed to create
+ #   new tables.
  #
  #   The following values are supported:
- #   * +needed+ - Create the table if it does not exist.
- #   * +never+ - The table must already exist. A 'notFound' error is
+ #
+ #   * `needed` - Create the table if it does not exist.
+ #   * `never` - The table must already exist. A 'notFound' error is
  #     raised if the table does not exist.
- # +write+::
- #   Specifies how to handle data already present in the table.
- #   The default value is +empty+. (+String+)
+ # @param [String] write Specifies how to handle data already present in
+ #   the table. The default value is `empty`.
  #
  #   The following values are supported:
- #   * +truncate+ - BigQuery overwrites the table data.
- #   * +append+ - BigQuery appends the data to the table.
- #   * +empty+ - An error will be returned if the table already contains
+ #
+ #   * `truncate` - BigQuery overwrites the table data.
+ #   * `append` - BigQuery appends the data to the table.
+ #   * `empty` - An error will be returned if the table already contains
  #     data.
- # +projection_fields+::
- #   If the +format+ option is set to +datastore_backup+, indicates which
- #   entity properties to load from a Cloud Datastore backup. Property
- #   names are case sensitive and must be top-level properties. If not set,
- #   BigQuery loads all properties. If any named property isn't found in
- #   the Cloud Datastore backup, an invalid error is returned. (+Array+)
- # +jagged_rows+::
- #   Accept rows that are missing trailing optional columns. The missing
- #   values are treated as nulls. If +false+, records with missing trailing
- #   columns are treated as bad records, and if there are too many bad
- #   records, an invalid error is returned in the job result. The default
- #   value is +false+. Only applicable to CSV, ignored for other formats.
- #   (+Boolean+)
- # +quoted_newlines+::
- #   Indicates if BigQuery should allow quoted data sections that contain
- #   newline characters in a CSV file. The default value is +false+.
- #   (+Boolean+)
- # +encoding+::
- #   The character encoding of the data. The supported values are +UTF-8+
- #   or +ISO-8859-1+. The default value is +UTF-8+. (+String+)
- # +delimiter+::
- #   Specifices the separator for fields in a CSV file. BigQuery converts
- #   the string to +ISO-8859-1+ encoding, and then uses the first byte of
- #   the encoded string to split the data in its raw, binary state. Default
- #   is <code>,</code>. (+String+)
- # +ignore_unknown+::
- #   Indicates if BigQuery should allow extra values that are not
- #   represented in the table schema. If true, the extra values are
- #   ignored. If false, records with extra columns are treated as bad
- #   records, and if there are too many bad records, an invalid error is
- #   returned in the job result. The default value is +false+. (+Boolean+)
- #
- #   The +format+ property determines what BigQuery treats as an extra
+ # @param [Array<String>] projection_fields If the `format` option is set
+ #   to `datastore_backup`, indicates which entity properties to load from
+ #   a Cloud Datastore backup. Property names are case sensitive and must
+ #   be top-level properties. If not set, BigQuery loads all properties. If
+ #   any named property isn't found in the Cloud Datastore backup, an
+ #   invalid error is returned.
+ # @param [Boolean] jagged_rows Accept rows that are missing trailing
+ #   optional columns. The missing values are treated as nulls. If `false`,
+ #   records with missing trailing columns are treated as bad records, and
+ #   if there are too many bad records, an invalid error is returned in the
+ #   job result. The default value is `false`. Only applicable to CSV,
+ #   ignored for other formats.
+ # @param [Boolean] quoted_newlines Indicates if BigQuery should allow
+ #   quoted data sections that contain newline characters in a CSV file.
+ #   The default value is `false`.
+ # @param [String] encoding The character encoding of the data. The
+ #   supported values are `UTF-8` or `ISO-8859-1`. The default value is
+ #   `UTF-8`.
+ # @param [String] delimiter Specifices the separator for fields in a CSV
+ #   file. BigQuery converts the string to `ISO-8859-1` encoding, and then
+ #   uses the first byte of the encoded string to split the data in its
+ #   raw, binary state. Default is <code>,</code>.
+ # @param [Boolean] ignore_unknown Indicates if BigQuery should allow extra
+ #   values that are not represented in the table schema. If true, the
+ #   extra values are ignored. If false, records with extra columns are
+ #   treated as bad records, and if there are too many bad records, an
+ #   invalid error is returned in the job result. The default value is
+ #   `false`.
+ #
+ #   The `format` property determines what BigQuery treats as an extra
  #   value:
  #
- #   * +CSV+: Trailing columns
- #   * +JSON+: Named values that don't match any column names
- # +max_bad_records+::
- #   The maximum number of bad records that BigQuery can ignore when
- #   running the job. If the number of bad records exceeds this value, an
- #   invalid error is returned in the job result. The default value is +0+,
- #   which requires that all records are valid. (+Integer+)
- # +quote+::
- #   The value that is used to quote data sections in a CSV file. BigQuery
- #   converts the string to ISO-8859-1 encoding, and then uses the first
- #   byte of the encoded string to split the data in its raw, binary state.
- #   The default value is a double-quote <code>"</code>. If your data does
- #   not contain quoted sections, set the property value to an empty
- #   string. If your data contains quoted newline characters, you must also
- #   set the allowQuotedNewlines property to true. (+String+)
- # +skip_leading+::
- #   The number of rows at the top of a CSV file that BigQuery will skip
- #   when loading the data. The default value is +0+. This property is
- #   useful if you have header rows in the file that should be skipped.
- #   (+Integer+)
- #
- # === Returns
- #
- # Gcloud::Bigquery::LoadJob
- #
- # === Examples
- #
+ #   * `CSV`: Trailing columns
+ #   * `JSON`: Named values that don't match any column names
+ # @param [Integer] max_bad_records The maximum number of bad records that
+ #   BigQuery can ignore when running the job. If the number of bad records
+ #   exceeds this value, an invalid error is returned in the job result.
+ #   The default value is `0`, which requires that all records are valid.
+ # @param [String] quote The value that is used to quote data sections in a
+ #   CSV file. BigQuery converts the string to ISO-8859-1 encoding, and
+ #   then uses the first byte of the encoded string to split the data in
+ #   its raw, binary state. The default value is a double-quote
+ #   <code>"</code>. If your data does not contain quoted sections, set the
+ #   property value to an empty string. If your data contains quoted
+ #   newline characters, you must also set the allowQuotedNewlines property
+ #   to true.
+ # @param [Integer] skip_leading The number of rows at the top of a CSV
+ #   file that BigQuery will skip when loading the data. The default value
+ #   is `0`. This property is useful if you have header rows in the file
+ #   that should be skipped.
+ #
+ # @return [Gcloud::Bigquery::LoadJob]
+ #
+ # @example
  # require "gcloud"
  #
  # gcloud = Gcloud.new
@@ -721,8 +691,7 @@ module Gcloud
  #
  # load_job = table.load "gs://my-bucket/file-name.csv"
  #
- # You can also pass a gcloud storage file instance.
- #
+ # @example Pass a gcloud storage file instance:
  # require "gcloud"
  # require "gcloud/storage"
  #
@@ -736,10 +705,7 @@ module Gcloud
  # file = bucket.file "file-name.csv"
  # load_job = table.load file
  #
- # Or, you can upload a file directly.
- # See {Loading Data with a POST Request}[
- # https://cloud.google.com/bigquery/loading-data-post-request#multipart].
- #
+ # @example Upload a file directly:
  # require "gcloud"
  #
  # gcloud = Gcloud.new
@@ -750,19 +716,20 @@ module Gcloud
  # file = File.open "my_data.csv"
  # load_job = table.load file
  #
- # === A note about large direct uploads
+ # ### A note about large direct uploads
  #
  # You may encounter a Broken pipe (Errno::EPIPE) error when attempting to
  # upload large files. To avoid this problem, add the
- # {httpclient}[https://rubygems.org/gems/httpclient] gem to your project,
+ # [httpclient](https://rubygems.org/gems/httpclient) gem to your project,
  # and the line (or lines) of configuration shown below. These lines must
  # execute after you require gcloud but before you make your first gcloud
  # connection. The first statement configures
- # {Faraday}[https://rubygems.org/gems/faraday] to use httpclient. The
+ # [Faraday](https://rubygems.org/gems/faraday) to use httpclient. The
  # second statement, which should only be added if you are using a version
- # of Faraday at or above 0.9.2, is a workaround for {this gzip
- # issue}[https://github.com/GoogleCloudPlatform/gcloud-ruby/issues/367].
+ # of Faraday at or above 0.9.2, is a workaround for [this gzip
+ # issue](https://github.com/GoogleCloudPlatform/gcloud-ruby/issues/367).
  #
+ # @example
  # require "gcloud"
  #
  # # Use httpclient to avoid broken pipe errors with large uploads
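The example in this hunk is cut off at the diff boundary. A hedged sketch of the configuration the note describes: the `Faraday.default_adapter` assignment is standard Faraday, while the gzip middleware override should be treated as an assumption drawn from the gcloud-ruby issue linked above.

```ruby
require "gcloud"

# Use httpclient to avoid broken pipe errors with large uploads.
Faraday.default_adapter = :httpclient

# Only add this if using Faraday >= 0.9.2: override the gzip middleware so
# httpclient's own decompression is not applied twice (see issue #367).
Faraday::Response.register_middleware gzip: Faraday::Response::Middleware

gcloud = Gcloud.new
bigquery = gcloud.bigquery
```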
@@ -776,7 +743,7 @@ module Gcloud
  # gcloud = Gcloud.new
  # bigquery = gcloud.bigquery
  #
- # :category: Data
+ # @!group Data
  #
  def load file, format: nil, create: nil, write: nil,
  projection_fields: nil, jagged_rows: nil, quoted_newlines: nil,
@@ -798,29 +765,23 @@ module Gcloud
  ##
  # Inserts data into the table for near-immediate querying, without the
  # need to complete a #load operation before the data can appear in query
- # results. See {Streaming Data Into BigQuery
- # }[https://cloud.google.com/bigquery/streaming-data-into-bigquery].
+ # results.
  #
- # === Parameters
+ # @see https://cloud.google.com/bigquery/streaming-data-into-bigquery
+ #   Streaming Data Into BigQuery
  #
- # +rows+::
- #   A hash object or array of hash objects containing the data.
- #   (+Array+ or +Hash+)
- # +skip_invalid+::
- #   Insert all valid rows of a request, even if invalid rows exist. The
- #   default value is +false+, which causes the entire request to fail if
- #   any invalid rows exist. (+Boolean+)
- # +ignore_unknown+::
- #   Accept rows that contain values that do not match the schema. The
- #   unknown values are ignored. Default is false, which treats unknown
- #   values as errors. (+Boolean+)
+ # @param [Hash, Array<Hash>] rows A hash object or array of hash objects
+ #   containing the data.
+ # @param [Boolean] skip_invalid Insert all valid rows of a request, even
+ #   if invalid rows exist. The default value is `false`, which causes the
+ #   entire request to fail if any invalid rows exist.
+ # @param [Boolean] ignore_unknown Accept rows that contain values that do
+ #   not match the schema. The unknown values are ignored. Default is
+ #   false, which treats unknown values as errors.
  #
- # === Returns
- #
- # Gcloud::Bigquery::InsertResponse
- #
- # === Example
+ # @return [Gcloud::Bigquery::InsertResponse]
  #
+ # @example
  # require "gcloud"
  #
  # gcloud = Gcloud.new
@@ -834,7 +795,7 @@ module Gcloud
  # ]
  # table.insert rows
  #
- # :category: Data
+ # @!group Data
  #
  def insert rows, skip_invalid: nil, ignore_unknown: nil
  rows = [rows] if rows.is_a? Hash
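A sketch of a streaming insert using the `skip_invalid` option documented above; the `success?` check on the returned response is an assumption based on the `InsertResponse` class named in the `@return` tag:

```ruby
require "gcloud"

gcloud = Gcloud.new
bigquery = gcloud.bigquery
table = bigquery.dataset("my_dataset").table("my_table")

rows = [
  { "first_name" => "Alice", "age" => 21 },
  { "first_name" => "Bob",   "age" => 22 }
]

# Insert the valid rows even if some rows fail validation.
response = table.insert rows, skip_invalid: true
puts "some rows were rejected" unless response.success?
```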
@@ -851,12 +812,9 @@ module Gcloud
  ##
  # Permanently deletes the table.
  #
- # === Returns
- #
- # +true+ if the table was deleted.
- #
- # === Example
+ # @return [Boolean] Returns `true` if the table was deleted.
  #
+ # @example
  # require "gcloud"
  #
  # gcloud = Gcloud.new
@@ -866,7 +824,7 @@ module Gcloud
  #
  # table.delete
  #
- # :category: Lifecycle
+ # @!group Lifecycle
  #
  def delete
  ensure_connection!
@@ -881,7 +839,7 @@ module Gcloud
  ##
  # Reloads the table with current data from the BigQuery service.
  #
- # :category: Lifecycle
+ # @!group Lifecycle
  #
  def reload!
  ensure_connection!
@@ -895,8 +853,8 @@ module Gcloud
  alias_method :refresh!, :reload!

  ##
- # New Table from a Google API Client object.
- def self.from_gapi gapi, conn #:nodoc:
+ # @private New Table from a Google API Client object.
+ def self.from_gapi gapi, conn
  klass = class_for gapi
  klass.new.tap do |f|
  f.gapi = gapi
@@ -967,8 +925,8 @@ module Gcloud
  end

  ##
- # Determines if a resumable upload should be used.
- def resumable_upload? file #:nodoc:
+ # @private Determines if a resumable upload should be used.
+ def resumable_upload? file
  ::File.size?(file).to_i > Upload.resumable_threshold
  end