gcloud 0.12.2 → 0.20.0
- checksums.yaml +5 -13
- data/lib/gcloud.rb +27 -456
- data/lib/gcloud/bigquery.rb +2 -382
- data/lib/gcloud/datastore.rb +2 -576
- data/lib/gcloud/dns.rb +2 -321
- data/lib/gcloud/logging.rb +1 -322
- data/lib/gcloud/pubsub.rb +2 -476
- data/lib/gcloud/resource_manager.rb +2 -273
- data/lib/gcloud/storage.rb +2 -440
- data/lib/gcloud/translate.rb +1 -250
- data/lib/gcloud/version.rb +2 -2
- data/lib/gcloud/vision.rb +1 -501
- metadata +36 -332
- data/AUTHENTICATION.md +0 -75
- data/CHANGELOG.md +0 -382
- data/OVERVIEW.md +0 -259
- data/lib/gcloud/backoff.rb +0 -150
- data/lib/gcloud/bigquery/copy_job.rb +0 -97
- data/lib/gcloud/bigquery/credentials.rb +0 -29
- data/lib/gcloud/bigquery/data.rb +0 -239
- data/lib/gcloud/bigquery/dataset.rb +0 -753
- data/lib/gcloud/bigquery/dataset/access.rb +0 -507
- data/lib/gcloud/bigquery/dataset/list.rb +0 -169
- data/lib/gcloud/bigquery/extract_job.rb +0 -117
- data/lib/gcloud/bigquery/insert_response.rb +0 -81
- data/lib/gcloud/bigquery/job.rb +0 -299
- data/lib/gcloud/bigquery/job/list.rb +0 -172
- data/lib/gcloud/bigquery/load_job.rb +0 -202
- data/lib/gcloud/bigquery/project.rb +0 -475
- data/lib/gcloud/bigquery/query_data.rb +0 -234
- data/lib/gcloud/bigquery/query_job.rb +0 -137
- data/lib/gcloud/bigquery/schema.rb +0 -359
- data/lib/gcloud/bigquery/service.rb +0 -506
- data/lib/gcloud/bigquery/table.rb +0 -1141
- data/lib/gcloud/bigquery/table/list.rb +0 -180
- data/lib/gcloud/bigquery/view.rb +0 -475
- data/lib/gcloud/credentials.rb +0 -129
- data/lib/gcloud/datastore/commit.rb +0 -148
- data/lib/gcloud/datastore/credentials.rb +0 -35
- data/lib/gcloud/datastore/cursor.rb +0 -76
- data/lib/gcloud/datastore/dataset.rb +0 -660
- data/lib/gcloud/datastore/dataset/lookup_results.rb +0 -219
- data/lib/gcloud/datastore/dataset/query_results.rb +0 -386
- data/lib/gcloud/datastore/entity.rb +0 -449
- data/lib/gcloud/datastore/errors.rb +0 -41
- data/lib/gcloud/datastore/gql_query.rb +0 -211
- data/lib/gcloud/datastore/grpc_utils.rb +0 -132
- data/lib/gcloud/datastore/key.rb +0 -281
- data/lib/gcloud/datastore/properties.rb +0 -128
- data/lib/gcloud/datastore/query.rb +0 -348
- data/lib/gcloud/datastore/service.rb +0 -167
- data/lib/gcloud/datastore/transaction.rb +0 -362
- data/lib/gcloud/dns/change.rb +0 -158
- data/lib/gcloud/dns/change/list.rb +0 -173
- data/lib/gcloud/dns/credentials.rb +0 -29
- data/lib/gcloud/dns/importer.rb +0 -183
- data/lib/gcloud/dns/project.rb +0 -247
- data/lib/gcloud/dns/record.rb +0 -170
- data/lib/gcloud/dns/record/list.rb +0 -174
- data/lib/gcloud/dns/service.rb +0 -167
- data/lib/gcloud/dns/zone.rb +0 -759
- data/lib/gcloud/dns/zone/list.rb +0 -168
- data/lib/gcloud/dns/zone/transaction.rb +0 -176
- data/lib/gcloud/errors.rb +0 -206
- data/lib/gcloud/gce.rb +0 -56
- data/lib/gcloud/grpc_utils.rb +0 -87
- data/lib/gcloud/logging/credentials.rb +0 -29
- data/lib/gcloud/logging/entry.rb +0 -465
- data/lib/gcloud/logging/entry/http_request.rb +0 -141
- data/lib/gcloud/logging/entry/list.rb +0 -177
- data/lib/gcloud/logging/entry/operation.rb +0 -90
- data/lib/gcloud/logging/logger.rb +0 -307
- data/lib/gcloud/logging/metric.rb +0 -169
- data/lib/gcloud/logging/metric/list.rb +0 -172
- data/lib/gcloud/logging/project.rb +0 -642
- data/lib/gcloud/logging/resource.rb +0 -84
- data/lib/gcloud/logging/resource_descriptor.rb +0 -137
- data/lib/gcloud/logging/resource_descriptor/list.rb +0 -174
- data/lib/gcloud/logging/service.rb +0 -267
- data/lib/gcloud/logging/sink.rb +0 -227
- data/lib/gcloud/logging/sink/list.rb +0 -171
- data/lib/gcloud/pubsub/credentials.rb +0 -29
- data/lib/gcloud/pubsub/message.rb +0 -94
- data/lib/gcloud/pubsub/policy.rb +0 -204
- data/lib/gcloud/pubsub/project.rb +0 -482
- data/lib/gcloud/pubsub/received_message.rb +0 -160
- data/lib/gcloud/pubsub/service.rb +0 -334
- data/lib/gcloud/pubsub/subscription.rb +0 -565
- data/lib/gcloud/pubsub/subscription/list.rb +0 -208
- data/lib/gcloud/pubsub/topic.rb +0 -511
- data/lib/gcloud/pubsub/topic/list.rb +0 -174
- data/lib/gcloud/pubsub/topic/publisher.rb +0 -85
- data/lib/gcloud/resource_manager/credentials.rb +0 -30
- data/lib/gcloud/resource_manager/manager.rb +0 -266
- data/lib/gcloud/resource_manager/policy.rb +0 -211
- data/lib/gcloud/resource_manager/project.rb +0 -484
- data/lib/gcloud/resource_manager/project/list.rb +0 -167
- data/lib/gcloud/resource_manager/project/updater.rb +0 -130
- data/lib/gcloud/resource_manager/service.rb +0 -127
- data/lib/gcloud/storage/bucket.rb +0 -775
- data/lib/gcloud/storage/bucket/acl.rb +0 -810
- data/lib/gcloud/storage/bucket/cors.rb +0 -153
- data/lib/gcloud/storage/bucket/list.rb +0 -172
- data/lib/gcloud/storage/credentials.rb +0 -29
- data/lib/gcloud/storage/errors.rb +0 -65
- data/lib/gcloud/storage/file.rb +0 -842
- data/lib/gcloud/storage/file/acl.rb +0 -425
- data/lib/gcloud/storage/file/list.rb +0 -191
- data/lib/gcloud/storage/file/verifier.rb +0 -67
- data/lib/gcloud/storage/project.rb +0 -316
- data/lib/gcloud/storage/service.rb +0 -347
- data/lib/gcloud/translate/api.rb +0 -241
- data/lib/gcloud/translate/detection.rb +0 -137
- data/lib/gcloud/translate/language.rb +0 -69
- data/lib/gcloud/translate/service.rb +0 -80
- data/lib/gcloud/translate/translation.rb +0 -112
- data/lib/gcloud/vision/annotate.rb +0 -224
- data/lib/gcloud/vision/annotation.rb +0 -455
- data/lib/gcloud/vision/annotation/entity.rb +0 -234
- data/lib/gcloud/vision/annotation/face.rb +0 -1750
- data/lib/gcloud/vision/annotation/properties.rb +0 -245
- data/lib/gcloud/vision/annotation/safe_search.rb +0 -161
- data/lib/gcloud/vision/annotation/text.rb +0 -236
- data/lib/gcloud/vision/annotation/vertex.rb +0 -108
- data/lib/gcloud/vision/credentials.rb +0 -29
- data/lib/gcloud/vision/image.rb +0 -590
- data/lib/gcloud/vision/location.rb +0 -115
- data/lib/gcloud/vision/project.rb +0 -278
- data/lib/gcloud/vision/service.rb +0 -66
- data/lib/google/api/annotations.rb +0 -14
- data/lib/google/api/http.rb +0 -30
- data/lib/google/api/label.rb +0 -24
- data/lib/google/api/monitored_resource.rb +0 -25
- data/lib/google/datastore/v1beta3/datastore.rb +0 -115
- data/lib/google/datastore/v1beta3/datastore_services.rb +0 -33
- data/lib/google/datastore/v1beta3/entity.rb +0 -63
- data/lib/google/datastore/v1beta3/query.rb +0 -128
- data/lib/google/devtools/cloudtrace/v1/trace.rb +0 -78
- data/lib/google/devtools/cloudtrace/v1/trace_services.rb +0 -32
- data/lib/google/example/library/v1/library.rb +0 -91
- data/lib/google/example/library/v1/library_services.rb +0 -40
- data/lib/google/iam/v1/iam_policy.rb +0 -33
- data/lib/google/iam/v1/iam_policy_services.rb +0 -30
- data/lib/google/iam/v1/policy.rb +0 -25
- data/lib/google/logging/type/http_request.rb +0 -28
- data/lib/google/logging/type/log_severity.rb +0 -27
- data/lib/google/logging/v2/log_entry.rb +0 -44
- data/lib/google/logging/v2/logging.rb +0 -56
- data/lib/google/logging/v2/logging_config.rb +0 -59
- data/lib/google/logging/v2/logging_config_services.rb +0 -32
- data/lib/google/logging/v2/logging_metrics.rb +0 -51
- data/lib/google/logging/v2/logging_metrics_services.rb +0 -32
- data/lib/google/logging/v2/logging_services.rb +0 -31
- data/lib/google/longrunning/operations.rb +0 -50
- data/lib/google/longrunning/operations_services.rb +0 -29
- data/lib/google/protobuf/descriptor.rb +0 -0
- data/lib/google/pubsub/v1/pubsub.rb +0 -129
- data/lib/google/pubsub/v1/pubsub_services.rb +0 -56
- data/lib/google/pubsub/v1beta2/pubsub.rb +0 -126
- data/lib/google/pubsub/v1beta2/pubsub_services.rb +0 -56
- data/lib/google/rpc/code.rb +0 -32
- data/lib/google/rpc/error_details.rb +0 -61
- data/lib/google/rpc/status.rb +0 -19
- data/lib/google/type/color.rb +0 -20
- data/lib/google/type/date.rb +0 -18
- data/lib/google/type/dayofweek.rb +0 -23
- data/lib/google/type/latlng.rb +0 -17
- data/lib/google/type/money.rb +0 -18
- data/lib/google/type/timeofday.rb +0 -19
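In this release the gcloud gem becomes a thin compatibility wrapper: each service entry point under data/lib/gcloud/ is reduced to a single require of the corresponding google-cloud library, and the implementation files listed above are removed, as the diffs below show. The sketch below illustrates what an upgrade can look like in application code; it is not taken from this diff, and it assumes the google-cloud entry point Google::Cloud.new and its #bigquery/#datastore helpers mirror the old Gcloud factory methods documented in the removed files.

```ruby
# Hypothetical before/after sketch (assumes Google::Cloud.new mirrors Gcloud.new).

# gcloud 0.12.2
require "gcloud"
gcloud   = Gcloud.new "my-project", "/path/to/keyfile.json"
bigquery = gcloud.bigquery

# google-cloud, which gcloud 0.20.0 delegates to
require "google/cloud"
gcloud   = Google::Cloud.new "my-project", "/path/to/keyfile.json"
bigquery = gcloud.bigquery
```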
data/lib/gcloud/bigquery.rb
CHANGED
@@ -1,4 +1,4 @@
-# Copyright
+# Copyright 2016 Google Inc. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,384 +14,4 @@


 require "gcloud"
-require "
-
-module Gcloud
-##
-# Creates a new `Project` instance connected to the BigQuery service.
-# Each call creates a new connection.
-#
-# For more information on connecting to Google Cloud see the [Authentication
-# Guide](https://googlecloudplatform.github.io/gcloud-ruby/#/docs/guides/authentication).
-#
-# @param [String] project Identifier for a BigQuery project. If not present,
-# the default project for the credentials is used.
-# @param [String, Hash] keyfile Keyfile downloaded from Google Cloud. If file
-# path the file must be readable.
-# @param [String, Array<String>] scope The OAuth 2.0 scopes controlling the
-# set of resources and operations that the connection can access. See [Using
-# OAuth 2.0 to Access Google
-# APIs](https://developers.google.com/identity/protocols/OAuth2).
-#
-# The default scope is:
-#
-# * `https://www.googleapis.com/auth/bigquery`
-# @param [Integer] retries Number of times to retry requests on server error.
-# The default value is `3`. Optional.
-# @param [Integer] timeout Default timeout to use in requests. Optional.
-#
-# @return [Gcloud::Bigquery::Project]
-#
-# @example
-#   require "gcloud/bigquery"
-#
-#   bigquery = Gcloud.bigquery
-#   dataset = bigquery.dataset "my_dataset"
-#   table = dataset.table "my_table"
-#
-def self.bigquery project = nil, keyfile = nil, scope: nil, retries: nil,
-timeout: nil
-project ||= Gcloud::Bigquery::Project.default_project
-project = project.to_s # Always cast to a string
-fail ArgumentError, "project is missing" if project.empty?
-
-if keyfile.nil?
-credentials = Gcloud::Bigquery::Credentials.default scope: scope
-else
-credentials = Gcloud::Bigquery::Credentials.new keyfile, scope: scope
-end
-
-Gcloud::Bigquery::Project.new(
-Gcloud::Bigquery::Service.new(
-project, credentials, retries: retries, timeout: timeout))
-end
-
-##
-# # Google Cloud BigQuery
-#
-# Google Cloud BigQuery enables super-fast, SQL-like queries against massive
-# datasets, using the processing power of Google's infrastructure. To learn
-# more, read [What is
-# BigQuery?](https://cloud.google.com/bigquery/what-is-bigquery).
-#
-# The goal of gcloud-ruby is to provide an API that is comfortable
-# to Rubyists. Authentication is handled by {Gcloud#bigquery}. You can provide
-# the project and credential information to connect to the BigQuery service,
-# or if you are running on Google Compute Engine this configuration is taken
-# care of for you. You can read more about the options for connecting in the
-# [Authentication
-# Guide](https://googlecloudplatform.github.io/gcloud-ruby/#/docs/guides/authentication).
-#
-# To help you get started quickly, the first few examples below use a public
-# dataset provided by Google. As soon as you have [signed
-# up](https://cloud.google.com/bigquery/sign-up) to use BigQuery, and provided
-# that you stay in the free tier for queries, you should be able to run these
-# first examples without the need to set up billing or to load data (although
-# we'll show you how to do that too.)
-#
-# ## Listing Datasets and Tables
-#
-# A BigQuery project holds datasets, which in turn hold tables. Assuming that
-# you have not yet created datasets or tables in your own project, let's
-# connect to Google's `publicdata` project, and see what you find.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new "publicdata"
-# bigquery = gcloud.bigquery
-#
-# bigquery.datasets.count #=> 1
-# bigquery.datasets.first.dataset_id #=> "samples"
-#
-# dataset = bigquery.datasets.first
-# tables = dataset.tables
-#
-# tables.count #=> 7
-# tables.map &:table_id #=> [..., "shakespeare", "trigrams", "wikipedia"]
-# ```
-#
-# In addition listing all datasets and tables in the project, you can also
-# retrieve individual datasets and tables by ID. Let's look at the structure
-# of the `shakespeare` table, which contains an entry for every word in every
-# play written by Shakespeare.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new "publicdata"
-# bigquery = gcloud.bigquery
-#
-# dataset = bigquery.dataset "samples"
-# table = dataset.table "shakespeare"
-#
-# table.headers #=> ["word", "word_count", "corpus", "corpus_date"]
-# table.rows_count #=> 164656
-# ```
-#
-# Now that you know the column names for the Shakespeare table, you can write
-# and run a query.
-#
-# ## Running queries
-#
-# BigQuery offers both synchronous and asynchronous methods, as explained in
-# [Querying Data](https://cloud.google.com/bigquery/querying-data).
-#
-# ### Synchronous queries
-#
-# Let's start with the simpler synchronous approach. Notice that this time you
-# are connecting using your own default project. This is necessary for running
-# a query, since queries need to be able to create tables to hold results.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# bigquery = gcloud.bigquery
-#
-# sql = "SELECT TOP(word, 50) as word, COUNT(*) as count " +
-# "FROM publicdata:samples.shakespeare"
-# data = bigquery.query sql
-#
-# data.count #=> 50
-# data.next? #=> false
-# data.first #=> {"word"=>"you", "count"=>42}
-# ```
-#
-# The `TOP` function shown above is just one of a variety of functions
-# offered by BigQuery. See the [Query
-# Reference](https://cloud.google.com/bigquery/query-reference) for a full
-# listing.
-#
-# ### Asynchronous queries
-#
-# Because you probably should not block for most BigQuery operations,
-# including querying as well as importing, exporting, and copying data, the
-# BigQuery API enables you to manage longer-running jobs. In the asynchronous
-# approach to running a query, an instance of {Gcloud::Bigquery::QueryJob} is
-# returned, rather than an instance of {Gcloud::Bigquery::QueryData}.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# bigquery = gcloud.bigquery
-#
-# sql = "SELECT TOP(word, 50) as word, COUNT(*) as count " +
-# "FROM publicdata:samples.shakespeare"
-# job = bigquery.query_job sql
-#
-# job.wait_until_done!
-# if !job.failed?
-# job.query_results.each do |row|
-# puts row["word"]
-# end
-# end
-# ```
-#
-# Once you have determined that the job is done and has not failed, you can
-# obtain an instance of {Gcloud::Bigquery::QueryData} by calling
-# {Gcloud::Bigquery::QueryJob#query_results}. The query results for both of
-# the above examples are stored in temporary tables with a lifetime of about
-# 24 hours. See the final example below for a demonstration of how to store
-# query results in a permanent table.
-#
-# ## Creating Datasets and Tables
-#
-# The first thing you need to do in a new BigQuery project is to create a
-# {Gcloud::Bigquery::Dataset}. Datasets hold tables and control access to
-# them.
-#
-# ```ruby
-# require "gcloud/bigquery"
-#
-# gcloud = Gcloud.new
-# bigquery = gcloud.bigquery
-# dataset = bigquery.create_dataset "my_dataset"
-# ```
-#
-# Now that you have a dataset, you can use it to create a table. Every table
-# is defined by a schema that may contain nested and repeated fields. The
-# example below shows a schema with a repeated record field named
-# `cities_lived`. (For more information about nested and repeated fields, see
-# [Preparing Data for
-# BigQuery](https://cloud.google.com/bigquery/preparing-data-for-bigquery).)
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# bigquery = gcloud.bigquery
-# dataset = bigquery.dataset "my_dataset"
-#
-# table = dataset.create_table "people" do |schema|
-# schema.string "first_name", mode: :required
-# schema.record "cities_lived", mode: :repeated do |nested_schema|
-# nested_schema.string "place", mode: :required
-# nested_schema.integer "number_of_years", mode: :required
-# end
-# end
-# ```
-#
-# Because of the repeated field in this schema, we cannot use the CSV format
-# to load data into the table.
-#
-# ## Loading records
-#
-# In addition to CSV, data can be imported from files that are formatted as
-# [Newline-delimited JSON](http://jsonlines.org/) or
-# [Avro](http://avro.apache.org/), or from a Google Cloud Datastore backup. It
-# can also be "streamed" into BigQuery.
-#
-# To follow along with these examples, you will need to set up billing on the
-# [Google Developers Console](https://console.developers.google.com).
-#
-# ### Streaming records
-#
-# For situations in which you want new data to be available for querying as
-# soon as possible, inserting individual records directly from your Ruby
-# application is a great approach.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# bigquery = gcloud.bigquery
-# dataset = bigquery.dataset "my_dataset"
-# table = dataset.table "people"
-#
-# rows = [
-# {
-# "first_name" => "Anna",
-# "cities_lived" => [
-# {
-# "place" => "Stockholm",
-# "number_of_years" => 2
-# }
-# ]
-# },
-# {
-# "first_name" => "Bob",
-# "cities_lived" => [
-# {
-# "place" => "Seattle",
-# "number_of_years" => 5
-# },
-# {
-# "place" => "Austin",
-# "number_of_years" => 6
-# }
-# ]
-# }
-# ]
-# table.insert rows
-# ```
-#
-# There are some trade-offs involved with streaming, so be sure to read the
-# discussion of data consistency in [Streaming Data Into
-# BigQuery](https://cloud.google.com/bigquery/streaming-data-into-bigquery).
-#
-# ### Uploading a file
-#
-# To follow along with this example, please download the
-# [names.zip](http://www.ssa.gov/OACT/babynames/names.zip) archive from the
-# U.S. Social Security Administration. Inside the archive you will find over
-# 100 files containing baby name records since the year 1880. A PDF file also
-# contained in the archive specifies the schema used below.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# bigquery = gcloud.bigquery
-# dataset = bigquery.dataset "my_dataset"
-# table = dataset.create_table "baby_names" do |schema|
-# schema.string "name", mode: :required
-# schema.string "sex", mode: :required
-# schema.integer "number", mode: :required
-# end
-#
-# file = File.open "names/yob2014.txt"
-# load_job = table.load file, format: "csv"
-# ```
-#
-# Because the names data, although formatted as CSV, is distributed in files
-# with a `.txt` extension, this example explicitly passes the `format` option
-# in order to demonstrate how to handle such situations. Because CSV is the
-# default format for load operations, the option is not actually necessary.
-# For JSON saved with a `.txt` extension, however, it would be.
-#
-# ## Exporting query results to Google Cloud Storage
-#
-# The example below shows how to pass the `table` option with a query in order
-# to store results in a permanent table. It also shows how to export the
-# result data to a Google Cloud Storage file. In order to follow along, you
-# will need to enable the Google Cloud Storage API in addition to setting up
-# billing.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# bigquery = gcloud.bigquery
-# dataset = bigquery.dataset "my_dataset"
-# source_table = dataset.table "baby_names"
-# result_table = dataset.create_table "baby_names_results"
-#
-# sql = "SELECT name, number as count " +
-# "FROM baby_names " +
-# "WHERE name CONTAINS 'Sam' " +
-# "ORDER BY count DESC"
-# query_job = dataset.query_job sql, table: result_table
-#
-# query_job.wait_until_done!
-#
-# if !query_job.failed?
-#
-# storage = gcloud.storage
-# bucket_id = "bigquery-exports-#{SecureRandom.uuid}"
-# bucket = storage.create_bucket bucket_id
-# extract_url = "gs://#{bucket.id}/baby-names-sam.csv"
-#
-# extract_job = result_table.extract extract_url
-#
-# extract_job.wait_until_done!
-#
-# # Download to local filesystem
-# bucket.files.first.download "baby-names-sam.csv"
-#
-# end
-# ```
-#
-# If a table you wish to export contains a large amount of data, you can pass
-# a wildcard URI to export to multiple files (for sharding), or an array of
-# URIs (for partitioning), or both. See [Exporting Data From
-# BigQuery](https://cloud.google.com/bigquery/exporting-data-from-bigquery)
-# for details.
-#
-# ## Configuring retries and timeout
-#
-# You can configure how many times API requests may be automatically retried.
-# When an API request fails, the response will be inspected to see if the
-# request meets criteria indicating that it may succeed on retry, such as
-# `500` and `503` status codes or a specific internal error code such as
-# `rateLimitExceeded`. If it meets the criteria, the request will be retried
-# after a delay. If another error occurs, the delay will be increased before a
-# subsequent attempt, until the `retries` limit is reached.
-#
-# You can also set the request `timeout` value in seconds.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# bigquery = gcloud.bigquery retries: 10, timeout: 120
-# ```
-#
-# See the [BigQuery error
-# table](https://cloud.google.com/bigquery/troubleshooting-errors#errortable)
-# for a list of error conditions.
-#
-module Bigquery
-end
-end
+require "google/cloud/bigquery"
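The BigQuery overview and the Gcloud.bigquery factory above now live in the google-cloud-bigquery library; gcloud/bigquery.rb only loads it. As a quick orientation, here is the removed synchronous-query example restated against the new require. This is a sketch, assuming Google::Cloud#bigquery accepts the same retries:/timeout: options and Project#query behaves as described in the removed documentation.

```ruby
# Minimal sketch restating the removed example under the new namespace
# (assumes Google::Cloud#bigquery mirrors the old Gcloud#bigquery factory).
require "google/cloud"

gcloud   = Google::Cloud.new
bigquery = gcloud.bigquery retries: 10, timeout: 120

sql  = "SELECT TOP(word, 50) as word, COUNT(*) as count " \
       "FROM publicdata:samples.shakespeare"
data = bigquery.query sql
data.count #=> 50
```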
data/lib/gcloud/datastore.rb
CHANGED
@@ -1,4 +1,4 @@
-# Copyright
+# Copyright 2016 Google Inc. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,578 +14,4 @@


 require "gcloud"
-require "
-require "gcloud/datastore/dataset"
-require "gcloud/datastore/transaction"
-require "gcloud/datastore/credentials"
-
-module Gcloud
-##
-# Creates a new object for connecting to the Datastore service.
-# Each call creates a new connection.
-#
-# For more information on connecting to Google Cloud see the [Authentication
-# Guide](https://googlecloudplatform.github.io/gcloud-ruby/#/docs/guides/authentication).
-#
-# @param [String] project Dataset identifier for the Datastore you are
-# connecting to.
-# @param [String, Hash] keyfile Keyfile downloaded from Google Cloud. If file
-# path the file must be readable.
-# @param [String, Array<String>] scope The OAuth 2.0 scopes controlling the
-# set of resources and operations that the connection can access. See [Using
-# OAuth 2.0 to Access Google
-# APIs](https://developers.google.com/identity/protocols/OAuth2).
-#
-# The default scope is:
-#
-# * `https://www.googleapis.com/auth/datastore`
-# @param [Integer] retries Number of times to retry requests on server error.
-# The default value is `3`. Optional.
-# @param [Integer] timeout Default timeout to use in requests. Optional.
-#
-# @return [Gcloud::Datastore::Dataset]
-#
-# @example
-#   require "gcloud/datastore"
-#
-#   datastore = Gcloud.datastore "my-todo-project",
-#                                "/path/to/keyfile.json"
-#
-#   task = datastore.entity "Task", "sampleTask" do |t|
-#     t["type"] = "Personal"
-#     t["done"] = false
-#     t["priority"] = 4
-#     t["description"] = "Learn Cloud Datastore"
-#   end
-#
-#   datastore.save task
-#
-def self.datastore project = nil, keyfile = nil, scope: nil, retries: nil,
-timeout: nil
-project ||= Gcloud::Datastore::Dataset.default_project
-project = project.to_s # Always cast to a string
-fail ArgumentError, "project is missing" if project.empty?
-
-if ENV["DATASTORE_EMULATOR_HOST"]
-return Gcloud::Datastore::Dataset.new(
-Gcloud::Datastore::Service.new(
-project, :this_channel_is_insecure,
-host: ENV["DATASTORE_EMULATOR_HOST"], retries: retries))
-end
-
-if keyfile.nil?
-credentials = Gcloud::Datastore::Credentials.default scope: scope
-else
-credentials = Gcloud::Datastore::Credentials.new keyfile, scope: scope
-end
-
-Gcloud::Datastore::Dataset.new(
-Gcloud::Datastore::Service.new(
-project, credentials, retries: retries, timeout: timeout))
-end
-
-##
-# # Google Cloud Datastore
-#
-# Google Cloud Datastore is a fully managed, schemaless database for storing
-# non-relational data. You should feel at home if you are familiar with
-# relational databases, but there are some key differences to be aware of to
-# make the most of using Datastore.
-#
-# The goal of gcloud-ruby is to provide a API that is comfortable to
-# Rubyists. Authentication is handled by {Gcloud#datastore}. You can provide
-# the project and credential information to connect to the Datastore service,
-# or if you are running on Google Compute Engine this configuration is taken
-# care of for you.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new "my-todo-project",
-# "/path/to/keyfile.json"
-# datastore = gcloud.datastore
-#
-# task = datastore.find "Task", "sampleTask"
-# task["priority"] = 5
-# datastore.save task
-# ```
-#
-# You can learn more about various options for connection on the
-# [Authentication
-# Guide](https://googlecloudplatform.github.io/gcloud-ruby/#/docs/guides/authentication).
-#
-# To learn more about Datastore, read the
-# [Google Cloud Datastore Concepts Overview
-# ](https://cloud.google.com/datastore/docs/concepts/overview).
-#
-# ## Retrieving records
-#
-# Records, called "entities" in Datastore, are retrieved by using a key.
-# The key is more than a numeric identifier, it is a complex data structure
-# that can be used to model relationships. The simplest key has a string
-# <tt>kind</tt> value, and either a numeric <tt>id</tt> value, or a string
-# <tt>name</tt> value. A single record can be retrieved by calling
-# {Gcloud::Datastore::Dataset#find} and passing the parts of the key:
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# task = datastore.find "Task", "sampleTask"
-# ```
-#
-# Optionally, {Gcloud::Datastore::Dataset#find} can be given a key object:
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# task_key = datastore.key "Task", 123456
-# task = datastore.find task_key
-# ```
-#
-# See {Gcloud::Datastore::Dataset#find}
-#
-# ## Querying records
-#
-# Multiple records can be found that match criteria.
-# (See {Gcloud::Datastore::Query#where})
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# query = datastore.query("Task").
-# where("done", "=", false)
-#
-# tasks = datastore.run query
-# ```
-#
-# Records can also be ordered. (See {Gcloud::Datastore::Query#order})
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# query = datastore.query("Task").
-# order("created")
-#
-# tasks = datastore.run query
-# ```
-#
-# The number of records returned can be specified.
-# (See {Gcloud::Datastore::Query#limit})
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# query = datastore.query("Task").
-# limit(5)
-#
-# tasks = datastore.run query
-# ```
-#
-# Records' key structures can also be queried.
-# (See {Gcloud::Datastore::Query#ancestor})
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# task_list_key = datastore.key "TaskList", "default"
-#
-# query = datastore.query("Task").
-# ancestor(task_list_key)
-#
-# tasks = datastore.run query
-# ```
-#
-# See {Gcloud::Datastore::Query} and {Gcloud::Datastore::Dataset#run}
-#
-# ### Paginating records
-#
-# All records may not return at once, but multiple calls can be made to
-# Datastore to return them all.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# query = datastore.query("Task")
-# tasks = datastore.run query
-# tasks.all do |task|
-# puts t["description"]
-# end
-# ```
-#
-# See {Gcloud::Datastore::Dataset::LookupResults} and
-# {Gcloud::Datastore::Dataset::QueryResults}
-#
-# ## Creating records
-#
-# New entities can be created and persisted buy calling
-# {Gcloud::Datastore::Dataset#save}. The entity must have a key to be saved.
-# If the key is incomplete then it will be completed when saved.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# task = datastore.entity "Task" do |t|
-# t["type"] = "Personal"
-# t["done"] = false
-# t["priority"] = 4
-# t["description"] = "Learn Cloud Datastore"
-# end
-# task.key.id #=> nil
-# datastore.save task
-# task.key.id #=> 123456
-# ```
-#
-# Multiple new entities may be created in a batch.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# task1 = datastore.entity "Task" do |t|
-# t["type"] = "Personal"
-# t["done"] = false
-# t["priority"] = 4
-# t["description"] = "Learn Cloud Datastore"
-# end
-#
-# task2 = datastore.entity "Task" do |t|
-# t["type"] = "Personal"
-# t["done"] = false
-# t["priority"] = 5
-# t["description"] = "Integrate Cloud Datastore"
-# end
-#
-# tasks = datastore.save(task1, task2)
-# task_key1 = tasks[0].key
-# task_key2 = tasks[1].key
-# ```
-#
-# Entities in Datastore form a hierarchically structured space similar to the
-# directory structure of a file system. When you create an entity, you can
-# optionally designate another entity as its parent; the new entity is a child
-# of the parent entity.
-#
-# ```ruby
-# task_key = datastore.key "Task", "sampleTask"
-# task_key.parent = datastore.key "TaskList", "default"
-#
-# task = datastore.entity task_key do |t|
-# t["type"] = "Personal"
-# t["done"] = false
-# t["priority"] = 5
-# t["description"] = "Integrate Cloud Datastore"
-# end
-# ```
-#
-# ## Setting properties
-#
-# Entities hold properties. A property has a name that is a string or symbol,
-# and a value that is an object. Most value objects are supported, including
-# String, Integer, Date, Time, and even other entity or key objects. Changes
-# to the entity's properties are persisted by calling
-# {Gcloud::Datastore::Dataset#save}.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# task = datastore.find "Task", "sampleTask"
-# # Read the priority property
-# task["priority"] #=> 4
-# # Write the priority property
-# task["priority"] = 5
-# # Persist the changes
-# datastore.save task
-# ```
-#
-# Array properties can be used to store more than one value.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# task = datastore.entity "Task", "sampleTask" do |t|
-# t["tags"] = ["fun", "programming"]
-# t["collaborators"] = ["alice", "bob"]
-# end
-# ```
-#
-# ## Deleting records
-#
-# Entities can be removed from Datastore by calling
-# {Gcloud::Datastore::Dataset#delete} and passing the entity object or the
-# entity's key object.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# task = datastore.find "Task", "sampleTask"
-# datastore.delete task
-# ```
-#
-# Multiple entities may be deleted in a batch.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# task_key1 = datastore.key "Task", "sampleTask1"
-# task_key2 = datastore.key "Task", "sampleTask2"
-# datastore.delete task_key1, task_key2
-# ```
-#
-# ## Transactions
-#
-# Complex logic can be wrapped in a Transaction. All queries and updates
-# within the {Gcloud::Datastore::Dataset#transaction} block are run within the
-# transaction scope, and will be automatically committed when the block
-# completes.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# task_key = datastore.key "Task", "sampleTask"
-#
-# datastore.transaction do |tx|
-# if tx.find(task_key).nil?
-# task = datastore.entity task_key do |t|
-# t["type"] = "Personal"
-# t["done"] = false
-# t["priority"] = 4
-# t["description"] = "Learn Cloud Datastore"
-# end
-# tx.save task
-# end
-# end
-# ```
-#
-# Alternatively, if no block is given the transaction object is returned
-# allowing you to commit or rollback manually.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore
-#
-# task_key = datastore.key "Task", "sampleTask"
-#
-# tx = datastore.transaction
-# begin
-# if tx.find(task_key).nil?
-# task = datastore.entity task_key do |t|
-# t["type"] = "Personal"
-# t["done"] = false
-# t["priority"] = 4
-# t["description"] = "Learn Cloud Datastore"
-# end
-# tx.save task
-# end
-# tx.commit
-# rescue
-# tx.rollback
-# end
-# ```
-#
-# See {Gcloud::Datastore::Transaction} and
-# {Gcloud::Datastore::Dataset#transaction}
-#
-# ## Querying metadata
-#
-# Datastore provides programmatic access to some of its metadata to support
-# meta-programming, implementing backend administrative functions, simplify
-# consistent caching, and similar purposes. The metadata available includes
-# information about the entity groups, namespaces, entity kinds, and
-# properties your application uses, as well as the property representations
-# for each property.
-#
-# The special entity kind `__namespace__` can be used to find all the
-# namespaces used in your application entities.
-#
-# ```ruby
-# query = datastore.query("__namespace__").
-# select("__key__").
-# where("__key__", ">=", datastore.key("__namespace__", "g")).
-# where("__key__", "<", datastore.key("__namespace__", "h"))
-#
-# namespaces = datastore.run(query).map do |entity|
-# entity.key.name
-# end
-# ```
-#
-# The special entity kind `__kind__` can be used to return all the
-# kinds used in your application.
-#
-# ```ruby
-# query = datastore.query("__kind__").
-# select("__key__")
-#
-# kinds = datastore.run(query).map do |entity|
-# entity.key.name
-# end
-# ```
-#
-# Property queries return entities of kind `__property__` denoting the indexed
-# properties associated with an entity kind. (Unindexed properties are not
-# included.)
-#
-# ```ruby
-# query = datastore.query("__property__").
-# select("__key__")
-#
-# entities = datastore.run(query)
-# properties_by_kind = entities.each_with_object({}) do |entity, memo|
-# kind = entity.key.parent.name
-# prop = entity.key.name
-# memo[kind] ||= []
-# memo[kind] << prop
-# end
-# ```
-#
-# Property queries support ancestor filtering on a `__kind__` or
-# `__property__` key, to limit the query results to a single kind or property.
-# The `property_representation` property in the entity representing property
-# `p` of kind `k` is an array containing all representations of `p`'s value in
-# any entity of kind `k`.
-#
-# ```ruby
-# ancestor_key = datastore.key "__kind__", "Task"
-# query = datastore.query("__property__").
-# ancestor(ancestor_key)
-#
-# entities = datastore.run(query)
-# representations = entities.each_with_object({}) do |entity, memo|
-# property_name = entity.key.name
-# property_types = entity["property_representation"]
-# memo[property_name] = property_types
-# end
-# ```
-#
-# Property queries can also be filtered with a range over the pseudo-property
-# `__key__`, where the keys denote either `__kind__` or `__property__`
-# entities.
-#
-# ```ruby
-# start_key = datastore.key "__property__", "priority"
-# start_key.parent = datastore.key "__kind__", "Task"
-# query = datastore.query("__property__").
-# select("__key__").
-# where("__key__", ">=", start_key)
-#
-# entities = datastore.run(query)
-# properties_by_kind = entities.each_with_object({}) do |entity, memo|
-# kind = entity.key.parent.name
-# prop = entity.key.name
-# memo[kind] ||= []
-# memo[kind] << prop
-# end
-# ```
-#
-# ## Configuring retries and timeout
-#
-# You can configure how many times API requests may be automatically retried.
-# When an API request fails, the response will be inspected to see if the
-# request meets criteria indicating that it may succeed on retry, such as
-# `500` and `503` status codes or a specific internal error code such as
-# `rateLimitExceeded`. If it meets the criteria, the request will be retried
-# after a delay. If another error occurs, the delay will be increased before a
-# subsequent attempt, until the `retries` limit is reached.
-#
-# You can also set the request `timeout` value in seconds.
-#
-# ```ruby
-# require "gcloud"
-#
-# gcloud = Gcloud.new
-# datastore = gcloud.datastore retries: 10, timeout: 120
-# ```
-#
-# See the [Datastore error
-# codes](https://cloud.google.com/datastore/docs/concepts/errors#error_codes)
-# for a list of error conditions.
-#
-# ## The Cloud Datastore Emulator
-#
-# As of this release, the Cloud Datastore emulator that is part of the gcloud
-# SDK is no longer compatible with gcloud-ruby. This is because the gcloud
-# SDK's Cloud Datastore emulator does not yet support gRPC as a transport
-# layer.
-#
-# A gRPC-compatible emulator is available until the gcloud SDK Cloud Datastore
-# emulator supports gRPC. To use it you must [download the gRPC
-# emulator](https://storage.googleapis.com/gcd/tools/cloud-datastore-emulator-1.1.1.zip)
-# and use the `cloud_datastore_emulator` script.
-#
-# When you run the Cloud Datastore emulator you will see a message similar to
-# the following printed:
-#
-# ```
-# If you are using a library that supports the DATASTORE_EMULATOR_HOST
-# environment variable, run:
-#
-# export DATASTORE_EMULATOR_HOST=localhost:8978
-# ```
-#
-# Now you can connect to the emulator using the `DATASTORE_EMULATOR_HOST`
-# environment variable:
-#
-# ```ruby
-# require "gcloud"
-#
-# # Make Datastore use the emulator
-# ENV["DATASTORE_EMULATOR_HOST"] = "localhost:8978"
-#
-# gcloud = Gcloud.new "emulator-project-id"
-# datastore = gcloud.datastore
-#
-# task = datastore.entity "Task", "emulatorTask" do |t|
-# t["type"] = "Testing"
-# t["done"] = false
-# t["priority"] = 5
-# t["description"] = "Use Datastore Emulator"
-# end
-#
-# datastore.save task
-# ```
-#
-module Datastore
-end
-end
+require "google/cloud/datastore"
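Likewise, the Datastore guide above (including the emulator instructions) moves to the google-cloud-datastore library, and gcloud/datastore.rb now simply loads it. A minimal sketch of the removed find-and-save example under the new require, assuming Google::Cloud#datastore mirrors the old Gcloud#datastore factory:

```ruby
# Minimal sketch restating the removed example under the new namespace
# (assumes Google::Cloud#datastore mirrors the old Gcloud#datastore factory).
require "google/cloud"

gcloud    = Google::Cloud.new "my-todo-project", "/path/to/keyfile.json"
datastore = gcloud.datastore

task = datastore.find "Task", "sampleTask"
task["priority"] = 5
datastore.save task
```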