google-cloud-bigquery-data_transfer 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. checksums.yaml +7 -0
  2. data/.yardopts +8 -0
  3. data/LICENSE +201 -0
  4. data/README.md +54 -0
  5. data/lib/google/cloud/bigquery/data_transfer.rb +132 -0
  6. data/lib/google/cloud/bigquery/data_transfer/credentials.rb +32 -0
  7. data/lib/google/cloud/bigquery/data_transfer/v1.rb +125 -0
  8. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_pb.rb +189 -0
  9. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service_client.rb +860 -0
  10. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service_client_config.json +91 -0
  11. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_services_pb.rb +86 -0
  12. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/data_transfer/v1/data_transfer.rb +500 -0
  13. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/data_transfer/v1/transfer.rb +216 -0
  14. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/any.rb +124 -0
  15. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/duration.rb +90 -0
  16. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/field_mask.rb +223 -0
  17. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/struct.rb +73 -0
  18. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/timestamp.rb +106 -0
  19. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/wrappers.rb +89 -0
  20. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/rpc/status.rb +83 -0
  21. data/lib/google/cloud/bigquery/data_transfer/v1/doc/overview.rb +79 -0
  22. data/lib/google/cloud/bigquery/data_transfer/v1/transfer_pb.rb +82 -0
  23. metadata +149 -0
data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/data_transfer/v1/transfer.rb
@@ -0,0 +1,216 @@
+ # Copyright 2018 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ module Google
+ module Cloud
+ module Bigquery
+ module DataTransfer
+ module V1
+ # Represents a data transfer configuration. A transfer configuration
+ # contains all metadata needed to perform a data transfer. For example,
+ # +destination_dataset_id+ specifies where data should be stored.
+ # When a new transfer configuration is created, the specified
+ # +destination_dataset_id+ is created when needed and shared with the
+ # appropriate data source service account.
+ # Next id: 20
+ # @!attribute [rw] name
+ # @return [String]
+ # The resource name of the transfer config.
+ # Transfer config names have the form
+ # +projects/{project_id}/transferConfigs/{config_id}+,
+ # where +config_id+ is usually a uuid, even though it is not
+ # guaranteed or required. The name is ignored when creating a transfer
+ # config.
+ # @!attribute [rw] destination_dataset_id
+ # @return [String]
+ # The BigQuery target dataset id.
+ # @!attribute [rw] display_name
+ # @return [String]
+ # User specified display name for the data transfer.
+ # @!attribute [rw] data_source_id
+ # @return [String]
+ # Data source id. Cannot be changed once data transfer is created.
+ # @!attribute [rw] params
+ # @return [Google::Protobuf::Struct]
+ # Data transfer specific parameters.
+ # @!attribute [rw] schedule
+ # @return [String]
+ # Data transfer schedule.
+ # If the data source does not support a custom schedule, this should be
+ # empty. If it is empty, the default value for the data source will be
+ # used.
+ # The specified times are in UTC.
+ # Examples of valid format:
+ # +1st,3rd monday of month 15:30+,
+ # +every wed,fri of jan,jun 13:15+, and
+ # +first sunday of quarter 00:00+.
+ # See more explanation about the format here:
+ # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
+ # NOTE: the granularity should be at least 8 hours, or less frequent.
+ # @!attribute [rw] data_refresh_window_days
+ # @return [Integer]
+ # The number of days to look back to automatically refresh the data.
+ # For example, if +data_refresh_window_days = 10+, then every day
+ # BigQuery reingests data for [today-10, today-1], rather than ingesting data
+ # for just [today-1].
+ # Only valid if the data source supports the feature. Set the value to 0
+ # to use the default value.
+ # @!attribute [rw] disabled
+ # @return [true, false]
+ # Whether this config is disabled. When set to true, no runs are scheduled
+ # for a given transfer.
+ # @!attribute [rw] update_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Data transfer modification time. Ignored by server on input.
+ # @!attribute [rw] next_run_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Next time when data transfer will run.
+ # @!attribute [rw] state
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferState]
+ # Output only. State of the most recently updated transfer run.
+ # @!attribute [rw] user_id
+ # @return [Integer]
+ # Output only. Unique ID of the user on whose behalf transfer is done.
+ # Applicable only to data sources that do not support service accounts.
+ # When set to 0, the data source service account credentials are used.
+ # May be negative. Note that this identifier is not stable.
+ # It may change over time even for the same user.
+ # @!attribute [rw] dataset_region
+ # @return [String]
+ # Output only. Region in which BigQuery dataset is located.
+ class TransferConfig; end
+
+ # Represents a data transfer run.
+ # Next id: 27
+ # @!attribute [rw] name
+ # @return [String]
+ # The resource name of the transfer run.
+ # Transfer run names have the form
+ # +projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}+.
+ # The name is ignored when creating a transfer run.
+ # @!attribute [rw] schedule_time
+ # @return [Google::Protobuf::Timestamp]
+ # Minimum time after which a transfer run can be started.
+ # @!attribute [rw] run_time
+ # @return [Google::Protobuf::Timestamp]
+ # For batch transfer runs, specifies the date and time that
+ # data should be ingested.
+ # @!attribute [rw] error_status
+ # @return [Google::Rpc::Status]
+ # Status of the transfer run.
+ # @!attribute [rw] start_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Time when transfer run was started.
+ # Parameter ignored by server for input requests.
+ # @!attribute [rw] end_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Time when transfer run ended.
+ # Parameter ignored by server for input requests.
+ # @!attribute [rw] update_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Last time the data transfer run state was updated.
+ # @!attribute [rw] params
+ # @return [Google::Protobuf::Struct]
+ # Output only. Data transfer specific parameters.
+ # @!attribute [rw] destination_dataset_id
+ # @return [String]
+ # Output only. The BigQuery target dataset id.
+ # @!attribute [rw] data_source_id
+ # @return [String]
+ # Output only. Data source id.
+ # @!attribute [rw] state
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferState]
+ # Data transfer run state. Ignored for input requests.
+ # @!attribute [rw] user_id
+ # @return [Integer]
+ # Output only. Unique ID of the user on whose behalf transfer is done.
+ # Applicable only to data sources that do not support service accounts.
+ # When set to 0, the data source service account credentials are used.
+ # May be negative. Note that this identifier is not stable.
+ # It may change over time even for the same user.
+ # @!attribute [rw] schedule
+ # @return [String]
+ # Output only. Describes the schedule of this transfer run if it was
+ # created as part of a regular schedule. For batch transfer runs that are
+ # scheduled manually, this is empty.
+ # NOTE: the system might choose to delay the schedule depending on the
+ # current load, so +schedule_time+ doesn't always match this.
+ class TransferRun; end
+
+ # Represents a user facing message for a particular data transfer run.
+ # @!attribute [rw] message_time
+ # @return [Google::Protobuf::Timestamp]
+ # Time when message was logged.
+ # @!attribute [rw] severity
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity]
+ # Message severity.
+ # @!attribute [rw] message_text
+ # @return [String]
+ # Message text.
+ class TransferMessage
+ # Represents data transfer user facing message severity.
+ module MessageSeverity
+ # No severity specified.
+ MESSAGE_SEVERITY_UNSPECIFIED = 0
+
+ # Informational message.
+ INFO = 1
+
+ # Warning message.
+ WARNING = 2
+
+ # Error message.
+ ERROR = 3
+ end
+ end
+
+ # Represents data transfer type.
+ module TransferType
+ # Invalid or unknown transfer type placeholder.
+ TRANSFER_TYPE_UNSPECIFIED = 0
+
+ # Batch data transfer.
+ BATCH = 1
+
+ # Streaming data transfer. Streaming data source currently doesn't
+ # support multiple transfer configs per project.
+ STREAMING = 2
+ end
+
+ # Represents data transfer run state.
+ module TransferState
+ # State placeholder.
+ TRANSFER_STATE_UNSPECIFIED = 0
+
+ # Data transfer is scheduled and is waiting to be picked up by
+ # data transfer backend.
+ PENDING = 2
+
+ # Data transfer is in progress.
+ RUNNING = 3
+
+ # Data transfer completed successfully.
+ SUCCEEDED = 4
+
+ # Data transfer failed.
+ FAILED = 5
+
+ # Data transfer is cancelled.
+ CANCELLED = 6
+ end
+ end
+ end
+ end
+ end
+ end
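
To make the TransferConfig documentation above concrete, here is a minimal Ruby sketch of how such a message could be populated. It is illustrative only: the dataset, display name, data source id, schedule, and params values are hypothetical, and the Struct helper comes from the google-protobuf gem's well-known-type extensions rather than from this gem.

    require "google/cloud/bigquery/data_transfer/v1"
    require "google/protobuf/well_known_types"

    # Hypothetical values; each data source defines its own params and schedule.
    config = Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new(
      destination_dataset_id:   "my_dataset",
      display_name:             "nightly load",
      data_source_id:           "scheduled_query",   # assumed data source id
      schedule:                 "every 24 hours",    # note the >= 8 hour granularity rule above
      data_refresh_window_days: 0,                   # 0 means "use the data source default"
      params: Google::Protobuf::Struct.from_hash("query" => "SELECT 1")
    )

    # The name is ignored on create; the service assigns
    # projects/{project_id}/transferConfigs/{config_id}.
    puts config.name.empty?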
data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/any.rb
@@ -0,0 +1,124 @@
+ # Copyright 2018 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ module Google
+ module Protobuf
+ # +Any+ contains an arbitrary serialized protocol buffer message along with a
+ # URL that describes the type of the serialized message.
+ #
+ # Protobuf library provides support to pack/unpack Any values in the form
+ # of utility functions or additional generated methods of the Any type.
+ #
+ # Example 1: Pack and unpack a message in C++.
+ #
+ # Foo foo = ...;
+ # Any any;
+ # any.PackFrom(foo);
+ # ...
+ # if (any.UnpackTo(&foo)) {
+ # ...
+ # }
+ #
+ # Example 2: Pack and unpack a message in Java.
+ #
+ # Foo foo = ...;
+ # Any any = Any.pack(foo);
+ # ...
+ # if (any.is(Foo.class)) {
+ # foo = any.unpack(Foo.class);
+ # }
+ #
+ # Example 3: Pack and unpack a message in Python.
+ #
+ # foo = Foo(...)
+ # any = Any()
+ # any.Pack(foo)
+ # ...
+ # if any.Is(Foo.DESCRIPTOR):
+ # any.Unpack(foo)
+ # ...
+ #
+ # Example 4: Pack and unpack a message in Go.
+ #
+ # foo := &pb.Foo{...}
+ # any, err := ptypes.MarshalAny(foo)
+ # ...
+ # foo := &pb.Foo{}
+ # if err := ptypes.UnmarshalAny(any, foo); err != nil {
+ # ...
+ # }
+ #
+ # The pack methods provided by the protobuf library will by default use
+ # 'type.googleapis.com/full.type.name' as the type URL and the unpack
+ # methods only use the fully qualified type name after the last '/'
+ # in the type URL, for example "foo.bar.com/x/y.z" will yield type
+ # name "y.z".
+ #
+ #
+ # = JSON
+ #
+ # The JSON representation of an +Any+ value uses the regular
+ # representation of the deserialized, embedded message, with an
+ # additional field +@type+ which contains the type URL. Example:
+ #
+ # package google.profile;
+ # message Person {
+ # string first_name = 1;
+ # string last_name = 2;
+ # }
+ #
+ # {
+ # "@type": "type.googleapis.com/google.profile.Person",
+ # "firstName": <string>,
+ # "lastName": <string>
+ # }
+ #
+ # If the embedded message type is well-known and has a custom JSON
+ # representation, that representation will be embedded adding a field
+ # +value+ which holds the custom JSON in addition to the +@type+
+ # field. Example (for message {Google::Protobuf::Duration}):
+ #
+ # {
+ # "@type": "type.googleapis.com/google.protobuf.Duration",
+ # "value": "1.212s"
+ # }
+ # @!attribute [rw] type_url
+ # @return [String]
+ # A URL/resource name whose content describes the type of the
+ # serialized protocol buffer message.
+ #
+ # For URLs which use the scheme +http+, +https+, or no scheme, the
+ # following restrictions and interpretations apply:
+ #
+ # * If no scheme is provided, +https+ is assumed.
+ # * The last segment of the URL's path must represent the fully
+ # qualified name of the type (as in +path/google.protobuf.Duration+).
+ # The name should be in a canonical form (e.g., leading "." is
+ # not accepted).
+ # * An HTTP GET on the URL must yield a {Google::Protobuf::Type}
+ # value in binary format, or produce an error.
+ # * Applications are allowed to cache lookup results based on the
+ # URL, or have them precompiled into a binary to avoid any
+ # lookup. Therefore, binary compatibility needs to be preserved
+ # on changes to types. (Use versioned type names to manage
+ # breaking changes.)
+ #
+ # Schemes other than +http+, +https+ (or the empty scheme) might be
+ # used with implementation specific semantics.
+ # @!attribute [rw] value
+ # @return [String]
+ # Must be a valid serialized protocol buffer of the above specified type.
+ class Any; end
+ end
+ end
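
The Any examples above cover C++, Java, Python, and Go; for completeness, here is a small Ruby sketch using the pack/unpack helpers that the google-protobuf gem adds to Google::Protobuf::Any through its well-known-type extensions (assuming those extensions are available in the protobuf version installed alongside this gem).

    require "google/protobuf/duration_pb"
    require "google/protobuf/well_known_types"

    # Pack a message into an Any, inspect its type URL, then unpack it again.
    duration = Google::Protobuf::Duration.new(seconds: 1, nanos: 212_000_000)
    any = Google::Protobuf::Any.pack(duration)

    puts any.type_url                         # "type.googleapis.com/google.protobuf.Duration"
    puts any.is(Google::Protobuf::Duration)   # true
    puts any.unpack(Google::Protobuf::Duration).seconds   # 1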
data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/duration.rb
@@ -0,0 +1,90 @@
+ # Copyright 2018 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ module Google
+ module Protobuf
+ # A Duration represents a signed, fixed-length span of time represented
+ # as a count of seconds and fractions of seconds at nanosecond
+ # resolution. It is independent of any calendar and concepts like "day"
+ # or "month". It is related to Timestamp in that the difference between
+ # two Timestamp values is a Duration and it can be added or subtracted
+ # from a Timestamp. Range is approximately +-10,000 years.
+ #
+ # = Examples
+ #
+ # Example 1: Compute Duration from two Timestamps in pseudo code.
+ #
+ # Timestamp start = ...;
+ # Timestamp end = ...;
+ # Duration duration = ...;
+ #
+ # duration.seconds = end.seconds - start.seconds;
+ # duration.nanos = end.nanos - start.nanos;
+ #
+ # if (duration.seconds < 0 && duration.nanos > 0) {
+ # duration.seconds += 1;
+ # duration.nanos -= 1000000000;
+ # } else if (duration.seconds > 0 && duration.nanos < 0) {
+ # duration.seconds -= 1;
+ # duration.nanos += 1000000000;
+ # }
+ #
+ # Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+ #
+ # Timestamp start = ...;
+ # Duration duration = ...;
+ # Timestamp end = ...;
+ #
+ # end.seconds = start.seconds + duration.seconds;
+ # end.nanos = start.nanos + duration.nanos;
+ #
+ # if (end.nanos < 0) {
+ # end.seconds -= 1;
+ # end.nanos += 1000000000;
+ # } else if (end.nanos >= 1000000000) {
+ # end.seconds += 1;
+ # end.nanos -= 1000000000;
+ # }
+ #
+ # Example 3: Compute Duration from datetime.timedelta in Python.
+ #
+ # td = datetime.timedelta(days=3, minutes=10)
+ # duration = Duration()
+ # duration.FromTimedelta(td)
+ #
+ # = JSON Mapping
+ #
+ # In JSON format, the Duration type is encoded as a string rather than an
+ # object, where the string ends in the suffix "s" (indicating seconds) and
+ # is preceded by the number of seconds, with nanoseconds expressed as
+ # fractional seconds. For example, 3 seconds with 0 nanoseconds should be
+ # encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
+ # be expressed in JSON format as "3.000000001s", and 3 seconds and 1
+ # microsecond should be expressed in JSON format as "3.000001s".
+ # @!attribute [rw] seconds
+ # @return [Integer]
+ # Signed seconds of the span of time. Must be from -315,576,000,000
+ # to +315,576,000,000 inclusive. Note: these bounds are computed from:
+ # 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
+ # @!attribute [rw] nanos
+ # @return [Integer]
+ # Signed fractions of a second at nanosecond resolution of the span
+ # of time. Durations less than one second are represented with a 0
+ # +seconds+ field and a positive or negative +nanos+ field. For durations
+ # of one second or more, a non-zero value for the +nanos+ field must be
+ # of the same sign as the +seconds+ field. Must be from -999,999,999
+ # to +999,999,999 inclusive.
+ class Duration; end
+ end
+ end
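
The pseudo code in Example 1 and the JSON mapping above translate directly into Ruby. The sketch below is only a restatement of that arithmetic with hypothetical helper names; it is not an API provided by this gem or by google-protobuf.

    # Difference of two (seconds, nanos) timestamps, normalized so that a
    # non-zero nanos field carries the same sign as the seconds field.
    def duration_between(start_s, start_n, end_s, end_n)
      seconds = end_s - start_s
      nanos   = end_n - start_n
      if seconds < 0 && nanos > 0
        seconds += 1
        nanos   -= 1_000_000_000
      elsif seconds > 0 && nanos < 0
        seconds -= 1
        nanos   += 1_000_000_000
      end
      [seconds, nanos]
    end

    # JSON mapping: decimal seconds followed by the suffix "s".
    def duration_json(seconds, nanos)
      format("%.9fs", seconds + nanos / 1_000_000_000.0).sub(/\.?0+s\z/, "s")
    end

    seconds, nanos = duration_between(10, 500_000_000, 13, 712_000_000)
    puts duration_json(seconds, nanos)   # "3.212s"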
data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/field_mask.rb
@@ -0,0 +1,223 @@
+ # Copyright 2018 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ module Google
+ module Protobuf
+ # +FieldMask+ represents a set of symbolic field paths, for example:
+ #
+ # paths: "f.a"
+ # paths: "f.b.d"
+ #
+ # Here +f+ represents a field in some root message, +a+ and +b+
+ # fields in the message found in +f+, and +d+ a field found in the
+ # message in +f.b+.
+ #
+ # Field masks are used to specify a subset of fields that should be
+ # returned by a get operation or modified by an update operation.
+ # Field masks also have a custom JSON encoding (see below).
+ #
+ # = Field Masks in Projections
+ #
+ # When used in the context of a projection, a response message or
+ # sub-message is filtered by the API to only contain those fields as
+ # specified in the mask. For example, if the mask in the previous
+ # example is applied to a response message as follows:
+ #
+ # f {
+ # a : 22
+ # b {
+ # d : 1
+ # x : 2
+ # }
+ # y : 13
+ # }
+ # z: 8
+ #
+ # The result will not contain specific values for fields x, y and z
+ # (their value will be set to the default, and omitted in proto text
+ # output):
+ #
+ #
+ # f {
+ # a : 22
+ # b {
+ # d : 1
+ # }
+ # }
+ #
+ # A repeated field is not allowed except at the last position of a
+ # paths string.
+ #
+ # If a FieldMask object is not present in a get operation, the
+ # operation applies to all fields (as if a FieldMask of all fields
+ # had been specified).
+ #
+ # Note that a field mask does not necessarily apply to the
+ # top-level response message. In case of a REST get operation, the
+ # field mask applies directly to the response, but in case of a REST
+ # list operation, the mask instead applies to each individual message
+ # in the returned resource list. In case of a REST custom method,
+ # other definitions may be used. Where the mask applies will be
+ # clearly documented together with its declaration in the API. In
+ # any case, the effect on the returned resource/resources is required
+ # behavior for APIs.
+ #
+ # = Field Masks in Update Operations
+ #
+ # A field mask in update operations specifies which fields of the
+ # targeted resource are going to be updated. The API is required
+ # to only change the values of the fields as specified in the mask
+ # and leave the others untouched. If a resource is passed in to
+ # describe the updated values, the API ignores the values of all
+ # fields not covered by the mask.
+ #
+ # If a repeated field is specified for an update operation, the existing
+ # repeated values in the target resource will be overwritten by the new values.
+ # Note that a repeated field is only allowed in the last position of a +paths+
+ # string.
+ #
+ # If a sub-message is specified in the last position of the field mask for an
+ # update operation, then the existing sub-message in the target resource is
+ # overwritten. Given the target message:
+ #
+ # f {
+ # b {
+ # d : 1
+ # x : 2
+ # }
+ # c : 1
+ # }
+ #
+ # And an update message:
+ #
+ # f {
+ # b {
+ # d : 10
+ # }
+ # }
+ #
+ # then if the field mask is:
+ #
+ # paths: "f.b"
+ #
+ # then the result will be:
+ #
+ # f {
+ # b {
+ # d : 10
+ # }
+ # c : 1
+ # }
+ #
+ # However, if the update mask was:
+ #
+ # paths: "f.b.d"
+ #
+ # then the result would be:
+ #
+ # f {
+ # b {
+ # d : 10
+ # x : 2
+ # }
+ # c : 1
+ # }
+ #
+ # In order to reset a field's value to the default, the field must
+ # be in the mask and set to the default value in the provided resource.
+ # Hence, in order to reset all fields of a resource, provide a default
+ # instance of the resource and set all fields in the mask, or do
+ # not provide a mask as described below.
+ #
+ # If a field mask is not present on update, the operation applies to
+ # all fields (as if a field mask of all fields has been specified).
+ # Note that in the presence of schema evolution, this may mean that
+ # fields the client does not know and has therefore not filled into
+ # the request will be reset to their default. If this is unwanted
+ # behavior, a specific service may require a client to always specify
+ # a field mask, producing an error if not.
+ #
+ # As with get operations, the location of the resource which
+ # describes the updated values in the request message depends on the
+ # operation kind. In any case, the effect of the field mask is
+ # required to be honored by the API.
+ #
+ # == Considerations for HTTP REST
+ #
+ # The HTTP kind of an update operation which uses a field mask must
+ # be set to PATCH instead of PUT in order to satisfy HTTP semantics
+ # (PUT must only be used for full updates).
+ #
+ # = JSON Encoding of Field Masks
+ #
+ # In JSON, a field mask is encoded as a single string where paths are
+ # separated by a comma. Field names in each path are converted
+ # to/from lower-camel naming conventions.
+ #
+ # As an example, consider the following message declarations:
+ #
+ # message Profile {
+ # User user = 1;
+ # Photo photo = 2;
+ # }
+ # message User {
+ # string display_name = 1;
+ # string address = 2;
+ # }
+ #
+ # In proto a field mask for +Profile+ may look as such:
+ #
+ # mask {
+ # paths: "user.display_name"
+ # paths: "photo"
+ # }
+ #
+ # In JSON, the same mask is represented as below:
+ #
+ # {
+ # mask: "user.displayName,photo"
+ # }
+ #
+ # = Field Masks and Oneof Fields
+ #
+ # Field masks treat fields in oneofs just as regular fields. Consider the
+ # following message:
+ #
+ # message SampleMessage {
+ # oneof test_oneof {
+ # string name = 4;
+ # SubMessage sub_message = 9;
+ # }
+ # }
+ #
+ # The field mask can be:
+ #
+ # mask {
+ # paths: "name"
+ # }
+ #
+ # Or:
+ #
+ # mask {
+ # paths: "sub_message"
+ # }
+ #
+ # Note that oneof type names ("test_oneof" in this case) cannot be used in
+ # paths.
+ # @!attribute [rw] paths
+ # @return [Array<String>]
+ # The set of field mask paths.
+ class FieldMask; end
+ end
+ end
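
The JSON encoding rules above (paths joined by commas, with snake_case segments rendered in lower camel case) can be illustrated with a few lines of Ruby. The helper below is hypothetical and exists only to demonstrate the mapping; in practice the protobuf JSON codec performs this conversion for Google::Protobuf::FieldMask.

    # "user.display_name" and "photo" become the single JSON string
    # "user.displayName,photo".
    def field_mask_to_json(paths)
      paths.map do |path|
        path.split(".").map do |segment|
          head, *rest = segment.split("_")
          ([head] + rest.map(&:capitalize)).join
        end.join(".")
      end.join(",")
    end

    puts field_mask_to_json(["user.display_name", "photo"])
    # "user.displayName,photo"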