google-cloud-bigquery-storage-v1 0.6.2 → 0.8.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -69,7 +69,7 @@ module Google
  extend ::Google::Protobuf::MessageExts::ClassMethods
  end
 
- # Estimated stream statistics for a given Stream.
+ # Estimated stream statistics for a given read Stream.
  # @!attribute [rw] progress
  # @return [::Google::Cloud::Bigquery::Storage::V1::StreamStats::Progress]
  # Represents the progress of the current stream.
@@ -162,6 +162,249 @@ module Google
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
  end
+
+ # Request message for `CreateWriteStream`.
+ # @!attribute [rw] parent
+ # @return [::String]
+ # Required. Reference to the table to which the stream belongs, in the format
+ # of `projects/{project}/datasets/{dataset}/tables/{table}`.
+ # @!attribute [rw] write_stream
+ # @return [::Google::Cloud::Bigquery::Storage::V1::WriteStream]
+ # Required. Stream to be created.
+ class CreateWriteStreamRequest
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
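
A minimal usage sketch for the `CreateWriteStreamRequest` message above, assuming the generated `BigQueryWrite::Client` in this gem and placeholder project, dataset, and table names:

    require "google/cloud/bigquery/storage/v1"

    # Sketch: open the write client and create an explicit PENDING stream
    # on a placeholder table.
    write_client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new

    stream = write_client.create_write_stream(
      parent:       "projects/my-project/datasets/my_dataset/tables/my_table",
      write_stream: Google::Cloud::Bigquery::Storage::V1::WriteStream.new(type: :PENDING)
    )

    puts stream.name  # => projects/.../streams/{id}
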
+
+ # Request message for `AppendRows`.
+ #
+ # Due to the nature of AppendRows being a bidirectional streaming RPC, certain
+ # parts of the AppendRowsRequest need only be specified for the first request
+ # sent each time the gRPC network connection is opened/reopened.
+ # @!attribute [rw] write_stream
+ # @return [::String]
+ # Required. The write_stream identifies the target of the append operation, and only
+ # needs to be specified as part of the first request on the gRPC connection.
+ # If provided for subsequent requests, it must match the value of the first
+ # request.
+ #
+ # For explicitly created write streams, the format is:
+ # `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
+ #
+ # For the special default stream, the format is:
+ # `projects/{project}/datasets/{dataset}/tables/{table}/_default`.
+ # @!attribute [rw] offset
+ # @return [::Google::Protobuf::Int64Value]
+ # If present, the write is only performed if the next append offset is same
+ # as the provided value. If not present, the write is performed at the
+ # current end of stream. Specifying a value for this field is not allowed
+ # when calling AppendRows for the '_default' stream.
+ # @!attribute [rw] proto_rows
+ # @return [::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest::ProtoData]
+ # Rows in proto format.
+ # @!attribute [rw] trace_id
+ # @return [::String]
+ # Id set by client to annotate its identity. Only initial request setting is
+ # respected.
+ class AppendRowsRequest
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # ProtoData contains the data rows and schema when constructing append
+ # requests.
+ # @!attribute [rw] writer_schema
+ # @return [::Google::Cloud::Bigquery::Storage::V1::ProtoSchema]
+ # Proto schema used to serialize the data. This value only needs to be
+ # provided as part of the first request on a gRPC network connection,
+ # and will be ignored for subsequent requests on the connection.
+ # @!attribute [rw] rows
+ # @return [::Google::Cloud::Bigquery::Storage::V1::ProtoRows]
+ # Serialized row data in protobuf message format.
+ # Currently, the backend expects the serialized rows to adhere to
+ # proto2 semantics when appending rows, particularly with respect to
+ # how default values are encoded.
+ class ProtoData
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+ end
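
Continuing that sketch, an `AppendRowsRequest` carrying proto rows might be built and sent like this. The bidirectional `append_rows` call takes an Enumerable of requests; `proto_schema` and `encoded_rows` are assumptions standing in for a `ProtoSchema` message and an Array of proto2-serialized row bytes:

    # Sketch only: write_stream and writer_schema are required on the first
    # request of a connection and may be omitted on subsequent ones.
    first_request = Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest.new(
      write_stream: stream.name,
      offset:       Google::Protobuf::Int64Value.new(value: 0),
      proto_rows:   Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest::ProtoData.new(
        writer_schema: proto_schema,      # assumed ProtoSchema
        rows: Google::Cloud::Bigquery::Storage::V1::ProtoRows.new(
          serialized_rows: encoded_rows   # assumed proto2-encoded row bytes
        )
      )
    )

    # append_rows is bidirectional streaming: pass an Enumerable of requests
    # and iterate the Enumerable of responses it returns.
    responses = write_client.append_rows([first_request])
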
+
+ # Response message for `AppendRows`.
+ # @!attribute [rw] append_result
+ # @return [::Google::Cloud::Bigquery::Storage::V1::AppendRowsResponse::AppendResult]
+ # Result if the append is successful.
+ # @!attribute [rw] error
+ # @return [::Google::Rpc::Status]
+ # Error returned when problems were encountered. If present,
+ # it indicates rows were not accepted into the system.
+ # Users can retry or continue with other append requests within the
+ # same connection.
+ #
+ # Additional information about error signalling:
+ #
+ # ALREADY_EXISTS: Happens when an append specified an offset, and the
+ # backend already has received data at this offset. Typically encountered
+ # in retry scenarios, and can be ignored.
+ #
+ # OUT_OF_RANGE: Returned when the specified offset in the stream is beyond
+ # the current end of the stream.
+ #
+ # INVALID_ARGUMENT: Indicates a malformed request or data.
+ #
+ # ABORTED: Request processing is aborted because of prior failures. The
+ # request can be retried if the previous failure is addressed.
+ #
+ # INTERNAL: Indicates server side error(s) that can be retried.
+ # @!attribute [rw] updated_schema
+ # @return [::Google::Cloud::Bigquery::Storage::V1::TableSchema]
+ # If the backend detects a schema update, it is passed to the user so that
+ # the user can use the new schema to format subsequent messages. It will be
+ # empty when no schema updates have occurred.
+ class AppendRowsResponse
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # AppendResult is returned for successful append requests.
+ # @!attribute [rw] offset
+ # @return [::Google::Protobuf::Int64Value]
+ # The row offset at which the last append occurred. The offset will not be
+ # set if appending using default streams.
+ class AppendResult
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+ end
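
A sketch of consuming those responses, mapping the status codes described above onto ignore/retry/fail decisions. It assumes the `responses` Enumerable from the previous sketch and that `google/rpc/code_pb` is available to provide the `Google::Rpc::Code` constants:

    require "google/rpc/code_pb"

    responses.each do |response|
      if response.error
        case response.error.code
        when Google::Rpc::Code::ALREADY_EXISTS
          # A retry re-sent an offset the backend already has; safe to ignore.
        when Google::Rpc::Code::OUT_OF_RANGE,
             Google::Rpc::Code::ABORTED,
             Google::Rpc::Code::INTERNAL
          # Retryable once the underlying condition is addressed.
        else
          raise "append failed: #{response.error.message}"
        end
      else
        puts "appended through offset #{response.append_result.offset&.value}"
      end

      # A non-nil updated_schema means the destination table schema changed.
      warn "table schema updated" if response.updated_schema
    end
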
+
+ # Request message for `GetWriteStream`.
+ # @!attribute [rw] name
+ # @return [::String]
+ # Required. Name of the stream to get, in the form of
+ # `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ class GetWriteStreamRequest
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # Request message for `BatchCommitWriteStreams`.
+ # @!attribute [rw] parent
+ # @return [::String]
+ # Required. Parent table that all the streams should belong to, in the form of
+ # `projects/{project}/datasets/{dataset}/tables/{table}`.
+ # @!attribute [rw] write_streams
+ # @return [::Array<::String>]
+ # Required. The group of streams that will be committed atomically.
+ class BatchCommitWriteStreamsRequest
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # Response message for `BatchCommitWriteStreams`.
+ # @!attribute [rw] commit_time
+ # @return [::Google::Protobuf::Timestamp]
+ # The time at which streams were committed, at microsecond granularity.
+ # This field will only exist when there are no stream errors.
+ # **Note**: if this field is not set, it means the commit was not successful.
+ # @!attribute [rw] stream_errors
+ # @return [::Array<::Google::Cloud::Bigquery::Storage::V1::StorageError>]
+ # Stream-level error if commit failed. Only streams with an error will be in
+ # the list.
+ # If empty, there is no error and all streams are committed successfully.
+ # If non-empty, certain streams have errors and ZERO streams are committed
+ # due to the atomicity guarantee.
+ class BatchCommitWriteStreamsResponse
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # Request message for invoking `FinalizeWriteStream`.
+ # @!attribute [rw] name
+ # @return [::String]
+ # Required. Name of the stream to finalize, in the form of
+ # `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ class FinalizeWriteStreamRequest
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # Response message for `FinalizeWriteStream`.
+ # @!attribute [rw] row_count
+ # @return [::Integer]
+ # Number of rows in the finalized stream.
+ class FinalizeWriteStreamResponse
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
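
For `PENDING` streams, the finalize-then-commit flow described by these messages might look like the following sketch, reusing `write_client` and `stream` from the earlier sketches (the table path is a placeholder):

    # Finalize: no further appends are accepted on the stream.
    finalize = write_client.finalize_write_stream(name: stream.name)
    puts "finalized with #{finalize.row_count} rows"

    # Commit atomically; data becomes visible only if the whole batch succeeds.
    commit = write_client.batch_commit_write_streams(
      parent:        "projects/my-project/datasets/my_dataset/tables/my_table",
      write_streams: [stream.name]
    )

    if commit.commit_time
      puts "committed at #{Time.at(commit.commit_time.seconds)}"
    else
      # Per the docs above, an unset commit_time means nothing was committed.
      commit.stream_errors.each { |e| warn "#{e.entity}: #{e.error_message}" }
    end
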
+
+ # Request message for `FlushRows`.
+ # @!attribute [rw] write_stream
+ # @return [::String]
+ # Required. The stream that is the target of the flush operation.
+ # @!attribute [rw] offset
+ # @return [::Google::Protobuf::Int64Value]
+ # Ending offset of the flush operation. Rows before this offset (including
+ # this offset) will be flushed.
+ class FlushRowsRequest
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # Response message for `FlushRows`.
+ # @!attribute [rw] offset
+ # @return [::Integer]
+ # The rows before this offset (including this offset) are flushed.
+ class FlushRowsResponse
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
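
For `BUFFERED` streams, a `FlushRows` call makes appended rows visible up to a chosen offset. A sketch, with `buffered_stream` standing in for a stream created with `type: :BUFFERED`:

    flush = write_client.flush_rows(
      write_stream: buffered_stream.name,
      offset:       Google::Protobuf::Int64Value.new(value: 99)
    )

    # Rows up to and including this offset are now visible to queries.
    puts "flushed through offset #{flush.offset}"
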
+
+ # Structured custom BigQuery Storage error message. The error can be attached
+ # as error details in the returned rpc Status. In particular, the use of error
+ # codes allows more structured error handling, and reduces the need to evaluate
+ # unstructured error text strings.
+ # @!attribute [rw] code
+ # @return [::Google::Cloud::Bigquery::Storage::V1::StorageError::StorageErrorCode]
+ # BigQuery Storage specific error code.
+ # @!attribute [rw] entity
+ # @return [::String]
+ # Name of the failed entity.
+ # @!attribute [rw] error_message
+ # @return [::String]
+ # Message that describes the error.
+ class StorageError
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # Error code for `StorageError`.
+ module StorageErrorCode
+ # Default error.
+ STORAGE_ERROR_CODE_UNSPECIFIED = 0
+
+ # Table is not found in the system.
+ TABLE_NOT_FOUND = 1
+
+ # Stream is already committed.
+ STREAM_ALREADY_COMMITTED = 2
+
+ # Stream is not found.
+ STREAM_NOT_FOUND = 3
+
+ # Invalid Stream type.
+ # For example, you try to commit a stream that is not pending.
+ INVALID_STREAM_TYPE = 4
+
+ # Invalid Stream state.
+ # For example, you try to commit a stream that is not finalized or has
+ # been garbage collected.
+ INVALID_STREAM_STATE = 5
+
+ # Stream is finalized.
+ STREAM_FINALIZED = 6
+
+ # There is a schema mismatch, caused by the user schema having extra
+ # fields that are not in the BigQuery table schema.
+ SCHEMA_MISMATCH_EXTRA_FIELDS = 7
+ end
+ end
  end
  end
  end
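
Because `StorageError` travels as `google.rpc.Status` details, it can be recovered by unpacking the `Any` entries on a status, for example the `error` field of an `AppendRowsResponse`. A sketch, where `status` is assumed to be such a `Google::Rpc::Status`:

    require "google/protobuf/well_known_types"

    storage_errors = status.details.map do |any|
      # unpack returns nil when the Any holds a different message type.
      any.unpack(Google::Cloud::Bigquery::Storage::V1::StorageError)
    end.compact

    storage_errors.each do |err|
      case err.code
      when :STREAM_ALREADY_COMMITTED, :STREAM_FINALIZED
        # The stream cannot accept further appends; open a new one.
      when :SCHEMA_MISMATCH_EXTRA_FIELDS
        warn "row schema has fields the table lacks: #{err.entity}"
      else
        warn "#{err.code}: #{err.error_message}"
      end
    end
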
@@ -59,6 +59,11 @@ module Google
  # request_stream_count values *may* result in this list being unpopulated,
  # in that case, the user will need to use a List method to get the streams
  # instead, which is not yet available.
+ # @!attribute [r] estimated_total_bytes_scanned
+ # @return [::Integer]
+ # Output only. An estimate on the number of bytes this session will scan when
+ # all streams are completely consumed. This estimate is based on
+ # metadata from the table which might be incomplete or stale.
  class ReadSession
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
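
A sketch showing where the new `estimated_total_bytes_scanned` field surfaces, assuming the generated `BigQueryRead::Client` and placeholder resource names:

    require "google/cloud/bigquery/storage/v1"

    read_client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new

    session = read_client.create_read_session(
      parent: "projects/my-project",
      read_session: Google::Cloud::Bigquery::Storage::V1::ReadSession.new(
        table:       "projects/my-project/datasets/my_dataset/tables/my_table",
        data_format: :AVRO
      ),
      max_stream_count: 1
    )

    # Estimate only: derived from table metadata that may be stale or incomplete.
    puts session.estimated_total_bytes_scanned
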
@@ -112,6 +117,51 @@ module Google
  extend ::Google::Protobuf::MessageExts::ClassMethods
  end
 
+ # Information about a single stream that gets data inside the storage system.
+ # @!attribute [r] name
+ # @return [::String]
+ # Output only. Name of the stream, in the form
+ # `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ # @!attribute [rw] type
+ # @return [::Google::Cloud::Bigquery::Storage::V1::WriteStream::Type]
+ # Immutable. Type of the stream.
+ # @!attribute [r] create_time
+ # @return [::Google::Protobuf::Timestamp]
+ # Output only. Create time of the stream. For the _default stream, this is the
+ # creation_time of the table.
+ # @!attribute [r] commit_time
+ # @return [::Google::Protobuf::Timestamp]
+ # Output only. Commit time of the stream.
+ # If a stream is of `COMMITTED` type, then it will have a commit_time same as
+ # `create_time`. If the stream is of `PENDING` type, empty commit_time
+ # means it is not committed.
+ # @!attribute [r] table_schema
+ # @return [::Google::Cloud::Bigquery::Storage::V1::TableSchema]
+ # Output only. The schema of the destination table. It is only returned in
+ # `CreateWriteStream` response. Caller should generate data that's
+ # compatible with this schema to send in initial `AppendRowsRequest`.
+ # The table schema could go out of date during the life time of the stream.
+ class WriteStream
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # Type enum of the stream.
+ module Type
+ # Unknown type.
+ TYPE_UNSPECIFIED = 0
+
+ # Data will commit automatically and appear as soon as the write is
+ # acknowledged.
+ COMMITTED = 1
+
+ # Data is invisible until the stream is committed.
+ PENDING = 2
+
+ # Data is only visible up to the offset to which it was flushed.
+ BUFFERED = 3
+ end
+ end
+
  # Data format for input or output data.
  module DataFormat
  DATA_FORMAT_UNSPECIFIED = 0
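
The `Type` values above determine when appended data becomes visible. A sketch that inspects an existing stream, reusing `write_client` and `stream` from the earlier sketches:

    info = write_client.get_write_stream(name: stream.name)

    case info.type
    when :COMMITTED then puts "rows are visible as soon as appends are acknowledged"
    when :PENDING   then puts "rows stay invisible until BatchCommitWriteStreams"
    when :BUFFERED  then puts "rows become visible up to the last FlushRows offset"
    end

    # For PENDING streams an unset commit_time means the stream is not yet committed.
    puts info.commit_time ? "committed at #{Time.at(info.commit_time.seconds)}" : "not committed"
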
@@ -0,0 +1,172 @@
+ # frozen_string_literal: true
+
+ # Copyright 2021 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+
+ module Google
+ module Cloud
+ module Bigquery
+ module Storage
+ module V1
+ # Schema of a table.
+ # @!attribute [rw] fields
+ # @return [::Array<::Google::Cloud::Bigquery::Storage::V1::TableFieldSchema>]
+ # Describes the fields in a table.
+ class TableSchema
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # TableFieldSchema defines a single field/column within a table schema.
+ # @!attribute [rw] name
+ # @return [::String]
+ # Required. The field name. The name must contain only letters (a-z, A-Z),
+ # numbers (0-9), or underscores (_), and must start with a letter or
+ # underscore. The maximum length is 128 characters.
+ # @!attribute [rw] type
+ # @return [::Google::Cloud::Bigquery::Storage::V1::TableFieldSchema::Type]
+ # Required. The field data type.
+ # @!attribute [rw] mode
+ # @return [::Google::Cloud::Bigquery::Storage::V1::TableFieldSchema::Mode]
+ # Optional. The field mode. The default value is NULLABLE.
+ # @!attribute [rw] fields
+ # @return [::Array<::Google::Cloud::Bigquery::Storage::V1::TableFieldSchema>]
+ # Optional. Describes the nested schema fields if the type property is set to STRUCT.
+ # @!attribute [rw] description
+ # @return [::String]
+ # Optional. The field description. The maximum length is 1,024 characters.
+ # @!attribute [rw] max_length
+ # @return [::Integer]
+ # Optional. Maximum length of values of this field for STRINGS or BYTES.
+ #
+ # If max_length is not specified, no maximum length constraint is imposed
+ # on this field.
+ #
+ # If type = "STRING", then max_length represents the maximum UTF-8
+ # length of strings in this field.
+ #
+ # If type = "BYTES", then max_length represents the maximum number of
+ # bytes in this field.
+ #
+ # It is invalid to set this field if type is not "STRING" or "BYTES".
+ # @!attribute [rw] precision
+ # @return [::Integer]
+ # Optional. Precision (maximum number of total digits in base 10) and scale
+ # (maximum number of digits in the fractional part in base 10) constraints
+ # for values of this field for NUMERIC or BIGNUMERIC.
+ #
+ # It is invalid to set precision or scale if type is not "NUMERIC" or
+ # "BIGNUMERIC".
+ #
+ # If precision and scale are not specified, no value range constraint is
+ # imposed on this field insofar as values are permitted by the type.
+ #
+ # Values of this NUMERIC or BIGNUMERIC field must be in this range when:
+ #
+ # * Precision (P) and scale (S) are specified:
+ # [-10^(P-S) + 10^(-S), 10^(P-S) - 10^(-S)]
+ # * Precision (P) is specified but not scale (and thus scale is
+ # interpreted to be equal to zero):
+ # [-10^P + 1, 10^P - 1].
+ #
+ # Acceptable values for precision and scale if both are specified:
+ #
+ # * If type = "NUMERIC":
+ # 1 <= precision - scale <= 29 and 0 <= scale <= 9.
+ # * If type = "BIGNUMERIC":
+ # 1 <= precision - scale <= 38 and 0 <= scale <= 38.
+ #
+ # Acceptable values for precision if only precision is specified but not
+ # scale (and thus scale is interpreted to be equal to zero):
+ #
+ # * If type = "NUMERIC": 1 <= precision <= 29.
+ # * If type = "BIGNUMERIC": 1 <= precision <= 38.
+ #
+ # If scale is specified but not precision, then it is invalid.
+ # @!attribute [rw] scale
+ # @return [::Integer]
+ # Optional. See documentation for precision.
+ class TableFieldSchema
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ module Type
+ # Illegal value
+ TYPE_UNSPECIFIED = 0
+
+ # 64K, UTF8
+ STRING = 1
+
+ # 64-bit signed
+ INT64 = 2
+
+ # 64-bit IEEE floating point
+ DOUBLE = 3
+
+ # Aggregate type
+ STRUCT = 4
+
+ # 64K, Binary
+ BYTES = 5
+
+ # 2-valued
+ BOOL = 6
+
+ # 64-bit signed usec since UTC epoch
+ TIMESTAMP = 7
+
+ # Civil date - Year, Month, Day
+ DATE = 8
+
+ # Civil time - Hour, Minute, Second, Microseconds
+ TIME = 9
+
+ # Combination of civil date and civil time
+ DATETIME = 10
+
+ # Geography object
+ GEOGRAPHY = 11
+
+ # Numeric value
+ NUMERIC = 12
+
+ # BigNumeric value
+ BIGNUMERIC = 13
+
+ # Interval
+ INTERVAL = 14
+
+ # JSON, String
+ JSON = 15
+ end
+
+ module Mode
+ # Illegal value
+ MODE_UNSPECIFIED = 0
+
+ NULLABLE = 1
+
+ REQUIRED = 2
+
+ REPEATED = 3
+ end
+ end
+ end
+ end
+ end
+ end
+ end
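
These new schema messages can be built directly; a sketch constructing a `TableSchema` whose NUMERIC field respects the documented precision/scale constraint (0 <= scale <= 9 and 1 <= precision - scale <= 29):

    field = Google::Cloud::Bigquery::Storage::V1::TableFieldSchema

    schema = Google::Cloud::Bigquery::Storage::V1::TableSchema.new(
      fields: [
        field.new(name: "id",     type: :INT64,   mode: :REQUIRED),
        field.new(name: "label",  type: :STRING,  mode: :NULLABLE, max_length: 64),
        # precision 20, scale 9: 20 - 9 = 11 lies in 1..29 and 9 lies in 0..9.
        field.new(name: "amount", type: :NUMERIC, mode: :NULLABLE, precision: 20, scale: 9),
        field.new(name: "tags",   type: :STRING,  mode: :REPEATED)
      ]
    )

    puts schema.fields.map(&:name).inspect
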
@@ -0,0 +1,141 @@
+ # frozen_string_literal: true
+
+ # Copyright 2021 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+
+ module Google
+ module Protobuf
+ # `Any` contains an arbitrary serialized protocol buffer message along with a
+ # URL that describes the type of the serialized message.
+ #
+ # Protobuf library provides support to pack/unpack Any values in the form
+ # of utility functions or additional generated methods of the Any type.
+ #
+ # Example 1: Pack and unpack a message in C++.
+ #
+ # Foo foo = ...;
+ # Any any;
+ # any.PackFrom(foo);
+ # ...
+ # if (any.UnpackTo(&foo)) {
+ # ...
+ # }
+ #
+ # Example 2: Pack and unpack a message in Java.
+ #
+ # Foo foo = ...;
+ # Any any = Any.pack(foo);
+ # ...
+ # if (any.is(Foo.class)) {
+ # foo = any.unpack(Foo.class);
+ # }
+ #
+ # Example 3: Pack and unpack a message in Python.
+ #
+ # foo = Foo(...)
+ # any = Any()
+ # any.Pack(foo)
+ # ...
+ # if any.Is(Foo.DESCRIPTOR):
+ # any.Unpack(foo)
+ # ...
+ #
+ # Example 4: Pack and unpack a message in Go
+ #
+ # foo := &pb.Foo{...}
+ # any, err := anypb.New(foo)
+ # if err != nil {
+ # ...
+ # }
+ # ...
+ # foo := &pb.Foo{}
+ # if err := any.UnmarshalTo(foo); err != nil {
+ # ...
+ # }
+ #
+ # The pack methods provided by protobuf library will by default use
+ # 'type.googleapis.com/full.type.name' as the type URL and the unpack
+ # methods only use the fully qualified type name after the last '/'
+ # in the type URL, for example "foo.bar.com/x/y.z" will yield type
+ # name "y.z".
+ #
+ #
+ # JSON
+ # ====
+ # The JSON representation of an `Any` value uses the regular
+ # representation of the deserialized, embedded message, with an
+ # additional field `@type` which contains the type URL. Example:
+ #
+ # package google.profile;
+ # message Person {
+ # string first_name = 1;
+ # string last_name = 2;
+ # }
+ #
+ # {
+ # "@type": "type.googleapis.com/google.profile.Person",
+ # "firstName": <string>,
+ # "lastName": <string>
+ # }
+ #
+ # If the embedded message type is well-known and has a custom JSON
+ # representation, that representation will be embedded adding a field
+ # `value` which holds the custom JSON in addition to the `@type`
+ # field. Example (for message [google.protobuf.Duration][]):
+ #
+ # {
+ # "@type": "type.googleapis.com/google.protobuf.Duration",
+ # "value": "1.212s"
+ # }
+ # @!attribute [rw] type_url
+ # @return [::String]
+ # A URL/resource name that uniquely identifies the type of the serialized
+ # protocol buffer message. This string must contain at least
+ # one "/" character. The last segment of the URL's path must represent
+ # the fully qualified name of the type (as in
+ # `path/google.protobuf.Duration`). The name should be in a canonical form
+ # (e.g., leading "." is not accepted).
+ #
+ # In practice, teams usually precompile into the binary all types that they
+ # expect it to use in the context of Any. However, for URLs which use the
+ # scheme `http`, `https`, or no scheme, one can optionally set up a type
+ # server that maps type URLs to message definitions as follows:
+ #
+ # * If no scheme is provided, `https` is assumed.
+ # * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+ # value in binary format, or produce an error.
+ # * Applications are allowed to cache lookup results based on the
+ # URL, or have them precompiled into a binary to avoid any
+ # lookup. Therefore, binary compatibility needs to be preserved
+ # on changes to types. (Use versioned type names to manage
+ # breaking changes.)
+ #
+ # Note: this functionality is not currently available in the official
+ # protobuf release, and it is not used for type URLs beginning with
+ # type.googleapis.com.
+ #
+ # Schemes other than `http`, `https` (or the empty scheme) might be
+ # used with implementation specific semantics.
+ # @!attribute [rw] value
+ # @return [::String]
+ # Must be a valid serialized protocol buffer of the above specified type.
+ class Any
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+ end
+ end
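
The examples above cover C++, Java, Python, and Go; the Ruby equivalent uses the helpers that `google/protobuf/well_known_types` mixes into `Any`. A sketch, with `StorageError` standing in for `Foo` and placeholder resource names:

    require "google/cloud/bigquery/storage/v1"
    require "google/protobuf/well_known_types"

    err = Google::Cloud::Bigquery::Storage::V1::StorageError.new(
      code:          :STREAM_FINALIZED,
      entity:        "projects/my-project/datasets/my_dataset/tables/my_table/streams/123",
      error_message: "stream is finalized"
    )

    any = Google::Protobuf::Any.pack(err)
    puts any.type_url  # => type.googleapis.com/google.cloud.bigquery.storage.v1.StorageError

    if (unpacked = any.unpack(Google::Cloud::Bigquery::Storage::V1::StorageError))
      puts unpacked.error_message
    end
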