google-cloud-bigquery-data_transfer 0.9.0 → 1.0.0

Files changed (36)
  1. checksums.yaml +4 -4
  2. data/.yardopts +2 -1
  3. data/AUTHENTICATION.md +51 -54
  4. data/LICENSE.md +203 -0
  5. data/MIGRATING.md +301 -0 (breaking API changes; see the sketch after this list)
  6. data/README.md +33 -45
  7. data/lib/{google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/datatransfer/v1/datasource.rb → google-cloud-bigquery-data_transfer.rb} +5 -11
  8. data/lib/google/cloud/bigquery/data_transfer.rb +82 -140
  9. data/lib/google/cloud/bigquery/data_transfer/version.rb +6 -2
  10. metadata +85 -65
  11. data/LICENSE +0 -201
  12. data/lib/google/cloud/bigquery/data_transfer/credentials.rb +0 -33
  13. data/lib/google/cloud/bigquery/data_transfer/v1.rb +0 -173
  14. data/lib/google/cloud/bigquery/data_transfer/v1/credentials.rb +0 -43
  15. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_pb.rb +0 -190
  16. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service_client.rb +0 -1230
  17. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service_client_config.json +0 -96
  18. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_services_pb.rb +0 -87
  19. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/data_transfer/v1/data_transfer.rb +0 -500
  20. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/data_transfer/v1/transfer.rb +0 -217
  21. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/datatransfer/v1/datatransfer.rb +0 -570
  22. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/datatransfer/v1/transfer.rb +0 -257
  23. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/any.rb +0 -131
  24. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/duration.rb +0 -91
  25. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/empty.rb +0 -29
  26. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/field_mask.rb +0 -222
  27. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/struct.rb +0 -74
  28. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/timestamp.rb +0 -113
  29. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/wrappers.rb +0 -26
  30. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/rpc/status.rb +0 -39
  31. data/lib/google/cloud/bigquery/data_transfer/v1/transfer_pb.rb +0 -83
  32. data/lib/google/cloud/bigquery/datatransfer/v1/datasource_pb.rb +0 -170
  33. data/lib/google/cloud/bigquery/datatransfer/v1/datasource_services_pb.rb +0 -103
  34. data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb +0 -217
  35. data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb +0 -94
  36. data/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb +0 -104
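The new MIGRATING.md (item 5 above) documents the headline change in this release: the hand-written 0.9 client surface is removed (all the `+0 -N` deletions below) in favor of a thin wrapper over the generated google-cloud-bigquery-data_transfer-v1 gem. A minimal before/after sketch, assuming the 1.0 wrapper's factory-method pattern; the project and config IDs are placeholders:

```ruby
require "google/cloud/bigquery/data_transfer"

# Before (0.9.x): a versioned client was built with DataTransfer.new.
# client = Google::Cloud::Bigquery::DataTransfer.new version: :v1

# After (1.0.0): each service is exposed through a factory method.
client = Google::Cloud::Bigquery::DataTransfer.data_transfer_service

# Resource paths move from class-level helpers to methods on the client,
# and RPCs take keyword arguments instead of positional ones.
name = client.transfer_config_path project:         "my-project",
                                   transfer_config: "my-config"
config = client.get_transfer_config name: name
puts config.display_name
```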
data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/datatransfer/v1/transfer.rb (deleted)
@@ -1,257 +0,0 @@
- # Copyright 2020 Google LLC
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     https://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
-
- module Google
-   module Cloud
-     module Bigquery
-       module DataTransfer
-         module V1
-           # Represents preferences for sending email notifications for transfer run
-           # events.
-           # @!attribute [rw] enable_failure_email
-           #   @return [true, false]
-           #     If true, email notifications will be sent on transfer run failures.
-           class EmailPreferences; end
-
-           # Options customizing the data transfer schedule.
-           # @!attribute [rw] disable_auto_scheduling
-           #   @return [true, false]
-           #     If true, automatic scheduling of data transfer runs for this configuration
-           #     will be disabled. The runs can be started on an ad-hoc basis using the
-           #     StartManualTransferRuns API. When automatic scheduling is disabled, the
-           #     TransferConfig.schedule field will be ignored.
-           # @!attribute [rw] start_time
-           #   @return [Google::Protobuf::Timestamp]
-           #     Specifies time to start scheduling transfer runs. The first run will be
-           #     scheduled at or after the start time according to a recurrence pattern
-           #     defined in the schedule string. The start time can be changed at any
-           #     moment. The time when a data transfer can be triggered manually is not
-           #     limited by this option.
-           # @!attribute [rw] end_time
-           #   @return [Google::Protobuf::Timestamp]
-           #     Defines time to stop scheduling transfer runs. A transfer run cannot be
-           #     scheduled at or after the end time. The end time can be changed at any
-           #     moment. The time when a data transfer can be triggered manually is not
-           #     limited by this option.
-           class ScheduleOptions; end
-
-           # Represents a data transfer configuration. A transfer configuration
-           # contains all metadata needed to perform a data transfer. For example,
-           # `destination_dataset_id` specifies where data should be stored.
-           # When a new transfer configuration is created, the specified
-           # `destination_dataset_id` is created when needed and shared with the
-           # appropriate data source service account.
-           # @!attribute [rw] name
-           #   @return [String]
-           #     The resource name of the transfer config.
-           #     Transfer config names have the form of
-           #     `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
-           #     The name is automatically generated based on the config_id specified in
-           #     CreateTransferConfigRequest along with project_id and region. If config_id
-           #     is not provided, one is generated for it (usually a UUID, though this is
-           #     neither guaranteed nor required).
-           # @!attribute [rw] destination_dataset_id
-           #   @return [String]
-           #     The BigQuery target dataset id.
-           # @!attribute [rw] display_name
-           #   @return [String]
-           #     User specified display name for the data transfer.
-           # @!attribute [rw] data_source_id
-           #   @return [String]
-           #     Data source id. Cannot be changed once data transfer is created.
-           # @!attribute [rw] params
-           #   @return [Google::Protobuf::Struct]
-           #     Data transfer specific parameters.
-           # @!attribute [rw] schedule
-           #   @return [String]
-           #     Data transfer schedule.
-           #     If the data source does not support a custom schedule, this should be
-           #     empty. If it is empty, the default value for the data source will be
-           #     used.
-           #     The specified times are in UTC.
-           #     Examples of valid format:
-           #     `1st,3rd monday of month 15:30`,
-           #     `every wed,fri of jan,jun 13:15`, and
-           #     `first sunday of quarter 00:00`.
-           #     See more explanation about the format here:
-           #     https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
-           #     NOTE: the granularity should be at least 8 hours, or less frequent.
-           # @!attribute [rw] schedule_options
-           #   @return [Google::Cloud::Bigquery::DataTransfer::V1::ScheduleOptions]
-           #     Options customizing the data transfer schedule.
-           # @!attribute [rw] data_refresh_window_days
-           #   @return [Integer]
-           #     The number of days to look back to automatically refresh the data.
-           #     For example, if `data_refresh_window_days = 10`, then every day
-           #     BigQuery reingests data for [today-10, today-1], rather than ingesting data
-           #     for just [today-1].
-           #     Only valid if the data source supports the feature. Set the value to 0
-           #     to use the default value.
-           # @!attribute [rw] disabled
-           #   @return [true, false]
-           #     Whether this config is disabled. When set to true, no runs are scheduled
-           #     for a given transfer.
-           # @!attribute [rw] update_time
-           #   @return [Google::Protobuf::Timestamp]
-           #     Output only. Data transfer modification time. Ignored by server on input.
-           # @!attribute [rw] next_run_time
-           #   @return [Google::Protobuf::Timestamp]
-           #     Output only. Next time when data transfer will run.
-           # @!attribute [rw] state
-           #   @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferState]
-           #     Output only. State of the most recently updated transfer run.
-           # @!attribute [rw] user_id
-           #   @return [Integer]
-           #     Deprecated. Unique ID of the user on whose behalf transfer is done.
-           # @!attribute [rw] dataset_region
-           #   @return [String]
-           #     Output only. Region in which BigQuery dataset is located.
-           # @!attribute [rw] notification_pubsub_topic
-           #   @return [String]
-           #     Pub/Sub topic where notifications will be sent after transfer runs
-           #     associated with this transfer config finish.
-           # @!attribute [rw] email_preferences
-           #   @return [Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences]
-           #     Email notifications will be sent according to these preferences
-           #     to the email address of the user who owns this transfer config.
-           class TransferConfig; end
-
-           # Represents a data transfer run.
-           # @!attribute [rw] name
-           #   @return [String]
-           #     The resource name of the transfer run.
-           #     Transfer run names have the form
-           #     `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
-           #     The name is ignored when creating a transfer run.
-           # @!attribute [rw] schedule_time
-           #   @return [Google::Protobuf::Timestamp]
-           #     Minimum time after which a transfer run can be started.
-           # @!attribute [rw] run_time
-           #   @return [Google::Protobuf::Timestamp]
-           #     For batch transfer runs, specifies the date and time that the data should be
-           #     ingested.
-           # @!attribute [rw] error_status
-           #   @return [Google::Rpc::Status]
-           #     Status of the transfer run.
-           # @!attribute [rw] start_time
-           #   @return [Google::Protobuf::Timestamp]
-           #     Output only. Time when transfer run was started.
-           #     Parameter ignored by server for input requests.
-           # @!attribute [rw] end_time
-           #   @return [Google::Protobuf::Timestamp]
-           #     Output only. Time when transfer run ended.
-           #     Parameter ignored by server for input requests.
-           # @!attribute [rw] update_time
-           #   @return [Google::Protobuf::Timestamp]
-           #     Output only. Last time the data transfer run state was updated.
-           # @!attribute [rw] params
-           #   @return [Google::Protobuf::Struct]
-           #     Output only. Data transfer specific parameters.
-           # @!attribute [rw] destination_dataset_id
-           #   @return [String]
-           #     Output only. The BigQuery target dataset id.
-           # @!attribute [rw] data_source_id
-           #   @return [String]
-           #     Output only. Data source id.
-           # @!attribute [rw] state
-           #   @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferState]
-           #     Data transfer run state. Ignored for input requests.
-           # @!attribute [rw] user_id
-           #   @return [Integer]
-           #     Deprecated. Unique ID of the user on whose behalf transfer is done.
-           # @!attribute [rw] schedule
-           #   @return [String]
-           #     Output only. Describes the schedule of this transfer run if it was
-           #     created as part of a regular schedule. For batch transfer runs that are
-           #     scheduled manually, this is empty.
-           #     NOTE: the system might choose to delay the schedule depending on the
-           #     current load, so `schedule_time` doesn't always match this.
-           # @!attribute [rw] notification_pubsub_topic
-           #   @return [String]
-           #     Output only. Pub/Sub topic where a notification will be sent after this
-           #     transfer run finishes.
-           # @!attribute [rw] email_preferences
-           #   @return [Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences]
-           #     Output only. Email notifications will be sent according to these
-           #     preferences to the email address of the user who owns the transfer config
-           #     this run was derived from.
-           class TransferRun; end
-
-           # Represents a user facing message for a particular data transfer run.
-           # @!attribute [rw] message_time
-           #   @return [Google::Protobuf::Timestamp]
-           #     Time when message was logged.
-           # @!attribute [rw] severity
-           #   @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity]
-           #     Message severity.
-           # @!attribute [rw] message_text
-           #   @return [String]
-           #     Message text.
-           class TransferMessage
-             # Represents data transfer user facing message severity.
-             module MessageSeverity
-               # No severity specified.
-               MESSAGE_SEVERITY_UNSPECIFIED = 0
-
-               # Informational message.
-               INFO = 1
-
-               # Warning message.
-               WARNING = 2
-
-               # Error message.
-               ERROR = 3
-             end
-           end
-
-           # Represents data transfer run state.
-           module TransferState
-             # State placeholder.
-             TRANSFER_STATE_UNSPECIFIED = 0
-
-             # Data transfer is scheduled and is waiting to be picked up by
-             # data transfer backend.
-             PENDING = 2
-
-             # Data transfer is in progress.
-             RUNNING = 3
-
-             # Data transfer completed successfully.
-             SUCCEEDED = 4
-
-             # Data transfer failed.
-             FAILED = 5
-
-             # Data transfer is cancelled.
-             CANCELLED = 6
-           end
-
-           # DEPRECATED. Represents data transfer type.
-           module TransferType
-             # Invalid or Unknown transfer type placeholder.
-             TRANSFER_TYPE_UNSPECIFIED = 0
-
-             # Batch data transfer.
-             BATCH = 1
-
-             # Streaming data transfer. Streaming data source currently doesn't
-             # support multiple transfer configs per project.
-             STREAMING = 2
-           end
-         end
-       end
-     end
-   end
- end
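The message types documented in this deleted file (EmailPreferences, ScheduleOptions, TransferConfig, TransferRun, and the TransferState values) live on in the generated -v1 gem. As an illustration of the fields described above, a sketch of creating a scheduled-query transfer config through the 1.0 client; the project, dataset, and query are hypothetical:

```ruby
require "google/cloud/bigquery/data_transfer"
require "google/protobuf/well_known_types"

client = Google::Cloud::Bigquery::DataTransfer.data_transfer_service

# `params` is a Google::Protobuf::Struct; Struct.from_hash is one way to build it.
params = Google::Protobuf::Struct.from_hash "query" => "SELECT CURRENT_DATE()"

config = client.create_transfer_config(
  parent: "projects/my-project",               # placeholder project
  transfer_config: {
    destination_dataset_id: "my_dataset",      # placeholder dataset
    display_name:           "Nightly ingest",
    data_source_id:         "scheduled_query",
    schedule:               "every 24 hours",  # note the 8-hour minimum granularity above
    params:                 params,
    email_preferences:      { enable_failure_email: true }
  }
)
puts config.name # server-generated, as described for TransferConfig#name above
```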
data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/any.rb (deleted)
@@ -1,131 +0,0 @@
- # Copyright 2020 Google LLC
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     https://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
-
- module Google
-   module Protobuf
-     # `Any` contains an arbitrary serialized protocol buffer message along with a
-     # URL that describes the type of the serialized message.
-     #
-     # The Protobuf library provides support to pack/unpack Any values in the form
-     # of utility functions or additional generated methods of the Any type.
-     #
-     # Example 1: Pack and unpack a message in C++.
-     #
-     #     Foo foo = ...;
-     #     Any any;
-     #     any.PackFrom(foo);
-     #     ...
-     #     if (any.UnpackTo(&foo)) {
-     #       ...
-     #     }
-     #
-     # Example 2: Pack and unpack a message in Java.
-     #
-     #     Foo foo = ...;
-     #     Any any = Any.pack(foo);
-     #     ...
-     #     if (any.is(Foo.class)) {
-     #       foo = any.unpack(Foo.class);
-     #     }
-     #
-     # Example 3: Pack and unpack a message in Python.
-     #
-     #     foo = Foo(...)
-     #     any = Any()
-     #     any.Pack(foo)
-     #     ...
-     #     if any.Is(Foo.DESCRIPTOR):
-     #       any.Unpack(foo)
-     #       ...
-     #
-     # Example 4: Pack and unpack a message in Go.
-     #
-     #     foo := &pb.Foo{...}
-     #     any, err := ptypes.MarshalAny(foo)
-     #     ...
-     #     foo := &pb.Foo{}
-     #     if err := ptypes.UnmarshalAny(any, foo); err != nil {
-     #       ...
-     #     }
-     #
-     # The pack methods provided by the protobuf library will by default use
-     # 'type.googleapis.com/full.type.name' as the type URL and the unpack
-     # methods only use the fully qualified type name after the last '/'
-     # in the type URL; for example, "foo.bar.com/x/y.z" will yield type
-     # name "y.z".
-     #
-     # = JSON
-     #
-     # The JSON representation of an `Any` value uses the regular
-     # representation of the deserialized, embedded message, with an
-     # additional field `@type` which contains the type URL. Example:
-     #
-     #     package google.profile;
-     #     message Person {
-     #       string first_name = 1;
-     #       string last_name = 2;
-     #     }
-     #
-     #     {
-     #       "@type": "type.googleapis.com/google.profile.Person",
-     #       "firstName": <string>,
-     #       "lastName": <string>
-     #     }
-     #
-     # If the embedded message type is well-known and has a custom JSON
-     # representation, that representation will be embedded, adding a field
-     # `value` which holds the custom JSON in addition to the `@type`
-     # field. Example (for message {Google::Protobuf::Duration}):
-     #
-     #     {
-     #       "@type": "type.googleapis.com/google.protobuf.Duration",
-     #       "value": "1.212s"
-     #     }
-     # @!attribute [rw] type_url
-     #   @return [String]
-     #     A URL/resource name that uniquely identifies the type of the serialized
-     #     protocol buffer message. This string must contain at least
-     #     one "/" character. The last segment of the URL's path must represent
-     #     the fully qualified name of the type (as in
-     #     `path/google.protobuf.Duration`). The name should be in a canonical form
-     #     (e.g., leading "." is not accepted).
-     #
-     #     In practice, teams usually precompile into the binary all types that they
-     #     expect it to use in the context of Any. However, for URLs which use the
-     #     scheme `http`, `https`, or no scheme, one can optionally set up a type
-     #     server that maps type URLs to message definitions as follows:
-     #
-     #     * If no scheme is provided, `https` is assumed.
-     #     * An HTTP GET on the URL must yield a {Google::Protobuf::Type}
-     #       value in binary format, or produce an error.
-     #     * Applications are allowed to cache lookup results based on the
-     #       URL, or have them precompiled into a binary to avoid any
-     #       lookup. Therefore, binary compatibility needs to be preserved
-     #       on changes to types. (Use versioned type names to manage
-     #       breaking changes.)
-     #
-     #     Note: this functionality is not currently available in the official
-     #     protobuf release, and it is not used for type URLs beginning with
-     #     type.googleapis.com.
-     #
-     #     Schemes other than `http`, `https` (or the empty scheme) might be
-     #     used with implementation specific semantics.
-     # @!attribute [rw] value
-     #   @return [String]
-     #     Must be a valid serialized protocol buffer of the above specified type.
-     class Any; end
-   end
- end
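The deleted doc shows pack/unpack for C++, Java, Python, and Go but not Ruby. For reference, the google-protobuf gem ships equivalent helpers with its well-known types; a minimal sketch, with Timestamp standing in for the `Foo` message above:

```ruby
require "google/protobuf/well_known_types"

ts  = Google::Protobuf::Timestamp.new seconds: 1_500_000_000
any = Google::Protobuf::Any.pack ts

# The default prefix is "type.googleapis.com/", as described above.
puts any.type_url # => "type.googleapis.com/google.protobuf.Timestamp"

# `is` guards the unpack, mirroring the Java example; `unpack` returns
# nil when the payload is of a different type.
if any.is Google::Protobuf::Timestamp
  puts any.unpack(Google::Protobuf::Timestamp).seconds # => 1500000000
end
```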
data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/duration.rb (deleted)
@@ -1,91 +0,0 @@
- # Copyright 2020 Google LLC
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     https://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
-
- module Google
-   module Protobuf
-     # A Duration represents a signed, fixed-length span of time represented
-     # as a count of seconds and fractions of seconds at nanosecond
-     # resolution. It is independent of any calendar and concepts like "day"
-     # or "month". It is related to Timestamp in that the difference between
-     # two Timestamp values is a Duration and it can be added or subtracted
-     # from a Timestamp. Range is approximately +-10,000 years.
-     #
-     # = Examples
-     #
-     # Example 1: Compute Duration from two Timestamps in pseudo code.
-     #
-     #     Timestamp start = ...;
-     #     Timestamp end = ...;
-     #     Duration duration = ...;
-     #
-     #     duration.seconds = end.seconds - start.seconds;
-     #     duration.nanos = end.nanos - start.nanos;
-     #
-     #     if (duration.seconds < 0 && duration.nanos > 0) {
-     #       duration.seconds += 1;
-     #       duration.nanos -= 1000000000;
-     #     } else if (duration.seconds > 0 && duration.nanos < 0) {
-     #       duration.seconds -= 1;
-     #       duration.nanos += 1000000000;
-     #     }
-     #
-     # Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
-     #
-     #     Timestamp start = ...;
-     #     Duration duration = ...;
-     #     Timestamp end = ...;
-     #
-     #     end.seconds = start.seconds + duration.seconds;
-     #     end.nanos = start.nanos + duration.nanos;
-     #
-     #     if (end.nanos < 0) {
-     #       end.seconds -= 1;
-     #       end.nanos += 1000000000;
-     #     } else if (end.nanos >= 1000000000) {
-     #       end.seconds += 1;
-     #       end.nanos -= 1000000000;
-     #     }
-     #
-     # Example 3: Compute Duration from datetime.timedelta in Python.
-     #
-     #     td = datetime.timedelta(days=3, minutes=10)
-     #     duration = Duration()
-     #     duration.FromTimedelta(td)
-     #
-     # = JSON Mapping
-     #
-     # In JSON format, the Duration type is encoded as a string rather than an
-     # object, where the string ends in the suffix "s" (indicating seconds) and
-     # is preceded by the number of seconds, with nanoseconds expressed as
-     # fractional seconds. For example, 3 seconds with 0 nanoseconds should be
-     # encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
-     # be expressed in JSON format as "3.000000001s", and 3 seconds and 1
-     # microsecond should be expressed in JSON format as "3.000001s".
-     # @!attribute [rw] seconds
-     #   @return [Integer]
-     #     Signed seconds of the span of time. Must be from -315,576,000,000
-     #     to +315,576,000,000 inclusive. Note: these bounds are computed from:
-     #     60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
-     # @!attribute [rw] nanos
-     #   @return [Integer]
-     #     Signed fractions of a second at nanosecond resolution of the span
-     #     of time. Durations less than one second are represented with a 0
-     #     `seconds` field and a positive or negative `nanos` field. For durations
-     #     of one second or more, a non-zero value for the `nanos` field must be
-     #     of the same sign as the `seconds` field. Must be from -999,999,999
-     #     to +999,999,999 inclusive.
-     class Duration; end
-   end
- end
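For completeness, Example 1 and the JSON mapping translate to Ruby roughly as follows; the timestamps are arbitrary, and the single borrow step assumes `end_ts >= start_ts`:

```ruby
require "google/protobuf/well_known_types"

start_ts = Google::Protobuf::Timestamp.new seconds: 10, nanos: 900_000_000
end_ts   = Google::Protobuf::Timestamp.new seconds: 13, nanos: 100_000_000

seconds = end_ts.seconds - start_ts.seconds
nanos   = end_ts.nanos   - start_ts.nanos
if nanos.negative? # normalize, mirroring the pseudo code above
  seconds -= 1
  nanos   += 1_000_000_000
end

duration = Google::Protobuf::Duration.new seconds: seconds, nanos: nanos
puts duration.to_f    # => 2.2 (Duration#to_f comes from well_known_types)
puts duration.to_json # => "2.200s", the string encoding described above
```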