google-cloud-bigquery-data_transfer-v1 0.1.0

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. checksums.yaml +7 -0
  2. data/.yardopts +12 -0
  3. data/AUTHENTICATION.md +169 -0
  4. data/LICENSE.md +203 -0
  5. data/README.md +71 -0
  6. data/lib/google-cloud-bigquery-data_transfer-v1.rb +21 -0
  7. data/lib/google/cloud/bigquery/data_transfer/v1.rb +37 -0
  8. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service.rb +54 -0
  9. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb +1567 -0
  10. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials.rb +53 -0
  11. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/paths.rb +193 -0
  12. data/lib/google/cloud/bigquery/data_transfer/v1/version.rb +30 -0
  13. data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb +217 -0
  14. data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb +92 -0
  15. data/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb +104 -0
  16. data/lib/google/cloud/common_resources_pb.rb +15 -0
  17. data/proto_docs/README.md +4 -0
  18. data/proto_docs/google/api/field_behavior.rb +59 -0
  19. data/proto_docs/google/api/resource.rb +247 -0
  20. data/proto_docs/google/cloud/bigquery/datatransfer/v1/datatransfer.rb +646 -0
  21. data/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb +276 -0
  22. data/proto_docs/google/protobuf/any.rb +138 -0
  23. data/proto_docs/google/protobuf/duration.rb +98 -0
  24. data/proto_docs/google/protobuf/empty.rb +36 -0
  25. data/proto_docs/google/protobuf/field_mask.rb +229 -0
  26. data/proto_docs/google/protobuf/struct.rb +96 -0
  27. data/proto_docs/google/protobuf/timestamp.rb +120 -0
  28. data/proto_docs/google/protobuf/wrappers.rb +121 -0
  29. data/proto_docs/google/rpc/status.rb +46 -0
  30. metadata +183 -0
data/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb
@@ -0,0 +1,276 @@
+ # frozen_string_literal: true
+
+ # Copyright 2020 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+
+ module Google
+ module Cloud
+ module Bigquery
+ module DataTransfer
+ module V1
+ # Represents preferences for sending email notifications for transfer run
+ # events.
+ # @!attribute [rw] enable_failure_email
+ # @return [Boolean]
+ # If true, email notifications will be sent on transfer run failures.
+ class EmailPreferences
+ include Google::Protobuf::MessageExts
+ extend Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # Options customizing the data transfer schedule.
+ # @!attribute [rw] disable_auto_scheduling
+ # @return [Boolean]
+ # If true, automatic scheduling of data transfer runs for this configuration
+ # will be disabled. The runs can be started on an ad-hoc basis using the
+ # StartManualTransferRuns API. When automatic scheduling is disabled, the
+ # TransferConfig.schedule field will be ignored.
+ # @!attribute [rw] start_time
+ # @return [Google::Protobuf::Timestamp]
+ # Specifies time to start scheduling transfer runs. The first run will be
+ # scheduled at or after the start time according to a recurrence pattern
+ # defined in the schedule string. The start time can be changed at any
+ # moment. The time when a data transfer can be triggered manually is not
+ # limited by this option.
+ # @!attribute [rw] end_time
+ # @return [Google::Protobuf::Timestamp]
+ # Defines time to stop scheduling transfer runs. A transfer run cannot be
+ # scheduled at or after the end time. The end time can be changed at any
+ # moment. The time when a data transfer can be triggered manually is not
+ # limited by this option.
+ class ScheduleOptions
+ include Google::Protobuf::MessageExts
+ extend Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # Represents a data transfer configuration. A transfer configuration
+ # contains all metadata needed to perform a data transfer. For example,
+ # `destination_dataset_id` specifies where data should be stored.
+ # When a new transfer configuration is created, the specified
+ # `destination_dataset_id` is created when needed and shared with the
+ # appropriate data source service account.
+ # @!attribute [rw] name
+ # @return [String]
+ # The resource name of the transfer config.
+ # Transfer config names have the form of
+ # `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
+ # The name is automatically generated based on the config_id specified in
+ # CreateTransferConfigRequest along with project_id and region. If config_id
+ # is not provided, a uuid will usually be generated for config_id, even
+ # though this is not guaranteed or required.
+ # @!attribute [rw] destination_dataset_id
+ # @return [String]
+ # The BigQuery target dataset id.
+ # @!attribute [rw] display_name
+ # @return [String]
+ # User specified display name for the data transfer.
+ # @!attribute [rw] data_source_id
+ # @return [String]
+ # Data source id. Cannot be changed once data transfer is created.
+ # @!attribute [rw] params
+ # @return [Google::Protobuf::Struct]
+ # Data transfer specific parameters.
+ # @!attribute [rw] schedule
+ # @return [String]
+ # Data transfer schedule.
+ # If the data source does not support a custom schedule, this should be
+ # empty. If it is empty, the default value for the data source will be
+ # used.
+ # The specified times are in UTC.
+ # Examples of valid format:
+ # `1st,3rd monday of month 15:30`,
+ # `every wed,fri of jan,jun 13:15`, and
+ # `first sunday of quarter 00:00`.
+ # See more explanation about the format here:
+ # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
+ # NOTE: the granularity should be at least 8 hours, or less frequent.
+ # @!attribute [rw] schedule_options
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::ScheduleOptions]
+ # Options customizing the data transfer schedule.
+ # @!attribute [rw] data_refresh_window_days
+ # @return [Integer]
+ # The number of days to look back to automatically refresh the data.
+ # For example, if `data_refresh_window_days = 10`, then every day
+ # BigQuery reingests data for [today-10, today-1], rather than ingesting data
+ # for just [today-1].
+ # Only valid if the data source supports the feature. Set the value to 0
+ # to use the default value.
+ # @!attribute [rw] disabled
+ # @return [Boolean]
+ # Whether this config is disabled. When set to true, no runs are scheduled
+ # for a given transfer.
+ # @!attribute [r] update_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Data transfer modification time. Ignored by server on input.
+ # @!attribute [r] next_run_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Next time when data transfer will run.
+ # @!attribute [r] state
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferState]
+ # Output only. State of the most recently updated transfer run.
+ # @!attribute [rw] user_id
+ # @return [Integer]
+ # Deprecated. Unique ID of the user on whose behalf transfer is done.
+ # @!attribute [r] dataset_region
+ # @return [String]
+ # Output only. Region in which BigQuery dataset is located.
+ # @!attribute [rw] notification_pubsub_topic
+ # @return [String]
+ # Pub/Sub topic where notifications will be sent after transfer runs
+ # associated with this transfer config finish.
+ # @!attribute [rw] email_preferences
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences]
+ # Email notifications will be sent according to these preferences
+ # to the email address of the user who owns this transfer config.
+ class TransferConfig
+ include Google::Protobuf::MessageExts
+ extend Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # Represents a data transfer run.
+ # @!attribute [rw] name
+ # @return [String]
+ # The resource name of the transfer run.
+ # Transfer run names have the form
+ # `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
+ # The name is ignored when creating a transfer run.
+ # @!attribute [rw] schedule_time
+ # @return [Google::Protobuf::Timestamp]
+ # Minimum time after which a transfer run can be started.
+ # @!attribute [rw] run_time
+ # @return [Google::Protobuf::Timestamp]
+ # For batch transfer runs, specifies the date and time when the data should
+ # be ingested.
+ # @!attribute [rw] error_status
+ # @return [Google::Rpc::Status]
+ # Status of the transfer run.
+ # @!attribute [r] start_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Time when transfer run was started.
+ # Parameter ignored by server for input requests.
+ # @!attribute [r] end_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Time when transfer run ended.
+ # Parameter ignored by server for input requests.
+ # @!attribute [r] update_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Last time the data transfer run state was updated.
+ # @!attribute [r] params
+ # @return [Google::Protobuf::Struct]
+ # Output only. Data transfer specific parameters.
+ # @!attribute [r] destination_dataset_id
+ # @return [String]
+ # Output only. The BigQuery target dataset id.
+ # @!attribute [r] data_source_id
+ # @return [String]
+ # Output only. Data source id.
+ # @!attribute [rw] state
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferState]
+ # Data transfer run state. Ignored for input requests.
+ # @!attribute [rw] user_id
+ # @return [Integer]
+ # Deprecated. Unique ID of the user on whose behalf transfer is done.
+ # @!attribute [r] schedule
+ # @return [String]
+ # Output only. Describes the schedule of this transfer run if it was
+ # created as part of a regular schedule. For batch transfer runs that are
+ # scheduled manually, this is empty.
+ # NOTE: the system might choose to delay the schedule depending on the
+ # current load, so `schedule_time` doesn't always match this.
+ # @!attribute [r] notification_pubsub_topic
+ # @return [String]
+ # Output only. Pub/Sub topic where a notification will be sent after this
+ # transfer run finishes.
+ # @!attribute [r] email_preferences
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences]
+ # Output only. Email notifications will be sent according to these
+ # preferences to the email address of the user who owns the transfer config
+ # this run was derived from.
+ class TransferRun
+ include Google::Protobuf::MessageExts
+ extend Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # Represents a user facing message for a particular data transfer run.
+ # @!attribute [rw] message_time
+ # @return [Google::Protobuf::Timestamp]
+ # Time when message was logged.
+ # @!attribute [rw] severity
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity]
+ # Message severity.
+ # @!attribute [rw] message_text
+ # @return [String]
+ # Message text.
+ class TransferMessage
+ include Google::Protobuf::MessageExts
+ extend Google::Protobuf::MessageExts::ClassMethods
+
+ # Represents data transfer user facing message severity.
+ module MessageSeverity
+ # No severity specified.
+ MESSAGE_SEVERITY_UNSPECIFIED = 0
+
+ # Informational message.
+ INFO = 1
+
+ # Warning message.
+ WARNING = 2
+
+ # Error message.
+ ERROR = 3
+ end
+ end
+
+ # DEPRECATED. Represents data transfer type.
+ module TransferType
+ # Invalid or Unknown transfer type placeholder.
+ TRANSFER_TYPE_UNSPECIFIED = 0
+
+ # Batch data transfer.
+ BATCH = 1
+
+ # Streaming data transfer. Streaming data source currently doesn't
+ # support multiple transfer configs per project.
+ STREAMING = 2
+ end
+
+ # Represents data transfer run state.
+ module TransferState
+ # State placeholder.
+ TRANSFER_STATE_UNSPECIFIED = 0
+
+ # Data transfer is scheduled and is waiting to be picked up by
+ # data transfer backend.
+ PENDING = 2
+
+ # Data transfer is in progress.
+ RUNNING = 3
+
+ # Data transfer completed successfully.
+ SUCCEEDED = 4
+
+ # Data transfer failed.
+ FAILED = 5
+
+ # Data transfer is cancelled.
+ CANCELLED = 6
+ end
+ end
+ end
+ end
+ end
+ end
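As a rough illustration of how the TransferConfig fields documented above fit together, here is a minimal sketch (not part of the gem's own files) that creates a transfer config through the generated DataTransferService client listed earlier (data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb). The project, location, data source id, schedule, and query parameter are hypothetical placeholders, and Struct.from_hash assumes the helpers from google/protobuf/well_known_types are available.

require "google/cloud/bigquery/data_transfer/v1"
require "google/protobuf/well_known_types"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Data-source-specific parameters are carried in a Google::Protobuf::Struct.
params = Google::Protobuf::Struct.from_hash("query" => "SELECT CURRENT_DATE()")

config = client.create_transfer_config(
  parent: "projects/my-project/locations/us",            # hypothetical project and region
  transfer_config: Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new(
    destination_dataset_id: "my_dataset",                 # BigQuery target dataset id
    display_name:           "Nightly scheduled query",    # user-specified display name
    data_source_id:         "scheduled_query",            # cannot be changed once created
    schedule:               "every 24 hours",             # UTC; granularity of 8 hours or coarser
    params:                 params,
    schedule_options:       { disable_auto_scheduling: false },
    email_preferences:      { enable_failure_email: true }
  )
)

puts config.name   # server-generated: projects/.../locations/us/transferConfigs/{config_id}
puts config.state  # state of the most recently updated transfer run

The nested hashes for schedule_options and email_preferences are coerced into the corresponding messages by the protobuf runtime, so the sketch only spells out the Struct field explicitly.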
data/proto_docs/google/protobuf/any.rb
@@ -0,0 +1,138 @@
+ # frozen_string_literal: true
+
+ # Copyright 2020 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+
+ module Google
+ module Protobuf
+ # `Any` contains an arbitrary serialized protocol buffer message along with a
+ # URL that describes the type of the serialized message.
+ #
+ # Protobuf library provides support to pack/unpack Any values in the form
+ # of utility functions or additional generated methods of the Any type.
+ #
+ # Example 1: Pack and unpack a message in C++.
+ #
+ # Foo foo = ...;
+ # Any any;
+ # any.PackFrom(foo);
+ # ...
+ # if (any.UnpackTo(&foo)) {
+ # ...
+ # }
+ #
+ # Example 2: Pack and unpack a message in Java.
+ #
+ # Foo foo = ...;
+ # Any any = Any.pack(foo);
+ # ...
+ # if (any.is(Foo.class)) {
+ # foo = any.unpack(Foo.class);
+ # }
+ #
+ # Example 3: Pack and unpack a message in Python.
+ #
+ # foo = Foo(...)
+ # any = Any()
+ # any.Pack(foo)
+ # ...
+ # if any.Is(Foo.DESCRIPTOR):
+ # any.Unpack(foo)
+ # ...
+ #
+ # Example 4: Pack and unpack a message in Go
+ #
+ # foo := &pb.Foo{...}
+ # any, err := ptypes.MarshalAny(foo)
+ # ...
+ # foo := &pb.Foo{}
+ # if err := ptypes.UnmarshalAny(any, foo); err != nil {
+ # ...
+ # }
+ #
+ # The pack methods provided by protobuf library will by default use
+ # 'type.googleapis.com/full.type.name' as the type URL and the unpack
+ # methods only use the fully qualified type name after the last '/'
+ # in the type URL, for example "foo.bar.com/x/y.z" will yield type
+ # name "y.z".
+ #
+ #
+ # JSON
+ # ====
+ # The JSON representation of an `Any` value uses the regular
+ # representation of the deserialized, embedded message, with an
+ # additional field `@type` which contains the type URL. Example:
+ #
+ # package google.profile;
+ # message Person {
+ # string first_name = 1;
+ # string last_name = 2;
+ # }
+ #
+ # {
+ # "@type": "type.googleapis.com/google.profile.Person",
+ # "firstName": <string>,
+ # "lastName": <string>
+ # }
+ #
+ # If the embedded message type is well-known and has a custom JSON
+ # representation, that representation will be embedded adding a field
+ # `value` which holds the custom JSON in addition to the `@type`
+ # field. Example (for message [google.protobuf.Duration][]):
+ #
+ # {
+ # "@type": "type.googleapis.com/google.protobuf.Duration",
+ # "value": "1.212s"
+ # }
+ # @!attribute [rw] type_url
+ # @return [String]
+ # A URL/resource name that uniquely identifies the type of the serialized
+ # protocol buffer message. This string must contain at least
+ # one "/" character. The last segment of the URL's path must represent
+ # the fully qualified name of the type (as in
+ # `path/google.protobuf.Duration`). The name should be in a canonical form
+ # (e.g., leading "." is not accepted).
+ #
+ # In practice, teams usually precompile into the binary all types that they
+ # expect it to use in the context of Any. However, for URLs which use the
+ # scheme `http`, `https`, or no scheme, one can optionally set up a type
+ # server that maps type URLs to message definitions as follows:
+ #
+ # * If no scheme is provided, `https` is assumed.
+ # * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+ # value in binary format, or produce an error.
+ # * Applications are allowed to cache lookup results based on the
+ # URL, or have them precompiled into a binary to avoid any
+ # lookup. Therefore, binary compatibility needs to be preserved
+ # on changes to types. (Use versioned type names to manage
+ # breaking changes.)
+ #
+ # Note: this functionality is not currently available in the official
+ # protobuf release, and it is not used for type URLs beginning with
+ # type.googleapis.com.
+ #
+ # Schemes other than `http`, `https` (or the empty scheme) might be
+ # used with implementation specific semantics.
+ # @!attribute [rw] value
+ # @return [String]
+ # Must be a valid serialized protocol buffer of the above specified type.
+ class Any
+ include Google::Protobuf::MessageExts
+ extend Google::Protobuf::MessageExts::ClassMethods
+ end
+ end
+ end
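The Any examples above cover C++, Java, Python, and Go; in Ruby the equivalent pack/unpack behavior comes from the well-known-type helpers in google/protobuf/well_known_types. A small sketch under that assumption, reusing the EmailPreferences message documented earlier in this diff:

require "google/protobuf/well_known_types"
require "google/cloud/bigquery/data_transfer/v1"

prefs = Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences.new enable_failure_email: true

# Pack: wraps the serialized message and records its type URL, e.g.
# "type.googleapis.com/google.cloud.bigquery.datatransfer.v1.EmailPreferences".
any = Google::Protobuf::Any.pack prefs

# Unpack: returns the message when the type URL matches the given class, nil otherwise.
unpacked = any.unpack Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences
puts unpacked.enable_failure_email unless unpacked.nil?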
data/proto_docs/google/protobuf/duration.rb
@@ -0,0 +1,98 @@
+ # frozen_string_literal: true
+
+ # Copyright 2020 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+
+ module Google
+ module Protobuf
+ # A Duration represents a signed, fixed-length span of time represented
+ # as a count of seconds and fractions of seconds at nanosecond
+ # resolution. It is independent of any calendar and concepts like "day"
+ # or "month". It is related to Timestamp in that the difference between
+ # two Timestamp values is a Duration and it can be added or subtracted
+ # from a Timestamp. Range is approximately +-10,000 years.
+ #
+ # # Examples
+ #
+ # Example 1: Compute Duration from two Timestamps in pseudo code.
+ #
+ # Timestamp start = ...;
+ # Timestamp end = ...;
+ # Duration duration = ...;
+ #
+ # duration.seconds = end.seconds - start.seconds;
+ # duration.nanos = end.nanos - start.nanos;
+ #
+ # if (duration.seconds < 0 && duration.nanos > 0) {
+ # duration.seconds += 1;
+ # duration.nanos -= 1000000000;
+ # } else if (duration.seconds > 0 && duration.nanos < 0) {
+ # duration.seconds -= 1;
+ # duration.nanos += 1000000000;
+ # }
+ #
+ # Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+ #
+ # Timestamp start = ...;
+ # Duration duration = ...;
+ # Timestamp end = ...;
+ #
+ # end.seconds = start.seconds + duration.seconds;
+ # end.nanos = start.nanos + duration.nanos;
+ #
+ # if (end.nanos < 0) {
+ # end.seconds -= 1;
+ # end.nanos += 1000000000;
+ # } else if (end.nanos >= 1000000000) {
+ # end.seconds += 1;
+ # end.nanos -= 1000000000;
+ # }
+ #
+ # Example 3: Compute Duration from datetime.timedelta in Python.
+ #
+ # td = datetime.timedelta(days=3, minutes=10)
+ # duration = Duration()
+ # duration.FromTimedelta(td)
+ #
+ # # JSON Mapping
+ #
+ # In JSON format, the Duration type is encoded as a string rather than an
+ # object, where the string ends in the suffix "s" (indicating seconds) and
+ # is preceded by the number of seconds, with nanoseconds expressed as
+ # fractional seconds. For example, 3 seconds with 0 nanoseconds should be
+ # encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
+ # be expressed in JSON format as "3.000000001s", and 3 seconds and 1
+ # microsecond should be expressed in JSON format as "3.000001s".
+ # @!attribute [rw] seconds
+ # @return [Integer]
+ # Signed seconds of the span of time. Must be from -315,576,000,000
+ # to +315,576,000,000 inclusive. Note: these bounds are computed from:
+ # 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
+ # @!attribute [rw] nanos
+ # @return [Integer]
+ # Signed fractions of a second at nanosecond resolution of the span
+ # of time. Durations less than one second are represented with a 0
+ # `seconds` field and a positive or negative `nanos` field. For durations
+ # of one second or more, a non-zero value for the `nanos` field must be
+ # of the same sign as the `seconds` field. Must be from -999,999,999
+ # to +999,999,999 inclusive.
+ class Duration
+ include Google::Protobuf::MessageExts
+ extend Google::Protobuf::MessageExts::ClassMethods
+ end
+ end
+ end
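Example 1 above is pseudo code; the same normalization can be written directly against the generated Ruby classes. A short sketch follows (the duration_between helper is illustrative, not part of the gem):

require "google/protobuf/timestamp_pb"
require "google/protobuf/duration_pb"

# Subtract two Timestamps and normalize the result so that seconds and
# nanos carry the same sign, mirroring Example 1 above.
def duration_between start_ts, end_ts
  seconds = end_ts.seconds - start_ts.seconds
  nanos   = end_ts.nanos - start_ts.nanos
  if seconds < 0 && nanos > 0
    seconds += 1
    nanos   -= 1_000_000_000
  elsif seconds > 0 && nanos < 0
    seconds -= 1
    nanos   += 1_000_000_000
  end
  Google::Protobuf::Duration.new seconds: seconds, nanos: nanos
end

start_ts = Google::Protobuf::Timestamp.new seconds: 100, nanos: 750_000_000
end_ts   = Google::Protobuf::Timestamp.new seconds: 103, nanos: 250_000_000
puts duration_between(start_ts, end_ts).inspect  # => 2 seconds, 500_000_000 nanos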