google-api-client 0.9.20 → 0.9.21

Files changed (159)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +8 -4
  3. data/api_names.yaml +33179 -1058
  4. data/generated/google/apis/adexchangebuyer2_v2beta1.rb +1 -1
  5. data/generated/google/apis/adexchangebuyer_v1_4.rb +1 -1
  6. data/generated/google/apis/adexchangebuyer_v1_4/classes.rb +24 -6
  7. data/generated/google/apis/adexchangebuyer_v1_4/representations.rb +2 -0
  8. data/generated/google/apis/admin_directory_v1.rb +1 -1
  9. data/generated/google/apis/admin_directory_v1/classes.rb +14 -0
  10. data/generated/google/apis/admin_directory_v1/representations.rb +2 -0
  11. data/generated/google/apis/adsense_v1_4.rb +1 -1
  12. data/generated/google/apis/adsensehost_v4_1.rb +1 -1
  13. data/generated/google/apis/analyticsreporting_v4.rb +1 -1
  14. data/generated/google/apis/analyticsreporting_v4/classes.rb +7 -0
  15. data/generated/google/apis/analyticsreporting_v4/representations.rb +1 -0
  16. data/generated/google/apis/androidenterprise_v1.rb +1 -1
  17. data/generated/google/apis/androidenterprise_v1/classes.rb +96 -156
  18. data/generated/google/apis/androidenterprise_v1/representations.rb +18 -47
  19. data/generated/google/apis/androidenterprise_v1/service.rb +15 -459
  20. data/generated/google/apis/androidpublisher_v2.rb +1 -1
  21. data/generated/google/apis/androidpublisher_v2/classes.rb +176 -0
  22. data/generated/google/apis/androidpublisher_v2/representations.rb +62 -0
  23. data/generated/google/apis/androidpublisher_v2/service.rb +60 -2
  24. data/generated/google/apis/appengine_v1beta5.rb +1 -1
  25. data/generated/google/apis/appengine_v1beta5/classes.rb +300 -198
  26. data/generated/google/apis/appengine_v1beta5/representations.rb +36 -0
  27. data/generated/google/apis/appengine_v1beta5/service.rb +54 -49
  28. data/generated/google/apis/appsactivity_v1.rb +1 -1
  29. data/generated/google/apis/appstate_v1.rb +1 -1
  30. data/generated/google/apis/bigquery_v2.rb +1 -1
  31. data/generated/google/apis/bigquery_v2/classes.rb +50 -6
  32. data/generated/google/apis/bigquery_v2/representations.rb +5 -0
  33. data/generated/google/apis/books_v1.rb +1 -1
  34. data/generated/google/apis/books_v1/classes.rb +7 -2
  35. data/generated/google/apis/books_v1/representations.rb +1 -0
  36. data/generated/google/apis/calendar_v3.rb +1 -1
  37. data/generated/google/apis/classroom_v1.rb +1 -1
  38. data/generated/google/apis/classroom_v1/classes.rb +74 -74
  39. data/generated/google/apis/classroom_v1/representations.rb +29 -29
  40. data/generated/google/apis/classroom_v1/service.rb +132 -28
  41. data/generated/google/apis/cloudbuild_v1.rb +1 -1
  42. data/generated/google/apis/cloudbuild_v1/classes.rb +47 -27
  43. data/generated/google/apis/cloudbuild_v1/representations.rb +20 -7
  44. data/generated/google/apis/cloudbuild_v1/service.rb +42 -0
  45. data/generated/google/apis/clouddebugger_v2.rb +1 -1
  46. data/generated/google/apis/clouddebugger_v2/classes.rb +648 -578
  47. data/generated/google/apis/clouddebugger_v2/representations.rb +140 -140
  48. data/generated/google/apis/clouddebugger_v2/service.rb +161 -155
  49. data/generated/google/apis/cloudresourcemanager_v1.rb +1 -1
  50. data/generated/google/apis/cloudresourcemanager_v1/classes.rb +563 -423
  51. data/generated/google/apis/cloudresourcemanager_v1/representations.rb +116 -76
  52. data/generated/google/apis/cloudresourcemanager_v1/service.rb +318 -249
  53. data/generated/google/apis/cloudresourcemanager_v1beta1.rb +1 -1
  54. data/generated/google/apis/cloudresourcemanager_v1beta1/classes.rb +396 -331
  55. data/generated/google/apis/cloudresourcemanager_v1beta1/representations.rb +79 -79
  56. data/generated/google/apis/cloudresourcemanager_v1beta1/service.rb +317 -281
  57. data/generated/google/apis/compute_beta.rb +1 -1
  58. data/generated/google/apis/compute_beta/classes.rb +352 -80
  59. data/generated/google/apis/compute_beta/representations.rb +132 -19
  60. data/generated/google/apis/compute_beta/service.rb +214 -0
  61. data/generated/google/apis/compute_v1.rb +1 -1
  62. data/generated/google/apis/compute_v1/classes.rb +673 -73
  63. data/generated/google/apis/compute_v1/representations.rb +274 -9
  64. data/generated/google/apis/compute_v1/service.rb +1574 -2
  65. data/generated/google/apis/content_v2.rb +1 -1
  66. data/generated/google/apis/content_v2/classes.rb +7 -8
  67. data/generated/google/apis/content_v2/service.rb +125 -54
  68. data/generated/google/apis/dataflow_v1b3.rb +2 -3
  69. data/generated/google/apis/dataflow_v1b3/classes.rb +286 -114
  70. data/generated/google/apis/dataflow_v1b3/representations.rb +73 -0
  71. data/generated/google/apis/dataflow_v1b3/service.rb +37 -38
  72. data/generated/google/apis/datastore_v1.rb +1 -1
  73. data/generated/google/apis/datastore_v1/classes.rb +5 -0
  74. data/generated/google/apis/deploymentmanager_v2.rb +1 -1
  75. data/generated/google/apis/deploymentmanager_v2/classes.rb +37 -5
  76. data/generated/google/apis/deploymentmanager_v2/representations.rb +16 -0
  77. data/generated/google/apis/deploymentmanager_v2/service.rb +4 -1
  78. data/generated/google/apis/dns_v1.rb +1 -1
  79. data/generated/google/apis/doubleclicksearch_v2.rb +1 -1
  80. data/generated/google/apis/drive_v2.rb +1 -1
  81. data/generated/google/apis/drive_v2/classes.rb +55 -21
  82. data/generated/google/apis/drive_v2/representations.rb +2 -0
  83. data/generated/google/apis/drive_v3.rb +1 -1
  84. data/generated/google/apis/drive_v3/classes.rb +41 -12
  85. data/generated/google/apis/drive_v3/representations.rb +2 -0
  86. data/generated/google/apis/fitness_v1.rb +31 -1
  87. data/generated/google/apis/fitness_v1/classes.rb +1 -1
  88. data/generated/google/apis/games_configuration_v1configuration.rb +1 -1
  89. data/generated/google/apis/games_management_v1management.rb +1 -1
  90. data/generated/google/apis/games_v1.rb +1 -1
  91. data/generated/google/apis/gmail_v1.rb +1 -1
  92. data/generated/google/apis/gmail_v1/classes.rb +32 -1
  93. data/generated/google/apis/gmail_v1/representations.rb +15 -0
  94. data/generated/google/apis/gmail_v1/service.rb +41 -2
  95. data/generated/google/apis/identitytoolkit_v3.rb +4 -1
  96. data/generated/google/apis/identitytoolkit_v3/classes.rb +7 -0
  97. data/generated/google/apis/identitytoolkit_v3/representations.rb +1 -0
  98. data/generated/google/apis/kgsearch_v1.rb +2 -3
  99. data/generated/google/apis/kgsearch_v1/classes.rb +10 -10
  100. data/generated/google/apis/kgsearch_v1/representations.rb +2 -2
  101. data/generated/google/apis/kgsearch_v1/service.rb +28 -26
  102. data/generated/google/apis/language_v1beta1.rb +1 -1
  103. data/generated/google/apis/language_v1beta1/classes.rb +166 -8
  104. data/generated/google/apis/language_v1beta1/representations.rb +50 -0
  105. data/generated/google/apis/language_v1beta1/service.rb +32 -0
  106. data/generated/google/apis/logging_v2beta1.rb +6 -6
  107. data/generated/google/apis/logging_v2beta1/classes.rb +868 -789
  108. data/generated/google/apis/logging_v2beta1/representations.rb +179 -147
  109. data/generated/google/apis/logging_v2beta1/service.rb +378 -211
  110. data/generated/google/apis/monitoring_v3.rb +1 -1
  111. data/generated/google/apis/monitoring_v3/classes.rb +110 -112
  112. data/generated/google/apis/monitoring_v3/service.rb +68 -73
  113. data/generated/google/apis/pagespeedonline_v2.rb +1 -1
  114. data/generated/google/apis/plus_domains_v1.rb +1 -1
  115. data/generated/google/apis/plus_v1.rb +1 -1
  116. data/generated/google/apis/pubsub_v1.rb +1 -1
  117. data/generated/google/apis/pubsub_v1/classes.rb +380 -327
  118. data/generated/google/apis/pubsub_v1/representations.rb +79 -79
  119. data/generated/google/apis/pubsub_v1/service.rb +424 -396
  120. data/generated/google/apis/sheets_v4.rb +1 -1
  121. data/generated/google/apis/sheets_v4/classes.rb +175 -53
  122. data/generated/google/apis/sheets_v4/representations.rb +48 -13
  123. data/generated/google/apis/site_verification_v1.rb +1 -1
  124. data/generated/google/apis/slides_v1.rb +1 -1
  125. data/generated/google/apis/slides_v1/classes.rb +149 -31
  126. data/generated/google/apis/slides_v1/representations.rb +52 -9
  127. data/generated/google/apis/slides_v1/service.rb +5 -5
  128. data/generated/google/apis/speech_v1beta1.rb +1 -1
  129. data/generated/google/apis/speech_v1beta1/service.rb +3 -1
  130. data/generated/google/apis/sqladmin_v1beta4.rb +1 -1
  131. data/generated/google/apis/sqladmin_v1beta4/classes.rb +54 -2
  132. data/generated/google/apis/sqladmin_v1beta4/representations.rb +29 -0
  133. data/generated/google/apis/sqladmin_v1beta4/service.rb +41 -0
  134. data/generated/google/apis/storage_v1.rb +1 -1
  135. data/generated/google/apis/storage_v1/classes.rb +7 -0
  136. data/generated/google/apis/storage_v1/representations.rb +2 -0
  137. data/generated/google/apis/tagmanager_v1.rb +5 -5
  138. data/generated/google/apis/vision_v1.rb +1 -1
  139. data/generated/google/apis/vision_v1/classes.rb +63 -59
  140. data/generated/google/apis/youtube_analytics_v1.rb +1 -1
  141. data/generated/google/apis/youtube_analytics_v1/classes.rb +0 -209
  142. data/generated/google/apis/youtube_analytics_v1/representations.rb +0 -99
  143. data/generated/google/apis/youtube_analytics_v1/service.rb +0 -76
  144. data/generated/google/apis/youtube_partner_v1.rb +1 -1
  145. data/generated/google/apis/youtube_partner_v1/classes.rb +71 -0
  146. data/generated/google/apis/youtube_partner_v1/representations.rb +33 -0
  147. data/generated/google/apis/youtube_partner_v1/service.rb +52 -0
  148. data/generated/google/apis/youtube_v3.rb +1 -1
  149. data/generated/google/apis/youtube_v3/classes.rb +15 -2
  150. data/generated/google/apis/youtube_v3/representations.rb +3 -0
  151. data/generated/google/apis/youtube_v3/service.rb +4 -4
  152. data/generated/google/apis/youtubereporting_v1.rb +4 -4
  153. data/generated/google/apis/youtubereporting_v1/classes.rb +146 -137
  154. data/generated/google/apis/youtubereporting_v1/representations.rb +33 -33
  155. data/generated/google/apis/youtubereporting_v1/service.rb +149 -145
  156. data/lib/google/apis/version.rb +1 -1
  157. data/samples/cli/lib/samples/calendar.rb +1 -1
  158. data/samples/cli/lib/samples/gmail.rb +74 -0
  159. metadata +29 -29
data/generated/google/apis/dataflow_v1b3.rb
@@ -20,13 +20,12 @@ module Google
  module Apis
  # Google Dataflow API
  #
- # Develops and executes data processing patterns like ETL, batch computation,
- # and continuous computation.
+ # Manages Google Cloud Dataflow projects on Google Cloud Platform.
  #
  # @see https://cloud.google.com/dataflow
  module DataflowV1b3
  VERSION = 'V1b3'
- REVISION = '20160928'
+ REVISION = '20161004'

  # View and manage your data across Google Cloud Platform services
  AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform'
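
For orientation, a minimal usage sketch of the regenerated client against the revision above. The service class, scope constant, and Google::Auth call are part of google-api-client/googleauth as shipped; the list_project_jobs method name and 'my-project' are assumptions to check against the generated service.rb.

require 'google/apis/dataflow_v1b3'
require 'googleauth'

dataflow = Google::Apis::DataflowV1b3::DataflowService.new
# Application Default Credentials, limited to the scope declared above.
dataflow.authorization = Google::Auth.get_application_default(
  [Google::Apis::DataflowV1b3::AUTH_CLOUD_PLATFORM]
)

# Assumed generated method for projects.jobs.list; 'my-project' is a placeholder.
response = dataflow.list_project_jobs('my-project')
response.jobs.to_a.each { |job| puts "#{job.id} #{job.current_state}" }
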
data/generated/google/apis/dataflow_v1b3/classes.rb
@@ -110,30 +110,31 @@ module Google
  end
  end

- # Defines a job to be run by the Dataflow service.
+ # Defines a job to be run by the Cloud Dataflow service.
  class Job
  include Google::Apis::Core::Hashable

- # The unique ID of this job. This field is set by the Dataflow service when the
- # Job is created, and is immutable for the life of the Job.
+ # The unique ID of this job. This field is set by the Cloud Dataflow service
+ # when the Job is created, and is immutable for the life of the job.
  # Corresponds to the JSON property `id`
  # @return [String]
  attr_accessor :id

- # The project which owns the job.
+ # The ID of the Cloud Platform project that the job belongs to.
  # Corresponds to the JSON property `projectId`
  # @return [String]
  attr_accessor :project_id

- # The user-specified Dataflow job name. Only one Job with a given name may exist
- # in a project at any given time. If a caller attempts to create a Job with the
- # same name as an already-existing Job, the attempt will return the existing Job.
- # The name must match the regular expression [a-z]([-a-z0-9]`0,38`[a-z0-9])?
+ # The user-specified Cloud Dataflow job name. Only one Job with a given name may
+ # exist in a project at any given time. If a caller attempts to create a Job
+ # with the same name as an already-existing Job, the attempt returns the
+ # existing Job. The name must match the regular expression `[a-z]([-a-z0-9]`0,38`
+ # [a-z0-9])?`
  # Corresponds to the JSON property `name`
  # @return [String]
  attr_accessor :name

- # The type of dataflow job.
+ # The type of Cloud Dataflow job.
  # Corresponds to the JSON property `type`
  # @return [String]
  attr_accessor :type
@@ -148,11 +149,11 @@ module Google
  # @return [Array<Google::Apis::DataflowV1b3::Step>]
  attr_accessor :steps

- # The current state of the job. Jobs are created in the JOB_STATE_STOPPED state
- # unless otherwise specified. A job in the JOB_STATE_RUNNING state may
- # asynchronously enter a terminal state. Once a job has reached a terminal state,
- # no further state updates may be made. This field may be mutated by the
- # Dataflow service; callers cannot mutate it.
+ # The current state of the job. Jobs are created in the `JOB_STATE_STOPPED`
+ # state unless otherwise specified. A job in the `JOB_STATE_RUNNING` state may
+ # asynchronously enter a terminal state. After a job has reached a terminal
+ # state, no further state updates may be made. This field may be mutated by the
+ # Cloud Dataflow service; callers cannot mutate it.
  # Corresponds to the JSON property `currentState`
  # @return [String]
  attr_accessor :current_state
@@ -162,54 +163,53 @@ module Google
  # @return [String]
  attr_accessor :current_state_time

- # The job's requested state. UpdateJob may be used to switch between the
- # JOB_STATE_STOPPED and JOB_STATE_RUNNING states, by setting requested_state.
- # UpdateJob may also be used to directly set a job's requested state to
- # JOB_STATE_CANCELLED or JOB_STATE_DONE, irrevocably terminating the job if it
- # has not already reached a terminal state.
+ # The job's requested state. `UpdateJob` may be used to switch between the `
+ # JOB_STATE_STOPPED` and `JOB_STATE_RUNNING` states, by setting requested_state.
+ # `UpdateJob` may also be used to directly set a job's requested state to `
+ # JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably terminating the job if
+ # it has not already reached a terminal state.
  # Corresponds to the JSON property `requestedState`
  # @return [String]
  attr_accessor :requested_state

- # Additional information about how a Dataflow job will be executed which isn’t
- # contained in the submitted job.
+ # Additional information about how a Cloud Dataflow job will be executed that
+ # isn't contained in the submitted job.
  # Corresponds to the JSON property `executionInfo`
  # @return [Google::Apis::DataflowV1b3::JobExecutionInfo]
  attr_accessor :execution_info

- # Timestamp when job was initially created. Immutable, set by the Dataflow
- # service.
+ # The timestamp when the job was initially created. Immutable and set by the
+ # Cloud Dataflow service.
  # Corresponds to the JSON property `createTime`
  # @return [String]
  attr_accessor :create_time

- # If this job is an update of an existing job, this field will be the ID of the
- # job it replaced. When sending a CreateJobRequest, you can update a job by
- # specifying it here. The job named here will be stopped, and its intermediate
- # state transferred to this job.
+ # If this job is an update of an existing job, this field is the job ID of the
+ # job it replaced. When sending a `CreateJobRequest`, you can update a job by
+ # specifying it here. The job named here is stopped, and its intermediate state
+ # is transferred to this job.
  # Corresponds to the JSON property `replaceJobId`
  # @return [String]
  attr_accessor :replace_job_id

- # Map of transform name prefixes of the job to be replaced to the corresponding
- # name prefixes of the new job.
+ # The map of transform name prefixes of the job to be replaced to the
+ # corresponding name prefixes of the new job.
  # Corresponds to the JSON property `transformNameMapping`
  # @return [Hash<String,String>]
  attr_accessor :transform_name_mapping

- # Client's unique identifier of the job, re-used by SDK across retried attempts.
- # If this field is set, the service will ensure its uniqueness. That is, the
- # request to create a job will fail if the service has knowledge of a previously
- # submitted job with the same client's id and job name. The caller may, for
- # example, use this field to ensure idempotence of job creation across retried
- # attempts to create a job. By default, the field is empty and, in that case,
- # the service ignores it.
+ # The client's unique identifier of the job, re-used across retried attempts. If
+ # this field is set, the service will ensure its uniqueness. The request to
+ # create a job will fail if the service has knowledge of a previously submitted
+ # job with the same client's ID and job name. The caller may use this field to
+ # ensure idempotence of job creation across retried attempts to create a job. By
+ # default, the field is empty and, in that case, the service ignores it.
  # Corresponds to the JSON property `clientRequestId`
  # @return [String]
  attr_accessor :client_request_id

- # If another job is an update of this job (and thus, this job is in
- # JOB_STATE_UPDATED), this field will contain the ID of that job.
+ # If another job is an update of this job (and thus, this job is in `
+ # JOB_STATE_UPDATED`), this field contains the ID of that job.
  # Corresponds to the JSON property `replacedByJobId`
  # @return [String]
  attr_accessor :replaced_by_job_id
@@ -232,7 +232,7 @@ module Google
  # @return [Hash<String,String>]
  attr_accessor :labels

- # The location which contains this job.
+ # The location that contains this job.
  # Corresponds to the JSON property `location`
  # @return [String]
  attr_accessor :location
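
A hedged sketch of the Job fields documented above, reusing the dataflow service instance from the earlier sketch. The Job attributes come from this file; create_project_job is assumed to be the generated name for projects.jobs.create, and the values are placeholders.

# Job and its attributes come from the class above; placeholders throughout.
job = Google::Apis::DataflowV1b3::Job.new(
  name: 'nightly-etl',                       # must match [a-z]([-a-z0-9]{0,38}[a-z0-9])?
  type: 'JOB_TYPE_BATCH',
  client_request_id: 'retry-safe-token-123'  # keeps retried creates idempotent
)

# Assumed generated name for projects.jobs.create; verify against service.rb.
created = dataflow.create_project_job('my-project', job)
puts created.current_state   # newly created jobs start in JOB_STATE_STOPPED unless specified
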
@@ -291,8 +291,8 @@ module Google
  # @return [Array<String>]
  attr_accessor :experiments

- # Worker pools. At least one "harness" worker pool must be specified in order
- # for the job to have workers.
+ # The worker pools. At least one "harness" worker pool must be specified in
+ # order for the job to have workers.
  # Corresponds to the JSON property `workerPools`
  # @return [Array<Google::Apis::DataflowV1b3::WorkerPool>]
  attr_accessor :worker_pools
@@ -315,9 +315,9 @@ module Google
  # @return [String]
  attr_accessor :dataset

- # The Dataflow SDK pipeline options specified by the user. These options are
- # passed through the service and are used to recreate the SDK pipeline options
- # on the worker in a language agnostic and platform independent way.
+ # The Cloud Dataflow SDK pipeline options specified by the user. These options
+ # are passed through the service and are used to recreate the SDK pipeline
+ # options on the worker in a language agnostic and platform independent way.
  # Corresponds to the JSON property `sdkPipelineOptions`
  # @return [Hash<String,Object>]
  attr_accessor :sdk_pipeline_options
@@ -351,14 +351,14 @@ module Google
  end
  end

- # Describes one particular pool of Dataflow workers to be instantiated by the
- # Dataflow service in order to perform the computations required by a job. Note
- # that a workflow job may use multiple pools, in order to match the various
- # computational requirements of the various stages of the job.
+ # Describes one particular pool of Cloud Dataflow workers to be instantiated by
+ # the Cloud Dataflow service in order to perform the computations required by a
+ # job. Note that a workflow job may use multiple pools, in order to match the
+ # various computational requirements of the various stages of the job.
  class WorkerPool
  include Google::Apis::Core::Hashable

- # The kind of the worker pool; currently only 'harness' and 'shuffle' are
+ # The kind of the worker pool; currently only `harness` and `shuffle` are
  # supported.
  # Corresponds to the JSON property `kind`
  # @return [String]
@@ -390,15 +390,16 @@ module Google
  attr_accessor :machine_type

  # Sets the policy for determining when to turndown worker pool. Allowed values
- # are: TEARDOWN_ALWAYS, TEARDOWN_ON_SUCCESS, and TEARDOWN_NEVER. TEARDOWN_ALWAYS
- # means workers are always torn down regardless of whether the job succeeds.
- # TEARDOWN_ON_SUCCESS means workers are torn down if the job succeeds.
- # TEARDOWN_NEVER means the workers are never torn down. If the workers are not
- # torn down by the service, they will continue to run and use Google Compute
- # Engine VM resources in the user's project until they are explicitly terminated
- # by the user. Because of this, Google recommends using the TEARDOWN_ALWAYS
- # policy except for small, manually supervised test jobs. If unknown or
- # unspecified, the service will attempt to choose a reasonable default.
+ # are: `TEARDOWN_ALWAYS`, `TEARDOWN_ON_SUCCESS`, and `TEARDOWN_NEVER`. `
+ # TEARDOWN_ALWAYS` means workers are always torn down regardless of whether the
+ # job succeeds. `TEARDOWN_ON_SUCCESS` means workers are torn down if the job
+ # succeeds. `TEARDOWN_NEVER` means the workers are never torn down. If the
+ # workers are not torn down by the service, they will continue to run and use
+ # Google Compute Engine VM resources in the user's project until they are
+ # explicitly terminated by the user. Because of this, Google recommends using
+ # the `TEARDOWN_ALWAYS` policy except for small, manually supervised test jobs.
+ # If unknown or unspecified, the service will attempt to choose a reasonable
+ # default.
  # Corresponds to the JSON property `teardownPolicy`
  # @return [String]
  attr_accessor :teardown_policy
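
The worker pool fields above are normally filled in by the SDK, but the shape is easy to see in a hedged sketch. WorkerPool and its attributes appear in this diff; the Environment class and the job.environment assignment are assumptions based on the rest of the generated classes.rb.

# WorkerPool fields are from this diff; Environment and job.environment are
# assumptions based on the rest of the generated classes.
environment = Google::Apis::DataflowV1b3::Environment.new(
  worker_pools: [
    Google::Apis::DataflowV1b3::WorkerPool.new(
      kind: 'harness',                     # only `harness` and `shuffle` are supported
      machine_type: 'n1-standard-1',
      teardown_policy: 'TEARDOWN_ALWAYS'   # recommended outside supervised test jobs
    )
  ]
)
job.environment = environment   # `job` from the previous sketch
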
@@ -469,8 +470,8 @@ module Google
  # @return [String]
  attr_accessor :subnetwork

- # Docker container image that executes Dataflow worker harness, residing in
- # Google Container Registry. Required.
+ # Required. Docker container image that executes the Cloud Dataflow worker
+ # harness, residing in Google Container Registry.
  # Corresponds to the JSON property `workerHarnessContainerImage`
  # @return [String]
  attr_accessor :worker_harness_container_image
@@ -517,12 +518,12 @@ module Google
  end
  end

- # Packages that need to be installed in order for a worker to run the steps of
- # the Dataflow job which will be assigned to its worker pool. This is the
- # mechanism by which the SDK causes code to be loaded onto the workers. For
- # example, the Dataflow Java SDK might use this to install jars containing the
- # user's code and all of the various dependencies (libraries, data files, etc)
- # required in order for that code to run.
+ # The packages that must be installed in order for a worker to run the steps of
+ # the Cloud Dataflow job that will be assigned to its worker pool. This is the
+ # mechanism by which the Cloud Dataflow SDK causes code to be loaded onto the
+ # workers. For example, the Cloud Dataflow Java SDK might use this to install
+ # jars containing the user's code and all of the various dependencies (libraries,
+ # data files, etc.) required in order for that code to run.
  class Package
  include Google::Apis::Core::Hashable

@@ -564,8 +565,8 @@ module Google
  # @return [String]
  attr_accessor :task_group

- # OAuth2 scopes to be requested by the taskrunner in order to access the
- # dataflow API.
+ # The OAuth2 scopes to be requested by the taskrunner in order to access the
+ # Cloud Dataflow API.
  # Corresponds to the JSON property `oauthScopes`
  # @return [Array<String>]
  attr_accessor :oauth_scopes
@@ -580,7 +581,7 @@ module Google
  # @return [String]
  attr_accessor :base_url

- # API version of endpoint, e.g. "v1b3"
+ # The API version of endpoint, e.g. "v1b3"
  # Corresponds to the JSON property `dataflowApiVersion`
  # @return [String]
  attr_accessor :dataflow_api_version
@@ -590,24 +591,24 @@ module Google
  # @return [Google::Apis::DataflowV1b3::WorkerSettings]
  attr_accessor :parallel_worker_settings

- # Location on the worker for task-specific subdirectories.
+ # The location on the worker for task-specific subdirectories.
  # Corresponds to the JSON property `baseTaskDir`
  # @return [String]
  attr_accessor :base_task_dir

- # Do we continue taskrunner if an exception is hit?
+ # Whether to continue taskrunner if an exception is hit.
  # Corresponds to the JSON property `continueOnException`
  # @return [Boolean]
  attr_accessor :continue_on_exception
  alias_method :continue_on_exception?, :continue_on_exception

- # Send taskrunner log into to Google Compute Engine VM serial console?
+ # Whether to send taskrunner log info to Google Compute Engine VM serial console.
  # Corresponds to the JSON property `logToSerialconsole`
  # @return [Boolean]
  attr_accessor :log_to_serialconsole
  alias_method :log_to_serialconsole?, :log_to_serialconsole

- # Also send taskrunner log info to stderr?
+ # Whether to also send taskrunner log info to stderr.
  # Corresponds to the JSON property `alsologtostderr`
  # @return [Boolean]
  attr_accessor :alsologtostderr
@@ -620,7 +621,7 @@ module Google
  # @return [String]
  attr_accessor :log_upload_location

- # Directory on the VM to store logs.
+ # The directory on the VM to store logs.
  # Corresponds to the JSON property `logDir`
  # @return [String]
  attr_accessor :log_dir
@@ -632,32 +633,32 @@ module Google
  # @return [String]
  attr_accessor :temp_storage_prefix

- # Command to launch the worker harness.
+ # The command to launch the worker harness.
  # Corresponds to the JSON property `harnessCommand`
  # @return [String]
  attr_accessor :harness_command

- # Store the workflow in this file.
+ # The file to store the workflow in.
  # Corresponds to the JSON property `workflowFileName`
  # @return [String]
  attr_accessor :workflow_file_name

- # Store preprocessing commands in this file.
+ # The file to store preprocessing commands in.
  # Corresponds to the JSON property `commandlinesFileName`
  # @return [String]
  attr_accessor :commandlines_file_name

- # ID string of VM.
+ # The ID string of the VM.
  # Corresponds to the JSON property `vmId`
  # @return [String]
  attr_accessor :vm_id

- # Suggested backend language.
+ # The suggested backend language.
  # Corresponds to the JSON property `languageHint`
  # @return [String]
  attr_accessor :language_hint

- # Streaming worker main class name.
+ # The streaming worker main class name.
  # Corresponds to the JSON property `streamingWorkerMainClass`
  # @return [String]
  attr_accessor :streaming_worker_main_class
@@ -703,14 +704,14 @@ module Google
  # @return [String]
  attr_accessor :base_url

- # Send work progress updates to service.
+ # Whether to send work progress updates to the service.
  # Corresponds to the JSON property `reportingEnabled`
  # @return [Boolean]
  attr_accessor :reporting_enabled
  alias_method :reporting_enabled?, :reporting_enabled

- # The Dataflow service path relative to the root URL, for example, "dataflow/
- # v1b3/projects".
+ # The Cloud Dataflow service path relative to the root URL, for example, "
+ # dataflow/v1b3/projects".
  # Corresponds to the JSON property `servicePath`
  # @return [String]
  attr_accessor :service_path
@@ -721,7 +722,7 @@ module Google
  # @return [String]
  attr_accessor :shuffle_service_path

- # ID of the worker running this pipeline.
+ # The ID of the worker running this pipeline.
  # Corresponds to the JSON property `workerId`
  # @return [String]
  attr_accessor :worker_id
@@ -764,13 +765,12 @@ module Google
  # default. For example, the standard persistent disk type is a resource name
  # typically ending in "pd-standard". If SSD persistent disks are available, the
  # resource name typically ends with "pd-ssd". The actual valid values are
- # defined the Google Compute Engine API, not by the Dataflow API; consult the
- # Google Compute Engine documentation for more information about determining the
- # set of available disk types for a particular project and zone. Google Compute
- # Engine Disk types are local to a particular project in a particular zone, and
- # so the resource name will typically look something like this: compute.
- # googleapis.com/projects/
- # /zones//diskTypes/pd-standard
+ # defined the Google Compute Engine API, not by the Cloud Dataflow API; consult
+ # the Google Compute Engine documentation for more information about determining
+ # the set of available disk types for a particular project and zone. Google
+ # Compute Engine Disk types are local to a particular project in a particular
+ # zone, and so the resource name will typically look something like this:
+ # compute.googleapis.com/projects/project-id/zones/zone/diskTypes/pd-standard
  # Corresponds to the JSON property `diskType`
  # @return [String]
  attr_accessor :disk_type
@@ -817,27 +817,27 @@ module Google
  end
  end

- # Defines a particular step within a Dataflow job. A job consists of multiple
- # steps, each of which performs some specific operation as part of the overall
- # job. Data is typically passed from one step to another as part of the job.
- # Here's an example of a sequence of steps which together implement a Map-Reduce
- # job: * Read a collection of data from some source, parsing the collection's
- # elements. * Validate the elements. * Apply a user-defined function to map each
- # element to some value and extract an element-specific key value. * Group
- # elements with the same key into a single element with that key, transforming a
- # multiply-keyed collection into a uniquely-keyed collection. * Write the
- # elements out to some data sink. (Note that the Dataflow service may be used to
- # run many different types of jobs, not just Map-Reduce).
+ # Defines a particular step within a Cloud Dataflow job. A job consists of
+ # multiple steps, each of which performs some specific operation as part of the
+ # overall job. Data is typically passed from one step to another as part of the
+ # job. Here's an example of a sequence of steps which together implement a Map-
+ # Reduce job: * Read a collection of data from some source, parsing the
+ # collection's elements. * Validate the elements. * Apply a user-defined
+ # function to map each element to some value and extract an element-specific key
+ # value. * Group elements with the same key into a single element with that key,
+ # transforming a multiply-keyed collection into a uniquely-keyed collection. *
+ # Write the elements out to some data sink. Note that the Cloud Dataflow service
+ # may be used to run many different types of jobs, not just Map-Reduce.
  class Step
  include Google::Apis::Core::Hashable

- # The kind of step in the dataflow Job.
+ # The kind of step in the Cloud Dataflow job.
  # Corresponds to the JSON property `kind`
  # @return [String]
  attr_accessor :kind

- # Name identifying the step. This must be unique for each step with respect to
- # all other steps in the dataflow Job.
+ # The name that identifies the step. This must be unique for each step with
+ # respect to all other steps in the Cloud Dataflow job.
  # Corresponds to the JSON property `name`
  # @return [String]
  attr_accessor :name
@@ -860,8 +860,8 @@ module Google
  end
  end

- # Additional information about how a Dataflow job will be executed which isn’t
- # contained in the submitted job.
+ # Additional information about how a Cloud Dataflow job will be executed that
+ # isn't contained in the submitted job.
  class JobExecutionInfo
  include Google::Apis::Core::Hashable

@@ -901,8 +901,8 @@ module Google
  end
  end

- # Response to a request to list Dataflow jobs. This may be a partial response,
- # depending on the page size in the ListJobsRequest.
+ # Response to a request to list Cloud Dataflow jobs. This may be a partial
+ # response, depending on the page size in the ListJobsRequest.
  class ListJobsResponse
  include Google::Apis::Core::Hashable

@@ -933,8 +933,7 @@ module Google
  end
  end

- # FailedLocation indicates which location failed to respond to a request for
- # data.
+ # Indicates which location failed to respond to a request for data.
  class FailedLocation
  include Google::Apis::Core::Hashable

@@ -1166,28 +1165,90 @@ module Google
  end
  end

- # Request to create a Dataflow job.
+ # A request to create a Cloud Dataflow job from a template.
  class CreateJobFromTemplateRequest
  include Google::Apis::Core::Hashable

- # A path to the serialized JSON representation of the job.
+ # Required. The job name to use for the created job.
+ # Corresponds to the JSON property `jobName`
+ # @return [String]
+ attr_accessor :job_name
+
+ # Required. A Cloud Storage path to the template from which to create the job.
+ # Must be a valid Cloud Storage URL, beginning with `gs://`.
  # Corresponds to the JSON property `gcsPath`
  # @return [String]
  attr_accessor :gcs_path

- # Dynamic parameterization of the job's runtime environment.
+ # The runtime parameters to pass to the job.
  # Corresponds to the JSON property `parameters`
  # @return [Hash<String,String>]
  attr_accessor :parameters

+ # The environment values to set at runtime.
+ # Corresponds to the JSON property `environment`
+ # @return [Google::Apis::DataflowV1b3::RuntimeEnvironment]
+ attr_accessor :environment
+
  def initialize(**args)
  update!(**args)
  end

  # Update properties of this object
  def update!(**args)
+ @job_name = args[:job_name] if args.key?(:job_name)
  @gcs_path = args[:gcs_path] if args.key?(:gcs_path)
  @parameters = args[:parameters] if args.key?(:parameters)
+ @environment = args[:environment] if args.key?(:environment)
+ end
+ end
+
+ # The environment values to set at runtime.
+ class RuntimeEnvironment
+ include Google::Apis::Core::Hashable
+
+ # The maximum number of Google Compute Engine instances to be made available to
+ # your pipeline during execution, from 1 to 1000.
+ # Corresponds to the JSON property `maxWorkers`
+ # @return [Fixnum]
+ attr_accessor :max_workers
+
+ # The Compute Engine [availability zone](https://cloud.google.com/compute/docs/
+ # regions-zones/regions-zones) for launching worker instances to run your
+ # pipeline.
+ # Corresponds to the JSON property `zone`
+ # @return [String]
+ attr_accessor :zone
+
+ # The email address of the service account to run the job as.
+ # Corresponds to the JSON property `serviceAccountEmail`
+ # @return [String]
+ attr_accessor :service_account_email
+
+ # The Cloud Storage path to use for temporary files. Must be a valid Cloud
+ # Storage URL, beginning with `gs://`.
+ # Corresponds to the JSON property `tempLocation`
+ # @return [String]
+ attr_accessor :temp_location
+
+ # Whether to bypass the safety checks for the job's temporary directory. Use
+ # with caution.
+ # Corresponds to the JSON property `bypassTempDirValidation`
+ # @return [Boolean]
+ attr_accessor :bypass_temp_dir_validation
+ alias_method :bypass_temp_dir_validation?, :bypass_temp_dir_validation
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @max_workers = args[:max_workers] if args.key?(:max_workers)
+ @zone = args[:zone] if args.key?(:zone)
+ @service_account_email = args[:service_account_email] if args.key?(:service_account_email)
+ @temp_location = args[:temp_location] if args.key?(:temp_location)
+ @bypass_temp_dir_validation = args[:bypass_temp_dir_validation] if args.key?(:bypass_temp_dir_validation)
  end
  end

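
The new CreateJobFromTemplateRequest and RuntimeEnvironment classes above support launching a job from a template stored in Cloud Storage. A hedged sketch, where the create_job_from_template method name and all paths are assumptions:

# Class names are from this diff; the service method name and paths are assumptions.
request = Google::Apis::DataflowV1b3::CreateJobFromTemplateRequest.new(
  job_name: 'wordcount-from-template',
  gcs_path: 'gs://my-bucket/templates/wordcount',            # placeholder template path
  parameters: { 'inputFile' => 'gs://my-bucket/input.txt' },
  environment: Google::Apis::DataflowV1b3::RuntimeEnvironment.new(
    max_workers: 3,
    zone: 'us-central1-f',
    temp_location: 'gs://my-bucket/tmp'                      # must begin with gs://
  )
)
job = dataflow.create_job_from_template('my-project', request)
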
@@ -1474,6 +1535,11 @@ module Google
  # @return [Google::Apis::DataflowV1b3::StringList]
  attr_accessor :string_list

+ # A metric value representing a distribution.
+ # Corresponds to the JSON property `distribution`
+ # @return [Google::Apis::DataflowV1b3::DistributionUpdate]
+ attr_accessor :distribution
+
  # Value for internally-defined counters used by the Dataflow service.
  # Corresponds to the JSON property `internal`
  # @return [Object]
@@ -1497,6 +1563,7 @@ module Google
  @integer_list = args[:integer_list] if args.key?(:integer_list)
  @floating_point_list = args[:floating_point_list] if args.key?(:floating_point_list)
  @string_list = args[:string_list] if args.key?(:string_list)
+ @distribution = args[:distribution] if args.key?(:distribution)
  @internal = args[:internal] if args.key?(:internal)
  end
  end
@@ -1790,6 +1857,53 @@ module Google
  end
  end

+ # A metric value representing a distribution.
+ class DistributionUpdate
+ include Google::Apis::Core::Hashable
+
+ # A representation of an int64, n, that is immune to precision loss when encoded
+ # in JSON.
+ # Corresponds to the JSON property `min`
+ # @return [Google::Apis::DataflowV1b3::SplitInt64]
+ attr_accessor :min
+
+ # A representation of an int64, n, that is immune to precision loss when encoded
+ # in JSON.
+ # Corresponds to the JSON property `max`
+ # @return [Google::Apis::DataflowV1b3::SplitInt64]
+ attr_accessor :max
+
+ # A representation of an int64, n, that is immune to precision loss when encoded
+ # in JSON.
+ # Corresponds to the JSON property `count`
+ # @return [Google::Apis::DataflowV1b3::SplitInt64]
+ attr_accessor :count
+
+ # A representation of an int64, n, that is immune to precision loss when encoded
+ # in JSON.
+ # Corresponds to the JSON property `sum`
+ # @return [Google::Apis::DataflowV1b3::SplitInt64]
+ attr_accessor :sum
+
+ # Use a double since the sum of squares is likely to overflow int64.
+ # Corresponds to the JSON property `sumOfSquares`
+ # @return [Float]
+ attr_accessor :sum_of_squares
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @min = args[:min] if args.key?(:min)
+ @max = args[:max] if args.key?(:max)
+ @count = args[:count] if args.key?(:count)
+ @sum = args[:sum] if args.key?(:sum)
+ @sum_of_squares = args[:sum_of_squares] if args.key?(:sum_of_squares)
+ end
+ end
+
  # A progress measurement of a WorkItem by a worker.
  class ApproximateReportedProgress
  include Google::Apis::Core::Hashable
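
The new DistributionUpdate metric value above carries min, max, count, and sum as SplitInt64 values plus a floating-point sum of squares. A hedged illustration of that shape; the high_bits/low_bits attributes of SplitInt64 are assumed from the underlying proto and should be verified:

# Hedged illustration: SplitInt64's high_bits/low_bits are assumed from the proto.
def split_int64(n)
  Google::Apis::DataflowV1b3::SplitInt64.new(
    high_bits: n >> 32,
    low_bits:  n & 0xFFFFFFFF
  )
end

latencies_ms = [12, 7, 31, 18]
distribution = Google::Apis::DataflowV1b3::DistributionUpdate.new(
  min:   split_int64(latencies_ms.min),
  max:   split_int64(latencies_ms.max),
  count: split_int64(latencies_ms.size),
  sum:   split_int64(latencies_ms.inject(:+)),
  sum_of_squares: latencies_ms.inject(0) { |acc, v| acc + v * v }.to_f
)
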
@@ -3426,6 +3540,12 @@ module Google
  # @return [String]
  attr_accessor :tracking_subscription

+ # If true, then the client has requested to get pubsub attributes.
+ # Corresponds to the JSON property `withAttributes`
+ # @return [Boolean]
+ attr_accessor :with_attributes
+ alias_method :with_attributes?, :with_attributes
+
  def initialize(**args)
  update!(**args)
  end
@@ -3438,6 +3558,7 @@ module Google
  @id_label = args[:id_label] if args.key?(:id_label)
  @drop_late_data = args[:drop_late_data] if args.key?(:drop_late_data)
  @tracking_subscription = args[:tracking_subscription] if args.key?(:tracking_subscription)
+ @with_attributes = args[:with_attributes] if args.key?(:with_attributes)
  end
  end

@@ -3887,6 +4008,14 @@ module Google
  # @return [Google::Apis::DataflowV1b3::WorkerMessageCode]
  attr_accessor :worker_message_code

+ # Worker metrics exported from workers. This contains resource utilization
+ # metrics accumulated from a variety of sources. For more information, see go/df-
+ # resource-signals. Note that this proto closely follows the structure of its
+ # DFE siblings in its contents.
+ # Corresponds to the JSON property `workerMetrics`
+ # @return [Google::Apis::DataflowV1b3::ResourceUtilizationReport]
+ attr_accessor :worker_metrics
+
  def initialize(**args)
  update!(**args)
  end
@@ -3897,6 +4026,7 @@ module Google
  @time = args[:time] if args.key?(:time)
  @worker_health_report = args[:worker_health_report] if args.key?(:worker_health_report)
  @worker_message_code = args[:worker_message_code] if args.key?(:worker_message_code)
+ @worker_metrics = args[:worker_metrics] if args.key?(:worker_metrics)
  end
  end

@@ -3988,6 +4118,29 @@ module Google
  end
  end

+ # Worker metrics exported from workers. This contains resource utilization
+ # metrics accumulated from a variety of sources. For more information, see go/df-
+ # resource-signals. Note that this proto closely follows the structure of its
+ # DFE siblings in its contents.
+ class ResourceUtilizationReport
+ include Google::Apis::Core::Hashable
+
+ # Each Struct must parallel DFE worker metrics protos (eg., cpu_time metric will
+ # have nested values “timestamp_ms, total_ms, rate”).
+ # Corresponds to the JSON property `metrics`
+ # @return [Array<Hash<String,Object>>]
+ attr_accessor :metrics
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @metrics = args[:metrics] if args.key?(:metrics)
+ end
+ end
+
  # The response to the worker messages.
  class SendWorkerMessagesResponse
  include Google::Apis::Core::Hashable
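
The new worker-metrics plumbing above adds a ResourceUtilizationReport that rides along on a worker message. A hedged sketch of the payload; attaching it to WorkerMessage and the contents of the metric hash are illustrative assumptions, since only the accessor names appear in this diff:

# ResourceUtilizationReport and worker_metrics are from this diff; the metric
# hash contents and the WorkerMessage wrapper are illustrative assumptions.
report = Google::Apis::DataflowV1b3::ResourceUtilizationReport.new(
  metrics: [
    { 'name' => 'cpu_time', 'timestamp_ms' => 1_475_539_200_000, 'total_ms' => 5231, 'rate' => 0.42 }
  ]
)
message = Google::Apis::DataflowV1b3::WorkerMessage.new(
  time: '2016-10-04T00:00:00Z',   # RFC 3339 timestamp string
  worker_metrics: report
)
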
@@ -4017,6 +4170,11 @@ module Google
  # @return [Google::Apis::DataflowV1b3::WorkerHealthReportResponse]
  attr_accessor :worker_health_report_response

+ # Service-side response to WorkerMessage reporting resource utilization.
+ # Corresponds to the JSON property `workerMetricsResponse`
+ # @return [Google::Apis::DataflowV1b3::ResourceUtilizationReportResponse]
+ attr_accessor :worker_metrics_response
+
  def initialize(**args)
  update!(**args)
  end
@@ -4024,6 +4182,7 @@ module Google
  # Update properties of this object
  def update!(**args)
  @worker_health_report_response = args[:worker_health_report_response] if args.key?(:worker_health_report_response)
+ @worker_metrics_response = args[:worker_metrics_response] if args.key?(:worker_metrics_response)
  end
  end

@@ -4048,6 +4207,19 @@ module Google
  @report_interval = args[:report_interval] if args.key?(:report_interval)
  end
  end
+
+ # Service-side response to WorkerMessage reporting resource utilization.
+ class ResourceUtilizationReportResponse
+ include Google::Apis::Core::Hashable
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ end
+ end
  end
  end
  end