google-api-client 0.28.4 → 0.29.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (750):
  1. checksums.yaml +4 -4
  2. data/.kokoro/build.bat +9 -6
  3. data/.kokoro/build.sh +2 -34
  4. data/.kokoro/continuous/common.cfg +6 -1
  5. data/.kokoro/continuous/linux.cfg +1 -1
  6. data/.kokoro/continuous/windows.cfg +17 -1
  7. data/.kokoro/osx.sh +2 -33
  8. data/.kokoro/presubmit/common.cfg +6 -1
  9. data/.kokoro/presubmit/linux.cfg +1 -1
  10. data/.kokoro/presubmit/windows.cfg +17 -1
  11. data/.kokoro/trampoline.bat +10 -0
  12. data/.kokoro/trampoline.sh +3 -23
  13. data/CHANGELOG.md +460 -0
  14. data/README.md +1 -1
  15. data/Rakefile +31 -0
  16. data/bin/generate-api +4 -2
  17. data/generated/google/apis/abusiveexperiencereport_v1/service.rb +2 -2
  18. data/generated/google/apis/acceleratedmobilepageurl_v1/service.rb +1 -1
  19. data/generated/google/apis/accessapproval_v1beta1/classes.rb +333 -0
  20. data/generated/google/apis/accessapproval_v1beta1/representations.rb +174 -0
  21. data/generated/google/apis/accessapproval_v1beta1/service.rb +728 -0
  22. data/generated/google/apis/accessapproval_v1beta1.rb +34 -0
  23. data/generated/google/apis/accesscontextmanager_v1/classes.rb +755 -0
  24. data/generated/google/apis/accesscontextmanager_v1/representations.rb +282 -0
  25. data/generated/google/apis/accesscontextmanager_v1/service.rb +788 -0
  26. data/generated/google/apis/accesscontextmanager_v1.rb +34 -0
  27. data/generated/google/apis/accesscontextmanager_v1beta/classes.rb +47 -31
  28. data/generated/google/apis/accesscontextmanager_v1beta/representations.rb +4 -0
  29. data/generated/google/apis/accesscontextmanager_v1beta/service.rb +16 -16
  30. data/generated/google/apis/accesscontextmanager_v1beta.rb +1 -1
  31. data/generated/google/apis/adexchangebuyer2_v2beta1/classes.rb +95 -200
  32. data/generated/google/apis/adexchangebuyer2_v2beta1/representations.rb +0 -32
  33. data/generated/google/apis/adexchangebuyer2_v2beta1/service.rb +64 -104
  34. data/generated/google/apis/adexchangebuyer2_v2beta1.rb +1 -1
  35. data/generated/google/apis/adexchangebuyer_v1_2/service.rb +7 -7
  36. data/generated/google/apis/adexchangebuyer_v1_3/service.rb +21 -21
  37. data/generated/google/apis/adexchangebuyer_v1_4/service.rb +38 -38
  38. data/generated/google/apis/adexperiencereport_v1/service.rb +2 -2
  39. data/generated/google/apis/admin_datatransfer_v1/service.rb +5 -5
  40. data/generated/google/apis/admin_directory_v1/classes.rb +5 -50
  41. data/generated/google/apis/admin_directory_v1/representations.rb +0 -2
  42. data/generated/google/apis/admin_directory_v1/service.rb +113 -113
  43. data/generated/google/apis/admin_directory_v1.rb +1 -1
  44. data/generated/google/apis/admin_reports_v1/service.rb +6 -6
  45. data/generated/google/apis/admin_reports_v1.rb +1 -1
  46. data/generated/google/apis/adsense_v1_4/service.rb +39 -39
  47. data/generated/google/apis/adsensehost_v4_1/service.rb +26 -26
  48. data/generated/google/apis/alertcenter_v1beta1/classes.rb +101 -2
  49. data/generated/google/apis/alertcenter_v1beta1/representations.rb +25 -0
  50. data/generated/google/apis/alertcenter_v1beta1/service.rb +17 -16
  51. data/generated/google/apis/alertcenter_v1beta1.rb +1 -1
  52. data/generated/google/apis/analytics_v2_4/service.rb +6 -6
  53. data/generated/google/apis/analytics_v3/service.rb +88 -88
  54. data/generated/google/apis/analyticsreporting_v4/classes.rb +638 -0
  55. data/generated/google/apis/analyticsreporting_v4/representations.rb +248 -0
  56. data/generated/google/apis/analyticsreporting_v4/service.rb +31 -1
  57. data/generated/google/apis/analyticsreporting_v4.rb +1 -1
  58. data/generated/google/apis/androiddeviceprovisioning_v1/classes.rb +51 -11
  59. data/generated/google/apis/androiddeviceprovisioning_v1/representations.rb +6 -0
  60. data/generated/google/apis/androiddeviceprovisioning_v1/service.rb +26 -26
  61. data/generated/google/apis/androiddeviceprovisioning_v1.rb +1 -1
  62. data/generated/google/apis/androidenterprise_v1/classes.rb +26 -30
  63. data/generated/google/apis/androidenterprise_v1/representations.rb +2 -14
  64. data/generated/google/apis/androidenterprise_v1/service.rb +85 -121
  65. data/generated/google/apis/androidenterprise_v1.rb +1 -1
  66. data/generated/google/apis/androidmanagement_v1/classes.rb +358 -4
  67. data/generated/google/apis/androidmanagement_v1/representations.rb +163 -0
  68. data/generated/google/apis/androidmanagement_v1/service.rb +191 -21
  69. data/generated/google/apis/androidmanagement_v1.rb +1 -1
  70. data/generated/google/apis/androidpublisher_v1/service.rb +2 -2
  71. data/generated/google/apis/androidpublisher_v1_1/service.rb +3 -3
  72. data/generated/google/apis/androidpublisher_v2/service.rb +64 -70
  73. data/generated/google/apis/androidpublisher_v2.rb +1 -1
  74. data/generated/google/apis/androidpublisher_v3/classes.rb +113 -0
  75. data/generated/google/apis/androidpublisher_v3/representations.rb +58 -0
  76. data/generated/google/apis/androidpublisher_v3/service.rb +234 -64
  77. data/generated/google/apis/androidpublisher_v3.rb +1 -1
  78. data/generated/google/apis/appengine_v1/classes.rb +45 -100
  79. data/generated/google/apis/appengine_v1/representations.rb +17 -35
  80. data/generated/google/apis/appengine_v1/service.rb +45 -39
  81. data/generated/google/apis/appengine_v1.rb +1 -1
  82. data/generated/google/apis/appengine_v1alpha/classes.rb +2 -99
  83. data/generated/google/apis/appengine_v1alpha/representations.rb +0 -35
  84. data/generated/google/apis/appengine_v1alpha/service.rb +15 -15
  85. data/generated/google/apis/appengine_v1alpha.rb +1 -1
  86. data/generated/google/apis/appengine_v1beta/classes.rb +7 -102
  87. data/generated/google/apis/appengine_v1beta/representations.rb +0 -35
  88. data/generated/google/apis/appengine_v1beta/service.rb +45 -39
  89. data/generated/google/apis/appengine_v1beta.rb +1 -1
  90. data/generated/google/apis/appengine_v1beta4/service.rb +20 -20
  91. data/generated/google/apis/appengine_v1beta5/service.rb +20 -20
  92. data/generated/google/apis/appsactivity_v1/service.rb +5 -4
  93. data/generated/google/apis/appsactivity_v1.rb +1 -1
  94. data/generated/google/apis/appsmarket_v2/service.rb +3 -3
  95. data/generated/google/apis/appstate_v1/service.rb +5 -5
  96. data/generated/google/apis/bigquery_v2/classes.rb +1121 -114
  97. data/generated/google/apis/bigquery_v2/representations.rb +414 -26
  98. data/generated/google/apis/bigquery_v2/service.rb +184 -22
  99. data/generated/google/apis/bigquery_v2.rb +1 -1
  100. data/generated/google/apis/bigquerydatatransfer_v1/classes.rb +88 -10
  101. data/generated/google/apis/bigquerydatatransfer_v1/representations.rb +43 -0
  102. data/generated/google/apis/bigquerydatatransfer_v1/service.rb +142 -34
  103. data/generated/google/apis/bigquerydatatransfer_v1.rb +3 -3
  104. data/generated/google/apis/bigtableadmin_v1/service.rb +3 -3
  105. data/generated/google/apis/bigtableadmin_v1.rb +2 -2
  106. data/generated/google/apis/bigtableadmin_v2/classes.rb +14 -14
  107. data/generated/google/apis/bigtableadmin_v2/service.rb +142 -33
  108. data/generated/google/apis/bigtableadmin_v2.rb +2 -2
  109. data/generated/google/apis/binaryauthorization_v1beta1/classes.rb +66 -6
  110. data/generated/google/apis/binaryauthorization_v1beta1/representations.rb +17 -0
  111. data/generated/google/apis/binaryauthorization_v1beta1/service.rb +17 -13
  112. data/generated/google/apis/binaryauthorization_v1beta1.rb +1 -1
  113. data/generated/google/apis/blogger_v2/service.rb +9 -9
  114. data/generated/google/apis/blogger_v3/service.rb +33 -33
  115. data/generated/google/apis/books_v1/service.rb +51 -51
  116. data/generated/google/apis/calendar_v3/classes.rb +1 -1
  117. data/generated/google/apis/calendar_v3/service.rb +47 -47
  118. data/generated/google/apis/calendar_v3.rb +1 -1
  119. data/generated/google/apis/chat_v1/service.rb +8 -8
  120. data/generated/google/apis/civicinfo_v2/service.rb +5 -5
  121. data/generated/google/apis/classroom_v1/classes.rb +77 -0
  122. data/generated/google/apis/classroom_v1/representations.rb +32 -0
  123. data/generated/google/apis/classroom_v1/service.rb +276 -51
  124. data/generated/google/apis/classroom_v1.rb +7 -1
  125. data/generated/google/apis/cloudasset_v1/classes.rb +818 -0
  126. data/generated/google/apis/cloudasset_v1/representations.rb +264 -0
  127. data/generated/google/apis/cloudasset_v1/service.rb +191 -0
  128. data/generated/google/apis/cloudasset_v1.rb +34 -0
  129. data/generated/google/apis/cloudasset_v1beta1/classes.rb +33 -18
  130. data/generated/google/apis/cloudasset_v1beta1/representations.rb +1 -0
  131. data/generated/google/apis/cloudasset_v1beta1/service.rb +13 -13
  132. data/generated/google/apis/cloudasset_v1beta1.rb +2 -2
  133. data/generated/google/apis/cloudbilling_v1/classes.rb +1 -1
  134. data/generated/google/apis/cloudbilling_v1/service.rb +14 -14
  135. data/generated/google/apis/cloudbilling_v1.rb +1 -1
  136. data/generated/google/apis/cloudbuild_v1/classes.rb +162 -11
  137. data/generated/google/apis/cloudbuild_v1/representations.rb +67 -0
  138. data/generated/google/apis/cloudbuild_v1/service.rb +21 -15
  139. data/generated/google/apis/cloudbuild_v1.rb +1 -1
  140. data/generated/google/apis/cloudbuild_v1alpha1/classes.rb +7 -1
  141. data/generated/google/apis/cloudbuild_v1alpha1/representations.rb +2 -0
  142. data/generated/google/apis/cloudbuild_v1alpha1/service.rb +6 -6
  143. data/generated/google/apis/cloudbuild_v1alpha1.rb +1 -1
  144. data/generated/google/apis/clouddebugger_v2/service.rb +8 -8
  145. data/generated/google/apis/clouderrorreporting_v1beta1/classes.rb +19 -16
  146. data/generated/google/apis/clouderrorreporting_v1beta1/service.rb +12 -11
  147. data/generated/google/apis/clouderrorreporting_v1beta1.rb +1 -1
  148. data/generated/google/apis/cloudfunctions_v1/classes.rb +21 -17
  149. data/generated/google/apis/cloudfunctions_v1/service.rb +22 -16
  150. data/generated/google/apis/cloudfunctions_v1.rb +1 -1
  151. data/generated/google/apis/cloudfunctions_v1beta2/classes.rb +20 -16
  152. data/generated/google/apis/cloudfunctions_v1beta2/service.rb +17 -11
  153. data/generated/google/apis/cloudfunctions_v1beta2.rb +1 -1
  154. data/generated/google/apis/cloudidentity_v1/classes.rb +14 -14
  155. data/generated/google/apis/cloudidentity_v1/service.rb +18 -27
  156. data/generated/google/apis/cloudidentity_v1.rb +7 -1
  157. data/generated/google/apis/cloudidentity_v1beta1/classes.rb +11 -11
  158. data/generated/google/apis/cloudidentity_v1beta1/service.rb +15 -21
  159. data/generated/google/apis/cloudidentity_v1beta1.rb +7 -1
  160. data/generated/google/apis/cloudiot_v1/classes.rb +11 -11
  161. data/generated/google/apis/cloudiot_v1/service.rb +23 -330
  162. data/generated/google/apis/cloudiot_v1.rb +1 -1
  163. data/generated/google/apis/cloudkms_v1/classes.rb +7 -3
  164. data/generated/google/apis/cloudkms_v1/service.rb +30 -30
  165. data/generated/google/apis/cloudkms_v1.rb +1 -1
  166. data/generated/google/apis/cloudprivatecatalog_v1beta1/classes.rb +358 -0
  167. data/generated/google/apis/cloudprivatecatalog_v1beta1/representations.rb +123 -0
  168. data/generated/google/apis/cloudprivatecatalog_v1beta1/service.rb +486 -0
  169. data/generated/google/apis/cloudprivatecatalog_v1beta1.rb +35 -0
  170. data/generated/google/apis/cloudprivatecatalogproducer_v1beta1/classes.rb +1212 -0
  171. data/generated/google/apis/cloudprivatecatalogproducer_v1beta1/representations.rb +399 -0
  172. data/generated/google/apis/cloudprivatecatalogproducer_v1beta1/service.rb +1073 -0
  173. data/generated/google/apis/cloudprivatecatalogproducer_v1beta1.rb +35 -0
  174. data/generated/google/apis/cloudprofiler_v2/service.rb +3 -3
  175. data/generated/google/apis/cloudresourcemanager_v1/classes.rb +24 -22
  176. data/generated/google/apis/cloudresourcemanager_v1/service.rb +68 -59
  177. data/generated/google/apis/cloudresourcemanager_v1.rb +1 -1
  178. data/generated/google/apis/cloudresourcemanager_v1beta1/classes.rb +3 -3
  179. data/generated/google/apis/cloudresourcemanager_v1beta1/service.rb +53 -42
  180. data/generated/google/apis/cloudresourcemanager_v1beta1.rb +1 -1
  181. data/generated/google/apis/cloudresourcemanager_v2/classes.rb +15 -16
  182. data/generated/google/apis/cloudresourcemanager_v2/service.rb +13 -13
  183. data/generated/google/apis/cloudresourcemanager_v2.rb +1 -1
  184. data/generated/google/apis/cloudresourcemanager_v2beta1/classes.rb +15 -16
  185. data/generated/google/apis/cloudresourcemanager_v2beta1/service.rb +13 -13
  186. data/generated/google/apis/cloudresourcemanager_v2beta1.rb +1 -1
  187. data/generated/google/apis/cloudscheduler_v1/classes.rb +994 -0
  188. data/generated/google/apis/cloudscheduler_v1/representations.rb +297 -0
  189. data/generated/google/apis/cloudscheduler_v1/service.rb +448 -0
  190. data/generated/google/apis/cloudscheduler_v1.rb +34 -0
  191. data/generated/google/apis/cloudscheduler_v1beta1/classes.rb +160 -44
  192. data/generated/google/apis/cloudscheduler_v1beta1/representations.rb +33 -0
  193. data/generated/google/apis/cloudscheduler_v1beta1/service.rb +15 -12
  194. data/generated/google/apis/cloudscheduler_v1beta1.rb +1 -1
  195. data/generated/google/apis/cloudsearch_v1/classes.rb +245 -59
  196. data/generated/google/apis/cloudsearch_v1/representations.rb +91 -0
  197. data/generated/google/apis/cloudsearch_v1/service.rb +86 -80
  198. data/generated/google/apis/cloudsearch_v1.rb +1 -1
  199. data/generated/google/apis/cloudshell_v1/classes.rb +11 -11
  200. data/generated/google/apis/cloudshell_v1/service.rb +4 -4
  201. data/generated/google/apis/cloudshell_v1.rb +1 -1
  202. data/generated/google/apis/cloudshell_v1alpha1/classes.rb +24 -11
  203. data/generated/google/apis/cloudshell_v1alpha1/representations.rb +2 -0
  204. data/generated/google/apis/cloudshell_v1alpha1/service.rb +11 -10
  205. data/generated/google/apis/cloudshell_v1alpha1.rb +1 -1
  206. data/generated/google/apis/cloudtasks_v2/classes.rb +1436 -0
  207. data/generated/google/apis/cloudtasks_v2/representations.rb +408 -0
  208. data/generated/google/apis/cloudtasks_v2/service.rb +856 -0
  209. data/generated/google/apis/{partners_v2.rb → cloudtasks_v2.rb} +11 -9
  210. data/generated/google/apis/cloudtasks_v2beta2/classes.rb +141 -102
  211. data/generated/google/apis/cloudtasks_v2beta2/service.rb +44 -43
  212. data/generated/google/apis/cloudtasks_v2beta2.rb +1 -1
  213. data/generated/google/apis/cloudtasks_v2beta3/classes.rb +388 -108
  214. data/generated/google/apis/cloudtasks_v2beta3/representations.rb +65 -0
  215. data/generated/google/apis/cloudtasks_v2beta3/service.rb +40 -39
  216. data/generated/google/apis/cloudtasks_v2beta3.rb +1 -1
  217. data/generated/google/apis/cloudtrace_v1/service.rb +3 -3
  218. data/generated/google/apis/cloudtrace_v2/classes.rb +10 -10
  219. data/generated/google/apis/cloudtrace_v2/service.rb +2 -2
  220. data/generated/google/apis/cloudtrace_v2.rb +1 -1
  221. data/generated/google/apis/commentanalyzer_v1alpha1/classes.rb +484 -0
  222. data/generated/google/apis/commentanalyzer_v1alpha1/representations.rb +210 -0
  223. data/generated/google/apis/commentanalyzer_v1alpha1/service.rb +124 -0
  224. data/generated/google/apis/commentanalyzer_v1alpha1.rb +39 -0
  225. data/generated/google/apis/composer_v1/classes.rb +21 -15
  226. data/generated/google/apis/composer_v1/service.rb +9 -9
  227. data/generated/google/apis/composer_v1.rb +1 -1
  228. data/generated/google/apis/composer_v1beta1/classes.rb +175 -36
  229. data/generated/google/apis/composer_v1beta1/representations.rb +50 -0
  230. data/generated/google/apis/composer_v1beta1/service.rb +9 -9
  231. data/generated/google/apis/composer_v1beta1.rb +1 -1
  232. data/generated/google/apis/compute_alpha/classes.rb +10112 -7289
  233. data/generated/google/apis/compute_alpha/representations.rb +1337 -219
  234. data/generated/google/apis/compute_alpha/service.rb +4259 -2728
  235. data/generated/google/apis/compute_alpha.rb +1 -1
  236. data/generated/google/apis/compute_beta/classes.rb +4254 -2781
  237. data/generated/google/apis/compute_beta/representations.rb +853 -283
  238. data/generated/google/apis/compute_beta/service.rb +7077 -5955
  239. data/generated/google/apis/compute_beta.rb +1 -1
  240. data/generated/google/apis/compute_v1/classes.rb +1259 -93
  241. data/generated/google/apis/compute_v1/representations.rb +450 -1
  242. data/generated/google/apis/compute_v1/service.rb +1085 -400
  243. data/generated/google/apis/compute_v1.rb +1 -1
  244. data/generated/google/apis/container_v1/classes.rb +201 -22
  245. data/generated/google/apis/container_v1/representations.rb +69 -0
  246. data/generated/google/apis/container_v1/service.rb +151 -102
  247. data/generated/google/apis/container_v1.rb +1 -1
  248. data/generated/google/apis/container_v1beta1/classes.rb +215 -25
  249. data/generated/google/apis/container_v1beta1/representations.rb +86 -0
  250. data/generated/google/apis/container_v1beta1/service.rb +106 -106
  251. data/generated/google/apis/container_v1beta1.rb +1 -1
  252. data/generated/google/apis/containeranalysis_v1alpha1/classes.rb +26 -18
  253. data/generated/google/apis/containeranalysis_v1alpha1/representations.rb +1 -0
  254. data/generated/google/apis/containeranalysis_v1alpha1/service.rb +33 -33
  255. data/generated/google/apis/containeranalysis_v1alpha1.rb +1 -1
  256. data/generated/google/apis/containeranalysis_v1beta1/classes.rb +226 -12
  257. data/generated/google/apis/containeranalysis_v1beta1/representations.rb +58 -0
  258. data/generated/google/apis/containeranalysis_v1beta1/service.rb +24 -24
  259. data/generated/google/apis/containeranalysis_v1beta1.rb +1 -1
  260. data/generated/google/apis/content_v2/classes.rb +218 -101
  261. data/generated/google/apis/content_v2/representations.rb +49 -0
  262. data/generated/google/apis/content_v2/service.rb +189 -152
  263. data/generated/google/apis/content_v2.rb +1 -1
  264. data/generated/google/apis/content_v2_1/classes.rb +387 -216
  265. data/generated/google/apis/content_v2_1/representations.rb +131 -56
  266. data/generated/google/apis/content_v2_1/service.rb +190 -107
  267. data/generated/google/apis/content_v2_1.rb +1 -1
  268. data/generated/google/apis/customsearch_v1/service.rb +2 -2
  269. data/generated/google/apis/dataflow_v1b3/classes.rb +148 -31
  270. data/generated/google/apis/dataflow_v1b3/representations.rb +45 -0
  271. data/generated/google/apis/dataflow_v1b3/service.rb +415 -56
  272. data/generated/google/apis/dataflow_v1b3.rb +1 -1
  273. data/generated/google/apis/datafusion_v1beta1/classes.rb +1304 -0
  274. data/generated/google/apis/datafusion_v1beta1/representations.rb +469 -0
  275. data/generated/google/apis/datafusion_v1beta1/service.rb +657 -0
  276. data/generated/google/apis/datafusion_v1beta1.rb +43 -0
  277. data/generated/google/apis/dataproc_v1/classes.rb +27 -22
  278. data/generated/google/apis/dataproc_v1/representations.rb +1 -0
  279. data/generated/google/apis/dataproc_v1/service.rb +261 -45
  280. data/generated/google/apis/dataproc_v1.rb +1 -1
  281. data/generated/google/apis/dataproc_v1beta2/classes.rb +534 -50
  282. data/generated/google/apis/dataproc_v1beta2/representations.rb +185 -7
  283. data/generated/google/apis/dataproc_v1beta2/service.rb +617 -51
  284. data/generated/google/apis/dataproc_v1beta2.rb +1 -1
  285. data/generated/google/apis/datastore_v1/classes.rb +20 -16
  286. data/generated/google/apis/datastore_v1/service.rb +15 -15
  287. data/generated/google/apis/datastore_v1.rb +1 -1
  288. data/generated/google/apis/datastore_v1beta1/classes.rb +10 -10
  289. data/generated/google/apis/datastore_v1beta1/service.rb +2 -2
  290. data/generated/google/apis/datastore_v1beta1.rb +1 -1
  291. data/generated/google/apis/datastore_v1beta3/classes.rb +10 -6
  292. data/generated/google/apis/datastore_v1beta3/service.rb +7 -7
  293. data/generated/google/apis/datastore_v1beta3.rb +1 -1
  294. data/generated/google/apis/deploymentmanager_alpha/service.rb +37 -37
  295. data/generated/google/apis/deploymentmanager_v2/service.rb +18 -18
  296. data/generated/google/apis/deploymentmanager_v2beta/service.rb +32 -32
  297. data/generated/google/apis/dfareporting_v3_1/service.rb +206 -206
  298. data/generated/google/apis/dfareporting_v3_2/service.rb +206 -206
  299. data/generated/google/apis/dfareporting_v3_3/classes.rb +3 -3
  300. data/generated/google/apis/dfareporting_v3_3/service.rb +204 -204
  301. data/generated/google/apis/dfareporting_v3_3.rb +1 -1
  302. data/generated/google/apis/dialogflow_v2/classes.rb +367 -82
  303. data/generated/google/apis/dialogflow_v2/representations.rb +99 -0
  304. data/generated/google/apis/dialogflow_v2/service.rb +76 -60
  305. data/generated/google/apis/dialogflow_v2.rb +1 -1
  306. data/generated/google/apis/dialogflow_v2beta1/classes.rb +199 -88
  307. data/generated/google/apis/dialogflow_v2beta1/representations.rb +31 -0
  308. data/generated/google/apis/dialogflow_v2beta1/service.rb +154 -94
  309. data/generated/google/apis/dialogflow_v2beta1.rb +1 -1
  310. data/generated/google/apis/digitalassetlinks_v1/service.rb +7 -6
  311. data/generated/google/apis/digitalassetlinks_v1.rb +1 -1
  312. data/generated/google/apis/discovery_v1/service.rb +2 -2
  313. data/generated/google/apis/dlp_v2/classes.rb +116 -45
  314. data/generated/google/apis/dlp_v2/representations.rb +32 -0
  315. data/generated/google/apis/dlp_v2/service.rb +85 -45
  316. data/generated/google/apis/dlp_v2.rb +1 -1
  317. data/generated/google/apis/dns_v1/classes.rb +83 -1
  318. data/generated/google/apis/dns_v1/representations.rb +34 -0
  319. data/generated/google/apis/dns_v1/service.rb +15 -15
  320. data/generated/google/apis/dns_v1.rb +1 -1
  321. data/generated/google/apis/dns_v1beta2/classes.rb +81 -1
  322. data/generated/google/apis/dns_v1beta2/representations.rb +33 -0
  323. data/generated/google/apis/dns_v1beta2/service.rb +21 -21
  324. data/generated/google/apis/dns_v1beta2.rb +1 -1
  325. data/generated/google/apis/dns_v2beta1/classes.rb +83 -1
  326. data/generated/google/apis/dns_v2beta1/representations.rb +34 -0
  327. data/generated/google/apis/dns_v2beta1/service.rb +16 -16
  328. data/generated/google/apis/dns_v2beta1.rb +1 -1
  329. data/generated/google/apis/docs_v1/classes.rb +265 -47
  330. data/generated/google/apis/docs_v1/representations.rb +96 -0
  331. data/generated/google/apis/docs_v1/service.rb +3 -3
  332. data/generated/google/apis/docs_v1.rb +1 -1
  333. data/generated/google/apis/doubleclickbidmanager_v1/classes.rb +6 -4
  334. data/generated/google/apis/doubleclickbidmanager_v1/service.rb +9 -9
  335. data/generated/google/apis/doubleclickbidmanager_v1.rb +1 -1
  336. data/generated/google/apis/doubleclicksearch_v2/service.rb +10 -10
  337. data/generated/google/apis/drive_v2/classes.rb +601 -80
  338. data/generated/google/apis/drive_v2/representations.rb +152 -0
  339. data/generated/google/apis/drive_v2/service.rb +574 -164
  340. data/generated/google/apis/drive_v2.rb +1 -1
  341. data/generated/google/apis/drive_v3/classes.rb +591 -75
  342. data/generated/google/apis/drive_v3/representations.rb +151 -0
  343. data/generated/google/apis/drive_v3/service.rb +483 -116
  344. data/generated/google/apis/drive_v3.rb +1 -1
  345. data/generated/google/apis/driveactivity_v2/classes.rb +149 -17
  346. data/generated/google/apis/driveactivity_v2/representations.rb +69 -0
  347. data/generated/google/apis/driveactivity_v2/service.rb +1 -1
  348. data/generated/google/apis/driveactivity_v2.rb +1 -1
  349. data/generated/google/apis/factchecktools_v1alpha1/classes.rb +459 -0
  350. data/generated/google/apis/factchecktools_v1alpha1/representations.rb +207 -0
  351. data/generated/google/apis/factchecktools_v1alpha1/service.rb +300 -0
  352. data/generated/google/apis/factchecktools_v1alpha1.rb +34 -0
  353. data/generated/google/apis/fcm_v1/classes.rb +424 -0
  354. data/generated/google/apis/fcm_v1/representations.rb +167 -0
  355. data/generated/google/apis/fcm_v1/service.rb +97 -0
  356. data/generated/google/apis/fcm_v1.rb +35 -0
  357. data/generated/google/apis/file_v1/classes.rb +646 -11
  358. data/generated/google/apis/file_v1/representations.rb +207 -0
  359. data/generated/google/apis/file_v1/service.rb +196 -6
  360. data/generated/google/apis/file_v1.rb +1 -1
  361. data/generated/google/apis/file_v1beta1/classes.rb +461 -19
  362. data/generated/google/apis/file_v1beta1/representations.rb +137 -0
  363. data/generated/google/apis/file_v1beta1/service.rb +11 -11
  364. data/generated/google/apis/file_v1beta1.rb +1 -1
  365. data/generated/google/apis/firebasedynamiclinks_v1/classes.rb +41 -14
  366. data/generated/google/apis/firebasedynamiclinks_v1/representations.rb +4 -0
  367. data/generated/google/apis/firebasedynamiclinks_v1/service.rb +5 -5
  368. data/generated/google/apis/firebasedynamiclinks_v1.rb +1 -1
  369. data/generated/google/apis/firebasehosting_v1beta1/classes.rb +13 -13
  370. data/generated/google/apis/firebasehosting_v1beta1/service.rb +14 -14
  371. data/generated/google/apis/firebasehosting_v1beta1.rb +1 -1
  372. data/generated/google/apis/firebaserules_v1/classes.rb +10 -2
  373. data/generated/google/apis/firebaserules_v1/service.rb +12 -12
  374. data/generated/google/apis/firebaserules_v1.rb +1 -1
  375. data/generated/google/apis/firestore_v1/classes.rb +15 -15
  376. data/generated/google/apis/firestore_v1/service.rb +28 -28
  377. data/generated/google/apis/firestore_v1.rb +1 -1
  378. data/generated/google/apis/firestore_v1beta1/classes.rb +15 -15
  379. data/generated/google/apis/firestore_v1beta1/service.rb +19 -19
  380. data/generated/google/apis/firestore_v1beta1.rb +1 -1
  381. data/generated/google/apis/firestore_v1beta2/classes.rb +10 -10
  382. data/generated/google/apis/firestore_v1beta2/service.rb +9 -9
  383. data/generated/google/apis/firestore_v1beta2.rb +1 -1
  384. data/generated/google/apis/fitness_v1/classes.rb +4 -1
  385. data/generated/google/apis/fitness_v1/service.rb +14 -58
  386. data/generated/google/apis/fitness_v1.rb +1 -1
  387. data/generated/google/apis/fusiontables_v1/service.rb +32 -32
  388. data/generated/google/apis/fusiontables_v2/service.rb +34 -34
  389. data/generated/google/apis/games_configuration_v1configuration/service.rb +13 -13
  390. data/generated/google/apis/games_management_v1management/service.rb +27 -27
  391. data/generated/google/apis/games_management_v1management.rb +2 -2
  392. data/generated/google/apis/games_v1/service.rb +53 -53
  393. data/generated/google/apis/games_v1.rb +3 -3
  394. data/generated/google/apis/genomics_v1/classes.rb +190 -3321
  395. data/generated/google/apis/genomics_v1/representations.rb +128 -1265
  396. data/generated/google/apis/genomics_v1/service.rb +75 -1982
  397. data/generated/google/apis/genomics_v1.rb +1 -10
  398. data/generated/google/apis/genomics_v1alpha2/classes.rb +13 -53
  399. data/generated/google/apis/genomics_v1alpha2/representations.rb +0 -26
  400. data/generated/google/apis/genomics_v1alpha2/service.rb +11 -12
  401. data/generated/google/apis/genomics_v1alpha2.rb +1 -1
  402. data/generated/google/apis/genomics_v2alpha1/classes.rb +26 -58
  403. data/generated/google/apis/genomics_v2alpha1/representations.rb +1 -26
  404. data/generated/google/apis/genomics_v2alpha1/service.rb +6 -7
  405. data/generated/google/apis/genomics_v2alpha1.rb +1 -1
  406. data/generated/google/apis/gmail_v1/classes.rb +29 -0
  407. data/generated/google/apis/gmail_v1/representations.rb +13 -0
  408. data/generated/google/apis/gmail_v1/service.rb +142 -66
  409. data/generated/google/apis/gmail_v1.rb +1 -1
  410. data/generated/google/apis/groupsmigration_v1/service.rb +1 -1
  411. data/generated/google/apis/groupssettings_v1/classes.rb +126 -1
  412. data/generated/google/apis/groupssettings_v1/representations.rb +18 -0
  413. data/generated/google/apis/groupssettings_v1/service.rb +4 -4
  414. data/generated/google/apis/groupssettings_v1.rb +2 -2
  415. data/generated/google/apis/healthcare_v1alpha2/classes.rb +2849 -0
  416. data/generated/google/apis/healthcare_v1alpha2/representations.rb +1260 -0
  417. data/generated/google/apis/healthcare_v1alpha2/service.rb +4011 -0
  418. data/generated/google/apis/healthcare_v1alpha2.rb +34 -0
  419. data/generated/google/apis/healthcare_v1beta1/classes.rb +2464 -0
  420. data/generated/google/apis/healthcare_v1beta1/representations.rb +1042 -0
  421. data/generated/google/apis/healthcare_v1beta1/service.rb +3413 -0
  422. data/generated/google/apis/healthcare_v1beta1.rb +34 -0
  423. data/generated/google/apis/iam_v1/classes.rb +171 -1
  424. data/generated/google/apis/iam_v1/representations.rb +95 -0
  425. data/generated/google/apis/iam_v1/service.rb +249 -39
  426. data/generated/google/apis/iam_v1.rb +1 -1
  427. data/generated/google/apis/iamcredentials_v1/classes.rb +8 -4
  428. data/generated/google/apis/iamcredentials_v1/service.rb +15 -10
  429. data/generated/google/apis/iamcredentials_v1.rb +1 -1
  430. data/generated/google/apis/iap_v1/classes.rb +1 -1
  431. data/generated/google/apis/iap_v1/service.rb +3 -3
  432. data/generated/google/apis/iap_v1.rb +1 -1
  433. data/generated/google/apis/iap_v1beta1/classes.rb +1 -1
  434. data/generated/google/apis/iap_v1beta1/service.rb +3 -3
  435. data/generated/google/apis/iap_v1beta1.rb +1 -1
  436. data/generated/google/apis/identitytoolkit_v3/service.rb +20 -20
  437. data/generated/google/apis/indexing_v3/service.rb +2 -2
  438. data/generated/google/apis/jobs_v2/classes.rb +16 -17
  439. data/generated/google/apis/jobs_v2/service.rb +17 -17
  440. data/generated/google/apis/jobs_v2.rb +1 -1
  441. data/generated/google/apis/jobs_v3/classes.rb +14 -8
  442. data/generated/google/apis/jobs_v3/service.rb +16 -17
  443. data/generated/google/apis/jobs_v3.rb +1 -1
  444. data/generated/google/apis/jobs_v3p1beta1/classes.rb +26 -20
  445. data/generated/google/apis/jobs_v3p1beta1/service.rb +17 -18
  446. data/generated/google/apis/jobs_v3p1beta1.rb +1 -1
  447. data/generated/google/apis/kgsearch_v1/service.rb +1 -1
  448. data/generated/google/apis/language_v1/classes.rb +8 -7
  449. data/generated/google/apis/language_v1/service.rb +6 -6
  450. data/generated/google/apis/language_v1.rb +1 -1
  451. data/generated/google/apis/language_v1beta1/classes.rb +5 -5
  452. data/generated/google/apis/language_v1beta1/service.rb +4 -4
  453. data/generated/google/apis/language_v1beta1.rb +1 -1
  454. data/generated/google/apis/language_v1beta2/classes.rb +8 -7
  455. data/generated/google/apis/language_v1beta2/service.rb +6 -6
  456. data/generated/google/apis/language_v1beta2.rb +1 -1
  457. data/generated/google/apis/libraryagent_v1/service.rb +6 -6
  458. data/generated/google/apis/licensing_v1/service.rb +7 -7
  459. data/generated/google/apis/logging_v2/classes.rb +8 -3
  460. data/generated/google/apis/logging_v2/representations.rb +1 -0
  461. data/generated/google/apis/logging_v2/service.rb +72 -72
  462. data/generated/google/apis/logging_v2.rb +1 -1
  463. data/generated/google/apis/manufacturers_v1/service.rb +4 -4
  464. data/generated/google/apis/mirror_v1/service.rb +24 -24
  465. data/generated/google/apis/ml_v1/classes.rb +240 -52
  466. data/generated/google/apis/ml_v1/representations.rb +25 -2
  467. data/generated/google/apis/ml_v1/service.rb +36 -36
  468. data/generated/google/apis/ml_v1.rb +1 -1
  469. data/generated/google/apis/monitoring_v3/classes.rb +22 -18
  470. data/generated/google/apis/monitoring_v3/representations.rb +2 -1
  471. data/generated/google/apis/monitoring_v3/service.rb +42 -37
  472. data/generated/google/apis/monitoring_v3.rb +1 -1
  473. data/generated/google/apis/oauth2_v1/classes.rb +0 -124
  474. data/generated/google/apis/oauth2_v1/representations.rb +0 -62
  475. data/generated/google/apis/oauth2_v1/service.rb +3 -162
  476. data/generated/google/apis/oauth2_v1.rb +3 -6
  477. data/generated/google/apis/oauth2_v2/service.rb +4 -4
  478. data/generated/google/apis/oauth2_v2.rb +3 -6
  479. data/generated/google/apis/oslogin_v1/service.rb +8 -7
  480. data/generated/google/apis/oslogin_v1.rb +3 -2
  481. data/generated/google/apis/oslogin_v1alpha/service.rb +8 -7
  482. data/generated/google/apis/oslogin_v1alpha.rb +3 -2
  483. data/generated/google/apis/oslogin_v1beta/service.rb +8 -7
  484. data/generated/google/apis/oslogin_v1beta.rb +3 -2
  485. data/generated/google/apis/pagespeedonline_v1/service.rb +1 -1
  486. data/generated/google/apis/pagespeedonline_v2/service.rb +1 -1
  487. data/generated/google/apis/pagespeedonline_v4/service.rb +1 -1
  488. data/generated/google/apis/pagespeedonline_v5/classes.rb +43 -0
  489. data/generated/google/apis/pagespeedonline_v5/representations.rb +18 -0
  490. data/generated/google/apis/pagespeedonline_v5/service.rb +1 -1
  491. data/generated/google/apis/pagespeedonline_v5.rb +1 -1
  492. data/generated/google/apis/people_v1/classes.rb +38 -29
  493. data/generated/google/apis/people_v1/representations.rb +1 -0
  494. data/generated/google/apis/people_v1/service.rb +18 -13
  495. data/generated/google/apis/people_v1.rb +2 -5
  496. data/generated/google/apis/playcustomapp_v1/service.rb +1 -1
  497. data/generated/google/apis/plus_domains_v1/service.rb +18 -392
  498. data/generated/google/apis/plus_domains_v1.rb +4 -10
  499. data/generated/google/apis/plus_v1/service.rb +16 -16
  500. data/generated/google/apis/plus_v1.rb +4 -4
  501. data/generated/google/apis/poly_v1/classes.rb +8 -6
  502. data/generated/google/apis/poly_v1/service.rb +15 -12
  503. data/generated/google/apis/poly_v1.rb +1 -1
  504. data/generated/google/apis/proximitybeacon_v1beta1/classes.rb +8 -6
  505. data/generated/google/apis/proximitybeacon_v1beta1/service.rb +17 -17
  506. data/generated/google/apis/proximitybeacon_v1beta1.rb +1 -1
  507. data/generated/google/apis/pubsub_v1/classes.rb +55 -39
  508. data/generated/google/apis/pubsub_v1/representations.rb +16 -0
  509. data/generated/google/apis/pubsub_v1/service.rb +46 -69
  510. data/generated/google/apis/pubsub_v1.rb +1 -1
  511. data/generated/google/apis/pubsub_v1beta1a/service.rb +15 -15
  512. data/generated/google/apis/pubsub_v1beta2/classes.rb +45 -1
  513. data/generated/google/apis/pubsub_v1beta2/representations.rb +16 -0
  514. data/generated/google/apis/pubsub_v1beta2/service.rb +20 -20
  515. data/generated/google/apis/pubsub_v1beta2.rb +1 -1
  516. data/generated/google/apis/redis_v1/classes.rb +30 -10
  517. data/generated/google/apis/redis_v1/representations.rb +13 -0
  518. data/generated/google/apis/redis_v1/service.rb +51 -15
  519. data/generated/google/apis/redis_v1.rb +1 -1
  520. data/generated/google/apis/redis_v1beta1/classes.rb +18 -21
  521. data/generated/google/apis/redis_v1beta1/representations.rb +0 -1
  522. data/generated/google/apis/redis_v1beta1/service.rb +15 -15
  523. data/generated/google/apis/redis_v1beta1.rb +1 -1
  524. data/generated/google/apis/remotebuildexecution_v1/classes.rb +50 -35
  525. data/generated/google/apis/remotebuildexecution_v1/representations.rb +2 -0
  526. data/generated/google/apis/remotebuildexecution_v1/service.rb +7 -7
  527. data/generated/google/apis/remotebuildexecution_v1.rb +1 -1
  528. data/generated/google/apis/remotebuildexecution_v1alpha/classes.rb +48 -33
  529. data/generated/google/apis/remotebuildexecution_v1alpha/representations.rb +2 -0
  530. data/generated/google/apis/remotebuildexecution_v1alpha/service.rb +10 -10
  531. data/generated/google/apis/remotebuildexecution_v1alpha.rb +1 -1
  532. data/generated/google/apis/remotebuildexecution_v2/classes.rb +58 -43
  533. data/generated/google/apis/remotebuildexecution_v2/representations.rb +2 -0
  534. data/generated/google/apis/remotebuildexecution_v2/service.rb +9 -9
  535. data/generated/google/apis/remotebuildexecution_v2.rb +1 -1
  536. data/generated/google/apis/replicapool_v1beta1/service.rb +10 -10
  537. data/generated/google/apis/reseller_v1/classes.rb +32 -39
  538. data/generated/google/apis/reseller_v1/service.rb +18 -18
  539. data/generated/google/apis/reseller_v1.rb +1 -1
  540. data/generated/google/apis/run_v1/classes.rb +73 -0
  541. data/generated/google/apis/run_v1/representations.rb +43 -0
  542. data/generated/google/apis/run_v1/service.rb +90 -0
  543. data/generated/google/apis/run_v1.rb +35 -0
  544. data/generated/google/apis/run_v1alpha1/classes.rb +3882 -0
  545. data/generated/google/apis/run_v1alpha1/representations.rb +1425 -0
  546. data/generated/google/apis/run_v1alpha1/service.rb +2071 -0
  547. data/generated/google/apis/run_v1alpha1.rb +35 -0
  548. data/generated/google/apis/runtimeconfig_v1/classes.rb +11 -11
  549. data/generated/google/apis/runtimeconfig_v1/service.rb +3 -3
  550. data/generated/google/apis/runtimeconfig_v1.rb +1 -1
  551. data/generated/google/apis/runtimeconfig_v1beta1/classes.rb +26 -25
  552. data/generated/google/apis/runtimeconfig_v1beta1/service.rb +22 -22
  553. data/generated/google/apis/runtimeconfig_v1beta1.rb +1 -1
  554. data/generated/google/apis/safebrowsing_v4/service.rb +7 -7
  555. data/generated/google/apis/script_v1/classes.rb +167 -6
  556. data/generated/google/apis/script_v1/representations.rb +79 -1
  557. data/generated/google/apis/script_v1/service.rb +16 -16
  558. data/generated/google/apis/script_v1.rb +1 -1
  559. data/generated/google/apis/searchconsole_v1/service.rb +1 -1
  560. data/generated/google/apis/securitycenter_v1/classes.rb +1627 -0
  561. data/generated/google/apis/securitycenter_v1/representations.rb +569 -0
  562. data/generated/google/apis/securitycenter_v1/service.rb +1110 -0
  563. data/generated/google/apis/securitycenter_v1.rb +35 -0
  564. data/generated/google/apis/securitycenter_v1beta1/classes.rb +1514 -0
  565. data/generated/google/apis/securitycenter_v1beta1/representations.rb +548 -0
  566. data/generated/google/apis/securitycenter_v1beta1/service.rb +1035 -0
  567. data/generated/google/apis/securitycenter_v1beta1.rb +35 -0
  568. data/generated/google/apis/servicebroker_v1/classes.rb +1 -1
  569. data/generated/google/apis/servicebroker_v1/service.rb +3 -3
  570. data/generated/google/apis/servicebroker_v1.rb +1 -1
  571. data/generated/google/apis/servicebroker_v1alpha1/classes.rb +1 -1
  572. data/generated/google/apis/servicebroker_v1alpha1/service.rb +16 -16
  573. data/generated/google/apis/servicebroker_v1alpha1.rb +1 -1
  574. data/generated/google/apis/servicebroker_v1beta1/classes.rb +1 -1
  575. data/generated/google/apis/servicebroker_v1beta1/service.rb +21 -21
  576. data/generated/google/apis/servicebroker_v1beta1.rb +1 -1
  577. data/generated/google/apis/serviceconsumermanagement_v1/classes.rb +453 -149
  578. data/generated/google/apis/serviceconsumermanagement_v1/representations.rb +202 -29
  579. data/generated/google/apis/serviceconsumermanagement_v1/service.rb +148 -62
  580. data/generated/google/apis/serviceconsumermanagement_v1.rb +1 -1
  581. data/generated/google/apis/servicecontrol_v1/classes.rb +122 -25
  582. data/generated/google/apis/servicecontrol_v1/representations.rb +47 -0
  583. data/generated/google/apis/servicecontrol_v1/service.rb +3 -3
  584. data/generated/google/apis/servicecontrol_v1.rb +1 -1
  585. data/generated/google/apis/servicemanagement_v1/classes.rb +93 -110
  586. data/generated/google/apis/servicemanagement_v1/representations.rb +13 -26
  587. data/generated/google/apis/servicemanagement_v1/service.rb +30 -27
  588. data/generated/google/apis/servicemanagement_v1.rb +1 -1
  589. data/generated/google/apis/servicenetworking_v1/classes.rb +3626 -0
  590. data/generated/google/apis/servicenetworking_v1/representations.rb +1055 -0
  591. data/generated/google/apis/servicenetworking_v1/service.rb +440 -0
  592. data/generated/google/apis/servicenetworking_v1.rb +38 -0
  593. data/generated/google/apis/servicenetworking_v1beta/classes.rb +65 -108
  594. data/generated/google/apis/servicenetworking_v1beta/representations.rb +2 -29
  595. data/generated/google/apis/servicenetworking_v1beta/service.rb +6 -6
  596. data/generated/google/apis/servicenetworking_v1beta.rb +1 -1
  597. data/generated/google/apis/serviceusage_v1/classes.rb +160 -109
  598. data/generated/google/apis/serviceusage_v1/representations.rb +42 -26
  599. data/generated/google/apis/serviceusage_v1/service.rb +17 -19
  600. data/generated/google/apis/serviceusage_v1.rb +1 -1
  601. data/generated/google/apis/serviceusage_v1beta1/classes.rb +161 -110
  602. data/generated/google/apis/serviceusage_v1beta1/representations.rb +42 -26
  603. data/generated/google/apis/serviceusage_v1beta1/service.rb +7 -7
  604. data/generated/google/apis/serviceusage_v1beta1.rb +1 -1
  605. data/generated/google/apis/sheets_v4/classes.rb +115 -26
  606. data/generated/google/apis/sheets_v4/service.rb +17 -17
  607. data/generated/google/apis/sheets_v4.rb +1 -1
  608. data/generated/google/apis/site_verification_v1/service.rb +7 -7
  609. data/generated/google/apis/slides_v1/classes.rb +2 -2
  610. data/generated/google/apis/slides_v1/service.rb +5 -5
  611. data/generated/google/apis/slides_v1.rb +1 -1
  612. data/generated/google/apis/sourcerepo_v1/classes.rb +183 -1
  613. data/generated/google/apis/sourcerepo_v1/representations.rb +45 -0
  614. data/generated/google/apis/sourcerepo_v1/service.rb +45 -10
  615. data/generated/google/apis/sourcerepo_v1.rb +1 -1
  616. data/generated/google/apis/spanner_v1/classes.rb +231 -17
  617. data/generated/google/apis/spanner_v1/representations.rb +66 -0
  618. data/generated/google/apis/spanner_v1/service.rb +92 -42
  619. data/generated/google/apis/spanner_v1.rb +1 -1
  620. data/generated/google/apis/speech_v1/classes.rb +110 -13
  621. data/generated/google/apis/speech_v1/representations.rb +24 -0
  622. data/generated/google/apis/speech_v1/service.rb +9 -7
  623. data/generated/google/apis/speech_v1.rb +1 -1
  624. data/generated/google/apis/speech_v1p1beta1/classes.rb +19 -13
  625. data/generated/google/apis/speech_v1p1beta1/representations.rb +1 -0
  626. data/generated/google/apis/speech_v1p1beta1/service.rb +9 -7
  627. data/generated/google/apis/speech_v1p1beta1.rb +1 -1
  628. data/generated/google/apis/sqladmin_v1beta4/classes.rb +94 -17
  629. data/generated/google/apis/sqladmin_v1beta4/representations.rb +36 -0
  630. data/generated/google/apis/sqladmin_v1beta4/service.rb +44 -44
  631. data/generated/google/apis/sqladmin_v1beta4.rb +1 -1
  632. data/generated/google/apis/storage_v1/classes.rb +201 -4
  633. data/generated/google/apis/storage_v1/representations.rb +76 -1
  634. data/generated/google/apis/storage_v1/service.rb +488 -93
  635. data/generated/google/apis/storage_v1.rb +1 -1
  636. data/generated/google/apis/storage_v1beta1/service.rb +24 -24
  637. data/generated/google/apis/storage_v1beta2/service.rb +34 -34
  638. data/generated/google/apis/storagetransfer_v1/classes.rb +44 -44
  639. data/generated/google/apis/storagetransfer_v1/service.rb +35 -36
  640. data/generated/google/apis/storagetransfer_v1.rb +2 -2
  641. data/generated/google/apis/streetviewpublish_v1/classes.rb +27 -27
  642. data/generated/google/apis/streetviewpublish_v1/service.rb +36 -40
  643. data/generated/google/apis/streetviewpublish_v1.rb +1 -1
  644. data/generated/google/apis/surveys_v2/service.rb +8 -8
  645. data/generated/google/apis/tagmanager_v1/service.rb +49 -95
  646. data/generated/google/apis/tagmanager_v1.rb +1 -1
  647. data/generated/google/apis/tagmanager_v2/classes.rb +197 -292
  648. data/generated/google/apis/tagmanager_v2/representations.rb +62 -103
  649. data/generated/google/apis/tagmanager_v2/service.rb +287 -249
  650. data/generated/google/apis/tagmanager_v2.rb +1 -1
  651. data/generated/google/apis/tasks_v1/service.rb +19 -19
  652. data/generated/google/apis/tasks_v1.rb +2 -2
  653. data/generated/google/apis/testing_v1/classes.rb +44 -39
  654. data/generated/google/apis/testing_v1/representations.rb +3 -1
  655. data/generated/google/apis/testing_v1/service.rb +5 -5
  656. data/generated/google/apis/testing_v1.rb +1 -1
  657. data/generated/google/apis/texttospeech_v1/service.rb +2 -2
  658. data/generated/google/apis/texttospeech_v1.rb +1 -1
  659. data/generated/google/apis/texttospeech_v1beta1/service.rb +2 -2
  660. data/generated/google/apis/texttospeech_v1beta1.rb +1 -1
  661. data/generated/google/apis/toolresults_v1beta3/classes.rb +340 -17
  662. data/generated/google/apis/toolresults_v1beta3/representations.rb +90 -0
  663. data/generated/google/apis/toolresults_v1beta3/service.rb +140 -24
  664. data/generated/google/apis/toolresults_v1beta3.rb +1 -1
  665. data/generated/google/apis/tpu_v1/classes.rb +21 -15
  666. data/generated/google/apis/tpu_v1/representations.rb +1 -0
  667. data/generated/google/apis/tpu_v1/service.rb +17 -17
  668. data/generated/google/apis/tpu_v1.rb +1 -1
  669. data/generated/google/apis/tpu_v1alpha1/classes.rb +21 -15
  670. data/generated/google/apis/tpu_v1alpha1/representations.rb +1 -0
  671. data/generated/google/apis/tpu_v1alpha1/service.rb +17 -17
  672. data/generated/google/apis/tpu_v1alpha1.rb +1 -1
  673. data/generated/google/apis/translate_v2/service.rb +5 -5
  674. data/generated/google/apis/urlshortener_v1/service.rb +3 -3
  675. data/generated/google/apis/vault_v1/classes.rb +44 -18
  676. data/generated/google/apis/vault_v1/representations.rb +4 -0
  677. data/generated/google/apis/vault_v1/service.rb +28 -28
  678. data/generated/google/apis/vault_v1.rb +1 -1
  679. data/generated/google/apis/videointelligence_v1/classes.rb +2193 -350
  680. data/generated/google/apis/videointelligence_v1/representations.rb +805 -6
  681. data/generated/google/apis/videointelligence_v1/service.rb +7 -6
  682. data/generated/google/apis/videointelligence_v1.rb +3 -2
  683. data/generated/google/apis/videointelligence_v1beta2/classes.rb +2448 -605
  684. data/generated/google/apis/videointelligence_v1beta2/representations.rb +806 -7
  685. data/generated/google/apis/videointelligence_v1beta2/service.rb +3 -2
  686. data/generated/google/apis/videointelligence_v1beta2.rb +3 -2
  687. data/generated/google/apis/videointelligence_v1p1beta1/classes.rb +2422 -579
  688. data/generated/google/apis/videointelligence_v1p1beta1/representations.rb +806 -7
  689. data/generated/google/apis/videointelligence_v1p1beta1/service.rb +3 -2
  690. data/generated/google/apis/videointelligence_v1p1beta1.rb +3 -2
  691. data/generated/google/apis/videointelligence_v1p2beta1/classes.rb +2645 -830
  692. data/generated/google/apis/videointelligence_v1p2beta1/representations.rb +796 -12
  693. data/generated/google/apis/videointelligence_v1p2beta1/service.rb +3 -2
  694. data/generated/google/apis/videointelligence_v1p2beta1.rb +3 -2
  695. data/generated/google/apis/videointelligence_v1p3beta1/classes.rb +4687 -0
  696. data/generated/google/apis/videointelligence_v1p3beta1/representations.rb +2005 -0
  697. data/generated/google/apis/videointelligence_v1p3beta1/service.rb +94 -0
  698. data/generated/google/apis/videointelligence_v1p3beta1.rb +36 -0
  699. data/generated/google/apis/vision_v1/classes.rb +4397 -124
  700. data/generated/google/apis/vision_v1/representations.rb +2366 -541
  701. data/generated/google/apis/vision_v1/service.rb +160 -33
  702. data/generated/google/apis/vision_v1.rb +1 -1
  703. data/generated/google/apis/vision_v1p1beta1/classes.rb +4451 -158
  704. data/generated/google/apis/vision_v1p1beta1/representations.rb +2415 -576
  705. data/generated/google/apis/vision_v1p1beta1/service.rb +73 -2
  706. data/generated/google/apis/vision_v1p1beta1.rb +1 -1
  707. data/generated/google/apis/vision_v1p2beta1/classes.rb +4451 -158
  708. data/generated/google/apis/vision_v1p2beta1/representations.rb +2443 -604
  709. data/generated/google/apis/vision_v1p2beta1/service.rb +73 -2
  710. data/generated/google/apis/vision_v1p2beta1.rb +1 -1
  711. data/generated/google/apis/webfonts_v1/service.rb +1 -1
  712. data/generated/google/apis/webmasters_v3/classes.rb +0 -166
  713. data/generated/google/apis/webmasters_v3/representations.rb +0 -93
  714. data/generated/google/apis/webmasters_v3/service.rb +9 -180
  715. data/generated/google/apis/webmasters_v3.rb +1 -1
  716. data/generated/google/apis/websecurityscanner_v1alpha/service.rb +13 -13
  717. data/generated/google/apis/websecurityscanner_v1beta/classes.rb +973 -0
  718. data/generated/google/apis/websecurityscanner_v1beta/representations.rb +452 -0
  719. data/generated/google/apis/websecurityscanner_v1beta/service.rb +548 -0
  720. data/generated/google/apis/websecurityscanner_v1beta.rb +34 -0
  721. data/generated/google/apis/youtube_analytics_v1/service.rb +8 -8
  722. data/generated/google/apis/youtube_analytics_v1beta1/service.rb +8 -8
  723. data/generated/google/apis/youtube_analytics_v2/service.rb +8 -8
  724. data/generated/google/apis/youtube_partner_v1/classes.rb +15 -34
  725. data/generated/google/apis/youtube_partner_v1/representations.rb +4 -17
  726. data/generated/google/apis/youtube_partner_v1/service.rb +74 -74
  727. data/generated/google/apis/youtube_partner_v1.rb +1 -1
  728. data/generated/google/apis/youtube_v3/service.rb +71 -71
  729. data/generated/google/apis/youtube_v3.rb +1 -1
  730. data/generated/google/apis/youtubereporting_v1/classes.rb +2 -2
  731. data/generated/google/apis/youtubereporting_v1/service.rb +8 -8
  732. data/generated/google/apis/youtubereporting_v1.rb +1 -1
  733. data/google-api-client.gemspec +2 -2
  734. data/lib/google/apis/core/http_command.rb +1 -0
  735. data/lib/google/apis/core/json_representation.rb +4 -0
  736. data/lib/google/apis/core/upload.rb +3 -3
  737. data/lib/google/apis/generator/model.rb +1 -1
  738. data/lib/google/apis/generator/templates/_method.tmpl +3 -3
  739. data/lib/google/apis/version.rb +1 -1
  740. metadata +86 -17
  741. data/.kokoro/common.cfg +0 -22
  742. data/.kokoro/windows.sh +0 -32
  743. data/generated/google/apis/logging_v2beta1/classes.rb +0 -1765
  744. data/generated/google/apis/logging_v2beta1/representations.rb +0 -537
  745. data/generated/google/apis/logging_v2beta1/service.rb +0 -570
  746. data/generated/google/apis/logging_v2beta1.rb +0 -46
  747. data/generated/google/apis/partners_v2/classes.rb +0 -2260
  748. data/generated/google/apis/partners_v2/representations.rb +0 -905
  749. data/generated/google/apis/partners_v2/service.rb +0 -1077
  750. data/samples/web/.env +0 -2
@@ -0,0 +1,4687 @@
1
+ # Copyright 2015 Google Inc.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ require 'date'
16
+ require 'google/apis/core/base_service'
17
+ require 'google/apis/core/json_representation'
18
+ require 'google/apis/core/hashable'
19
+ require 'google/apis/errors'
20
+
21
+ module Google
22
+ module Apis
23
+ module VideointelligenceV1p3beta1
24
+
25
# Video annotation progress. Included in the `metadata` field of the
# `Operation` returned by the `GetOperation` call of the
# `google::longrunning::Operations` service.
class GoogleCloudVideointelligenceV1AnnotateVideoProgress
  include Google::Apis::Core::Hashable

  # Progress metadata for all videos specified in `AnnotateVideoRequest`.
  # Corresponds to the JSON property `annotationProgress`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoAnnotationProgress>]
  attr_accessor :annotation_progress

  def initialize(**args)
    update!(**args)
  end

  # Copy the supplied attributes onto this object; attributes absent
  # from `args` keep their current values.
  def update!(**args)
    @annotation_progress = args.fetch(:annotation_progress) if args.key?(:annotation_progress)
  end
end
45
+
46
# Video annotation response. Included in the `response` field of the
# `Operation` returned by the `GetOperation` call of the
# `google::longrunning::Operations` service.
class GoogleCloudVideointelligenceV1AnnotateVideoResponse
  include Google::Apis::Core::Hashable

  # Annotation results for all videos specified in `AnnotateVideoRequest`.
  # Corresponds to the JSON property `annotationResults`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoAnnotationResults>]
  attr_accessor :annotation_results

  def initialize(**args)
    update!(**args)
  end

  # Copy the supplied attributes onto this object; attributes absent
  # from `args` keep their current values.
  def update!(**args)
    @annotation_results = args.fetch(:annotation_results) if args.key?(:annotation_results)
  end
end
66
+
67
# Detected entity from video analysis.
class GoogleCloudVideointelligenceV1Entity
  include Google::Apis::Core::Hashable

  # Textual description, e.g. `Fixed-gear bicycle`.
  # Corresponds to the JSON property `description`
  # @return [String]
  attr_accessor :description

  # Opaque entity ID. Some IDs may be available in the
  # [Google Knowledge Graph Search
  # API](https://developers.google.com/knowledge-graph/).
  # Corresponds to the JSON property `entityId`
  # @return [String]
  attr_accessor :entity_id

  # Language code for `description` in BCP-47 format.
  # Corresponds to the JSON property `languageCode`
  # @return [String]
  attr_accessor :language_code

  def initialize(**args)
    update!(**args)
  end

  # Copy the supplied attributes onto this object; attributes absent
  # from `args` keep their current values.
  def update!(**args)
    %i[description entity_id language_code].each do |attr|
      instance_variable_set(:"@#{attr}", args.fetch(attr)) if args.key?(attr)
    end
  end
end
99
+
100
# Explicit content annotation (based on per-frame visual signals only).
# Frames in which no explicit content was detected carry no annotations.
class GoogleCloudVideointelligenceV1ExplicitContentAnnotation
  include Google::Apis::Core::Hashable

  # All video frames where explicit content was detected.
  # Corresponds to the JSON property `frames`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1ExplicitContentFrame>]
  attr_accessor :frames

  def initialize(**args)
    update!(**args)
  end

  # Copy the supplied attributes onto this object; attributes absent
  # from `args` keep their current values.
  def update!(**args)
    @frames = args.fetch(:frames) if args.key?(:frames)
  end
end
120
+
121
# Video frame level annotation results for explicit content.
class GoogleCloudVideointelligenceV1ExplicitContentFrame
  include Google::Apis::Core::Hashable

  # Likelihood of the pornography content.
  # Corresponds to the JSON property `pornographyLikelihood`
  # @return [String]
  attr_accessor :pornography_likelihood

  # Time-offset, relative to the beginning of the video, corresponding
  # to the video frame for this location.
  # Corresponds to the JSON property `timeOffset`
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Copy the supplied attributes onto this object; attributes absent
  # from `args` keep their current values.
  def update!(**args)
    %i[pornography_likelihood time_offset].each do |attr|
      instance_variable_set(:"@#{attr}", args.fetch(attr)) if args.key?(attr)
    end
  end
end
146
+
147
# Label annotation.
class GoogleCloudVideointelligenceV1LabelAnnotation
  include Google::Apis::Core::Hashable

  # Common categories for the detected entity. E.g. when the label is
  # `Terrier`, the category is likely `dog`. In some cases there may be
  # more than one category, e.g. `Terrier` could also be a `pet`.
  # Corresponds to the JSON property `categoryEntities`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1Entity>]
  attr_accessor :category_entities

  # Detected entity from video analysis.
  # Corresponds to the JSON property `entity`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1Entity]
  attr_accessor :entity

  # All video frames where a label was detected.
  # Corresponds to the JSON property `frames`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelFrame>]
  attr_accessor :frames

  # All video segments where a label was detected.
  # Corresponds to the JSON property `segments`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelSegment>]
  attr_accessor :segments

  def initialize(**args)
    update!(**args)
  end

  # Copy the supplied attributes onto this object; attributes absent
  # from `args` keep their current values.
  def update!(**args)
    %i[category_entities entity frames segments].each do |attr|
      instance_variable_set(:"@#{attr}", args.fetch(attr)) if args.key?(attr)
    end
  end
end
186
+
187
# Video frame level annotation results for label detection.
class GoogleCloudVideointelligenceV1LabelFrame
  include Google::Apis::Core::Hashable

  # Confidence that the label is accurate. Range: [0, 1].
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Time-offset, relative to the beginning of the video, corresponding
  # to the video frame for this location.
  # Corresponds to the JSON property `timeOffset`
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Copy the supplied attributes onto this object; attributes absent
  # from `args` keep their current values.
  def update!(**args)
    %i[confidence time_offset].each do |attr|
      instance_variable_set(:"@#{attr}", args.fetch(attr)) if args.key?(attr)
    end
  end
end
212
+
213
# Video segment level annotation results for label detection.
class GoogleCloudVideointelligenceV1LabelSegment
  include Google::Apis::Core::Hashable

  # Confidence that the label is accurate. Range: [0, 1].
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Video segment.
  # Corresponds to the JSON property `segment`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment]
  attr_accessor :segment

  def initialize(**args)
    update!(**args)
  end

  # Copy the supplied attributes onto this object; attributes absent
  # from `args` keep their current values.
  def update!(**args)
    %i[confidence segment].each do |attr|
      instance_variable_set(:"@#{attr}", args.fetch(attr)) if args.key?(attr)
    end
  end
end
237
+
238
# Normalized bounding box.
# The normalized vertex coordinates are relative to the original image.
# Range: [0, 1].
class GoogleCloudVideointelligenceV1NormalizedBoundingBox
  include Google::Apis::Core::Hashable

  # Bottom Y coordinate.
  # Corresponds to the JSON property `bottom`
  # @return [Float]
  attr_accessor :bottom

  # Left X coordinate.
  # Corresponds to the JSON property `left`
  # @return [Float]
  attr_accessor :left

  # Right X coordinate.
  # Corresponds to the JSON property `right`
  # @return [Float]
  attr_accessor :right

  # Top Y coordinate.
  # Corresponds to the JSON property `top`
  # @return [Float]
  attr_accessor :top

  def initialize(**args)
    update!(**args)
  end

  # Copy the supplied attributes onto this object; attributes absent
  # from `args` keep their current values.
  def update!(**args)
    %i[bottom left right top].each do |attr|
      instance_variable_set(:"@#{attr}", args.fetch(attr)) if args.key?(attr)
    end
  end
end
276
+
277
# Normalized bounding polygon for text (that might not be aligned with axis).
# Contains the list of corner points in clockwise order starting from the
# top-left corner. For example, for a rectangular bounding box:
# When the text is horizontal it might look like:
#         0----1
#         |    |
#         3----2
# When it's clockwise rotated 180 degrees around the top-left corner it
# becomes:
#         2----3
#         |    |
#         1----0
# and the vertex order will still be (0, 1, 2, 3). Note that values can be
# less than 0, or greater than 1, due to trigonometric calculations for the
# location of the box.
class GoogleCloudVideointelligenceV1NormalizedBoundingPoly
  include Google::Apis::Core::Hashable

  # Normalized vertices of the bounding polygon.
  # Corresponds to the JSON property `vertices`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1NormalizedVertex>]
  attr_accessor :vertices

  def initialize(**args)
    update!(**args)
  end

  # Copy the supplied attributes onto this object; attributes absent
  # from `args` keep their current values.
  def update!(**args)
    @vertices = args.fetch(:vertices) if args.key?(:vertices)
  end
end
309
+
310
# A vertex represents a 2D point in the image.
# NOTE: the normalized vertex coordinates are relative to the original
# image and range from 0 to 1.
class GoogleCloudVideointelligenceV1NormalizedVertex
  include Google::Apis::Core::Hashable

  # X coordinate.
  # Corresponds to the JSON property `x`
  # @return [Float]
  attr_accessor :x

  # Y coordinate.
  # Corresponds to the JSON property `y`
  # @return [Float]
  attr_accessor :y

  def initialize(**args)
    update!(**args)
  end

  # Copy the supplied attributes onto this object; attributes absent
  # from `args` keep their current values.
  def update!(**args)
    %i[x y].each do |attr|
      instance_variable_set(:"@#{attr}", args.fetch(attr)) if args.key?(attr)
    end
  end
end
336
+
337
# Annotations corresponding to one tracked object.
class GoogleCloudVideointelligenceV1ObjectTrackingAnnotation
  include Google::Apis::Core::Hashable

  # Object category's labeling confidence of this track.
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Detected entity from video analysis.
  # Corresponds to the JSON property `entity`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1Entity]
  attr_accessor :entity

  # Information corresponding to all frames where this object track appears.
  # Non-streaming batch mode: there may be one or multiple
  # ObjectTrackingFrame messages in frames.
  # Streaming mode: there can only be one ObjectTrackingFrame message in
  # frames.
  # Corresponds to the JSON property `frames`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1ObjectTrackingFrame>]
  attr_accessor :frames

  # Video segment.
  # Corresponds to the JSON property `segment`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment]
  attr_accessor :segment

  # Streaming mode ONLY.
  # In streaming mode the end time of a tracked object is not known before
  # the track completes, so no VideoSegment info is returned. Instead, a
  # unique identifiable integer track_id is provided so that customers can
  # correlate the results of the ongoing ObjectTrackAnnotation of the same
  # track_id over time.
  # Corresponds to the JSON property `trackId`
  # @return [Fixnum]
  attr_accessor :track_id

  def initialize(**args)
    update!(**args)
  end

  # Copy the supplied attributes onto this object; attributes absent
  # from `args` keep their current values.
  def update!(**args)
    %i[confidence entity frames segment track_id].each do |attr|
      instance_variable_set(:"@#{attr}", args.fetch(attr)) if args.key?(attr)
    end
  end
end
387
+
388
# Video frame level annotations for object detection and tracking. This field
# stores per frame location, time offset, and confidence.
class GoogleCloudVideointelligenceV1ObjectTrackingFrame
  include Google::Apis::Core::Hashable

  # Normalized bounding box. The normalized vertex coordinates are relative to
  # the original image. Range: [0, 1]. (JSON property `normalizedBoundingBox`)
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1NormalizedBoundingBox]
  attr_accessor :normalized_bounding_box

  # The timestamp of the frame in microseconds. (JSON property `timeOffset`)
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[normalized_bounding_box time_offset].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
415
+
416
# Alternative hypotheses (a.k.a. n-best list).
class GoogleCloudVideointelligenceV1SpeechRecognitionAlternative
  include Google::Apis::Core::Hashable

  # The confidence estimate between 0.0 and 1.0. A higher number indicates an
  # estimated greater likelihood that the recognized words are correct.
  # Typically provided only for the top hypothesis and only for
  # `is_final=true` results. Clients should not rely on this field as it is
  # not guaranteed to be accurate or consistent. The default of 0.0 is a
  # sentinel value indicating `confidence` was not set.
  # (JSON property `confidence`)
  # @return [Float]
  attr_accessor :confidence

  # Transcript text representing the words that the user spoke.
  # (JSON property `transcript`)
  # @return [String]
  attr_accessor :transcript

  # A list of word-specific information for each recognized word.
  # (JSON property `words`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1WordInfo>]
  attr_accessor :words

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[confidence transcript words].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
451
+
452
# A speech recognition result corresponding to a portion of the audio.
class GoogleCloudVideointelligenceV1SpeechTranscription
  include Google::Apis::Core::Hashable

  # May contain one or more recognition hypotheses (up to the maximum specified
  # in `max_alternatives`), ordered by accuracy with the top (first)
  # alternative being the most probable as ranked by the recognizer.
  # (JSON property `alternatives`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1SpeechRecognitionAlternative>]
  attr_accessor :alternatives

  # Output only. The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt)
  # language tag of the language in this result, detected as the most likely
  # language spoken in the audio. (JSON property `languageCode`)
  # @return [String]
  attr_accessor :language_code

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[alternatives language_code].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
482
+
483
# Annotations related to one detected OCR text snippet. This will contain the
# corresponding text, confidence value, and frame level information for each
# detection.
class GoogleCloudVideointelligenceV1TextAnnotation
  include Google::Apis::Core::Hashable

  # All video segments where OCR detected text appears.
  # (JSON property `segments`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1TextSegment>]
  attr_accessor :segments

  # The detected text. (JSON property `text`)
  # @return [String]
  attr_accessor :text

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[segments text].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
509
+
510
# Video frame level annotation results for text annotation (OCR).
# Contains information regarding timestamp and bounding box locations for the
# frames containing detected OCR text snippets.
class GoogleCloudVideointelligenceV1TextFrame
  include Google::Apis::Core::Hashable

  # Normalized bounding polygon for text (that might not be aligned with axis):
  # the corner points in clockwise order starting from the top-left corner.
  # For a horizontal rectangular box:
  #   0----1
  #   |    |
  #   3----2
  # Rotated clockwise 180 degrees around the top-left corner it becomes:
  #   2----3
  #   |    |
  #   1----0
  # and the vertex order will still be (0, 1, 2, 3). Values can be less than 0
  # or greater than 1 due to trignometric calculations for location of the box.
  # (JSON property `rotatedBoundingBox`)
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1NormalizedBoundingPoly]
  attr_accessor :rotated_bounding_box

  # Timestamp of this frame. (JSON property `timeOffset`)
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[rotated_bounding_box time_offset].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
550
+
551
# Video segment level annotation results for text detection.
class GoogleCloudVideointelligenceV1TextSegment
  include Google::Apis::Core::Hashable

  # Confidence for the track of detected text: the highest confidence over all
  # frames where OCR detected text appears. (JSON property `confidence`)
  # @return [Float]
  attr_accessor :confidence

  # Information related to the frames where OCR detected text appears.
  # (JSON property `frames`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1TextFrame>]
  attr_accessor :frames

  # Video segment. (JSON property `segment`)
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment]
  attr_accessor :segment

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[confidence frames segment].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
582
+
583
# Annotation progress for a single video.
class GoogleCloudVideointelligenceV1VideoAnnotationProgress
  include Google::Apis::Core::Hashable

  # Video file location in
  # [Google Cloud Storage](https://cloud.google.com/storage/).
  # (JSON property `inputUri`)
  # @return [String]
  attr_accessor :input_uri

  # Approximate percentage processed thus far. Guaranteed to be 100 when fully
  # processed. (JSON property `progressPercent`)
  # @return [Fixnum]
  attr_accessor :progress_percent

  # Time when the request was received. (JSON property `startTime`)
  # @return [String]
  attr_accessor :start_time

  # Time of the most recent update. (JSON property `updateTime`)
  # @return [String]
  attr_accessor :update_time

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[input_uri progress_percent start_time update_time].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
621
+
622
# Annotation results for a single video.
class GoogleCloudVideointelligenceV1VideoAnnotationResults
  include Google::Apis::Core::Hashable

  # The `Status` type defines a logical error model suitable for different
  # programming environments, including REST APIs and RPC APIs, and is used by
  # [gRPC](https://github.com/grpc). It carries three pieces of data: an error
  # code (normally an enum value of google.rpc.Code), a developer-facing
  # English error message, and optional error details (a predefined set of
  # detail types exists in the `google.rpc` package for common conditions).
  # The message is a logical representation, mapped per wire protocol and
  # client library (e.g. exceptions in Java, error codes in C), and is used
  # for partial errors, workflow step errors, batch sub-responses,
  # asynchronous operation results, and logging.
  # (JSON property `error`)
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus]
  attr_accessor :error

  # Explicit content annotation (based on per-frame visual signals only).
  # If no explicit content has been detected in a frame, no annotations are
  # present for that frame. (JSON property `explicitAnnotation`)
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1ExplicitContentAnnotation]
  attr_accessor :explicit_annotation

  # Label annotations on frame level; exactly one element per unique label.
  # (JSON property `frameLabelAnnotations`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelAnnotation>]
  attr_accessor :frame_label_annotations

  # Video file location in
  # [Google Cloud Storage](https://cloud.google.com/storage/).
  # (JSON property `inputUri`)
  # @return [String]
  attr_accessor :input_uri

  # Annotations for list of objects detected and tracked in video.
  # (JSON property `objectAnnotations`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1ObjectTrackingAnnotation>]
  attr_accessor :object_annotations

  # Label annotations on video level or user specified segment level;
  # exactly one element per unique label.
  # (JSON property `segmentLabelAnnotations`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelAnnotation>]
  attr_accessor :segment_label_annotations

  # Shot annotations. Each shot is represented as a video segment.
  # (JSON property `shotAnnotations`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1VideoSegment>]
  attr_accessor :shot_annotations

  # Label annotations on shot level; exactly one element per unique label.
  # (JSON property `shotLabelAnnotations`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1LabelAnnotation>]
  attr_accessor :shot_label_annotations

  # Speech transcription. (JSON property `speechTranscriptions`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1SpeechTranscription>]
  attr_accessor :speech_transcriptions

  # OCR text detection and tracking. Annotations for list of detected text
  # snippets, each with its list of frame information.
  # (JSON property `textAnnotations`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1TextAnnotation>]
  attr_accessor :text_annotations

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[error explicit_annotation frame_label_annotations input_uri
       object_annotations segment_label_annotations shot_annotations
       shot_label_annotations speech_transcriptions
       text_annotations].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
740
+
741
# Video segment.
class GoogleCloudVideointelligenceV1VideoSegment
  include Google::Apis::Core::Hashable

  # Time-offset, relative to the beginning of the video, corresponding to the
  # end of the segment (inclusive). (JSON property `endTimeOffset`)
  # @return [String]
  attr_accessor :end_time_offset

  # Time-offset, relative to the beginning of the video, corresponding to the
  # start of the segment (inclusive). (JSON property `startTimeOffset`)
  # @return [String]
  attr_accessor :start_time_offset

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[end_time_offset start_time_offset].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
767
+
768
# Word-specific information for recognized words. Word information is only
# included in the response when certain request parameters are set, such
# as `enable_word_time_offsets`.
class GoogleCloudVideointelligenceV1WordInfo
  include Google::Apis::Core::Hashable

  # Output only. The confidence estimate between 0.0 and 1.0. A higher number
  # indicates an estimated greater likelihood that the recognized words are
  # correct. Set only for the top alternative; not guaranteed to be accurate,
  # and users should not rely on it to be always provided. The default of 0.0
  # is a sentinel value indicating `confidence` was not set.
  # (JSON property `confidence`)
  # @return [Float]
  attr_accessor :confidence

  # Time offset relative to the beginning of the audio, corresponding to the
  # end of the spoken word. Only set if `enable_word_time_offsets=true` and
  # only in the top hypothesis. This is an experimental feature and the
  # accuracy of the time offset can vary. (JSON property `endTime`)
  # @return [String]
  attr_accessor :end_time

  # Output only. A distinct integer value is assigned for every speaker within
  # the audio; this field specifies which of those speakers was detected to
  # have spoken this word. Ranges from 1 up to diarization_speaker_count, and
  # is only set if speaker diarization is enabled.
  # (JSON property `speakerTag`)
  # @return [Fixnum]
  attr_accessor :speaker_tag

  # Time offset relative to the beginning of the audio, corresponding to the
  # start of the spoken word. Only set if `enable_word_time_offsets=true` and
  # only in the top hypothesis. This is an experimental feature and the
  # accuracy of the time offset can vary. (JSON property `startTime`)
  # @return [String]
  attr_accessor :start_time

  # The word corresponding to this set of information. (JSON property `word`)
  # @return [String]
  attr_accessor :word

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[confidence end_time speaker_tag start_time word].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
826
+
827
# Video annotation progress. Included in the `metadata` field of the
# `Operation` returned by the `GetOperation` call of the
# `google::longrunning::Operations` service.
class GoogleCloudVideointelligenceV1beta2AnnotateVideoProgress
  include Google::Apis::Core::Hashable

  # Progress metadata for all videos specified in `AnnotateVideoRequest`.
  # (JSON property `annotationProgress`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress>]
  attr_accessor :annotation_progress

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[annotation_progress].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
847
+
848
# Video annotation response. Included in the `response` field of the
# `Operation` returned by the `GetOperation` call of the
# `google::longrunning::Operations` service.
class GoogleCloudVideointelligenceV1beta2AnnotateVideoResponse
  include Google::Apis::Core::Hashable

  # Annotation results for all videos specified in `AnnotateVideoRequest`.
  # (JSON property `annotationResults`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoAnnotationResults>]
  attr_accessor :annotation_results

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[annotation_results].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
868
+
869
# Detected entity from video analysis.
class GoogleCloudVideointelligenceV1beta2Entity
  include Google::Apis::Core::Hashable

  # Textual description, e.g. `Fixed-gear bicycle`.
  # (JSON property `description`)
  # @return [String]
  attr_accessor :description

  # Opaque entity ID. Some IDs may be available in
  # [Google Knowledge Graph Search
  # API](https://developers.google.com/knowledge-graph/).
  # (JSON property `entityId`)
  # @return [String]
  attr_accessor :entity_id

  # Language code for `description` in BCP-47 format.
  # (JSON property `languageCode`)
  # @return [String]
  attr_accessor :language_code

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[description entity_id language_code].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
901
+
902
# Explicit content annotation (based on per-frame visual signals only).
# If no explicit content has been detected in a frame, no annotations are
# present for that frame.
class GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation
  include Google::Apis::Core::Hashable

  # All video frames where explicit content was detected.
  # (JSON property `frames`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2ExplicitContentFrame>]
  attr_accessor :frames

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[frames].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
922
+
923
# Video frame level annotation results for explicit content.
class GoogleCloudVideointelligenceV1beta2ExplicitContentFrame
  include Google::Apis::Core::Hashable

  # Likelihood of the pornography content.
  # (JSON property `pornographyLikelihood`)
  # @return [String]
  attr_accessor :pornography_likelihood

  # Time-offset, relative to the beginning of the video, corresponding to the
  # video frame for this location. (JSON property `timeOffset`)
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[pornography_likelihood time_offset].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
948
+
949
# Label annotation.
class GoogleCloudVideointelligenceV1beta2LabelAnnotation
  include Google::Apis::Core::Hashable

  # Common categories for the detected entity. E.g. when the label is
  # `Terrier` the category is likely `dog`; in some cases there might be more
  # than one category, e.g. `Terrier` could also be a `pet`.
  # (JSON property `categoryEntities`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2Entity>]
  attr_accessor :category_entities

  # Detected entity from video analysis. (JSON property `entity`)
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2Entity]
  attr_accessor :entity

  # All video frames where a label was detected. (JSON property `frames`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelFrame>]
  attr_accessor :frames

  # All video segments where a label was detected. (JSON property `segments`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelSegment>]
  attr_accessor :segments

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[category_entities entity frames segments].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
988
+
989
# Video frame level annotation results for label detection.
class GoogleCloudVideointelligenceV1beta2LabelFrame
  include Google::Apis::Core::Hashable

  # Confidence that the label is accurate. Range: [0, 1].
  # (JSON property `confidence`)
  # @return [Float]
  attr_accessor :confidence

  # Time-offset, relative to the beginning of the video, corresponding to the
  # video frame for this location. (JSON property `timeOffset`)
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[confidence time_offset].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
1014
+
1015
# Video segment level annotation results for label detection.
class GoogleCloudVideointelligenceV1beta2LabelSegment
  include Google::Apis::Core::Hashable

  # Confidence that the label is accurate. Range: [0, 1].
  # (JSON property `confidence`)
  # @return [Float]
  attr_accessor :confidence

  # Video segment. (JSON property `segment`)
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment]
  attr_accessor :segment

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[confidence segment].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
1039
+
1040
# Normalized bounding box.
# The normalized vertex coordinates are relative to the original image.
# Range: [0, 1].
class GoogleCloudVideointelligenceV1beta2NormalizedBoundingBox
  include Google::Apis::Core::Hashable

  # Bottom Y coordinate. (JSON property `bottom`)
  # @return [Float]
  attr_accessor :bottom

  # Left X coordinate. (JSON property `left`)
  # @return [Float]
  attr_accessor :left

  # Right X coordinate. (JSON property `right`)
  # @return [Float]
  attr_accessor :right

  # Top Y coordinate. (JSON property `top`)
  # @return [Float]
  attr_accessor :top

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[bottom left right top].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
1078
+
1079
# Normalized bounding polygon for text (that might not be aligned with axis):
# the corner points in clockwise order starting from the top-left corner.
# For a horizontal rectangular box:
#   0----1
#   |    |
#   3----2
# Rotated clockwise 180 degrees around the top-left corner it becomes:
#   2----3
#   |    |
#   1----0
# and the vertex order will still be (0, 1, 2, 3). Values can be less than 0
# or greater than 1 due to trignometric calculations for location of the box.
class GoogleCloudVideointelligenceV1beta2NormalizedBoundingPoly
  include Google::Apis::Core::Hashable

  # Normalized vertices of the bounding polygon. (JSON property `vertices`)
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2NormalizedVertex>]
  attr_accessor :vertices

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[vertices].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
1111
+
1112
# A vertex represents a 2D point in the image.
# NOTE: the normalized vertex coordinates are relative to the original image
# and range from 0 to 1.
class GoogleCloudVideointelligenceV1beta2NormalizedVertex
  include Google::Apis::Core::Hashable

  # X coordinate. (JSON property `x`)
  # @return [Float]
  attr_accessor :x

  # Y coordinate. (JSON property `y`)
  # @return [Float]
  attr_accessor :y

  def initialize(**args)
    update!(**args)
  end

  # Assign every attribute present in +args+; absent keys are left untouched.
  def update!(**args)
    %i[x y].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
1138
+
1139
+ # Annotations corresponding to one tracked object.
1140
+ class GoogleCloudVideointelligenceV1beta2ObjectTrackingAnnotation
1141
+ include Google::Apis::Core::Hashable
1142
+
1143
+ # Object category's labeling confidence of this track.
1144
+ # Corresponds to the JSON property `confidence`
1145
+ # @return [Float]
1146
+ attr_accessor :confidence
1147
+
1148
+ # Detected entity from video analysis.
1149
+ # Corresponds to the JSON property `entity`
1150
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2Entity]
1151
+ attr_accessor :entity
1152
+
1153
+ # Information corresponding to all frames where this object track appears.
1154
+ # Non-streaming batch mode: it may be one or multiple ObjectTrackingFrame
1155
+ # messages in frames.
1156
+ # Streaming mode: it can only be one ObjectTrackingFrame message in frames.
1157
+ # Corresponds to the JSON property `frames`
1158
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2ObjectTrackingFrame>]
1159
+ attr_accessor :frames
1160
+
1161
+ # Video segment.
1162
+ # Corresponds to the JSON property `segment`
1163
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment]
1164
+ attr_accessor :segment
1165
+
1166
+ # Streaming mode ONLY.
1167
+ # In streaming mode, we do not know the end time of a tracked object
1168
+ # before it is completed. Hence, there is no VideoSegment info returned.
1169
+ # Instead, we provide a unique identifiable integer track_id so that
1170
+ # the customers can correlate the results of the ongoing
1171
+ # ObjectTrackAnnotation of the same track_id over time.
1172
+ # Corresponds to the JSON property `trackId`
1173
+ # @return [Fixnum]
1174
+ attr_accessor :track_id
1175
+
1176
+ def initialize(**args)
1177
+ update!(**args)
1178
+ end
1179
+
1180
+ # Update properties of this object
1181
+ def update!(**args)
1182
+ @confidence = args[:confidence] if args.key?(:confidence)
1183
+ @entity = args[:entity] if args.key?(:entity)
1184
+ @frames = args[:frames] if args.key?(:frames)
1185
+ @segment = args[:segment] if args.key?(:segment)
1186
+ @track_id = args[:track_id] if args.key?(:track_id)
1187
+ end
1188
+ end
1189
+
1190
+ # Video frame level annotations for object detection and tracking. This field
1191
+ # stores per frame location, time offset, and confidence.
1192
+ class GoogleCloudVideointelligenceV1beta2ObjectTrackingFrame
1193
+ include Google::Apis::Core::Hashable
1194
+
1195
+ # Normalized bounding box.
1196
+ # The normalized vertex coordinates are relative to the original image.
1197
+ # Range: [0, 1].
1198
+ # Corresponds to the JSON property `normalizedBoundingBox`
1199
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2NormalizedBoundingBox]
1200
+ attr_accessor :normalized_bounding_box
1201
+
1202
+ # The timestamp of the frame in microseconds.
1203
+ # Corresponds to the JSON property `timeOffset`
1204
+ # @return [String]
1205
+ attr_accessor :time_offset
1206
+
1207
+ def initialize(**args)
1208
+ update!(**args)
1209
+ end
1210
+
1211
+ # Update properties of this object
1212
+ def update!(**args)
1213
+ @normalized_bounding_box = args[:normalized_bounding_box] if args.key?(:normalized_bounding_box)
1214
+ @time_offset = args[:time_offset] if args.key?(:time_offset)
1215
+ end
1216
+ end
1217
+
1218
+ # Alternative hypotheses (a.k.a. n-best list).
1219
+ class GoogleCloudVideointelligenceV1beta2SpeechRecognitionAlternative
1220
+ include Google::Apis::Core::Hashable
1221
+
1222
+ # The confidence estimate between 0.0 and 1.0. A higher number
1223
+ # indicates an estimated greater likelihood that the recognized words are
1224
+ # correct. This field is typically provided only for the top hypothesis, and
1225
+ # only for `is_final=true` results. Clients should not rely on the
1226
+ # `confidence` field as it is not guaranteed to be accurate or consistent.
1227
+ # The default of 0.0 is a sentinel value indicating `confidence` was not set.
1228
+ # Corresponds to the JSON property `confidence`
1229
+ # @return [Float]
1230
+ attr_accessor :confidence
1231
+
1232
+ # Transcript text representing the words that the user spoke.
1233
+ # Corresponds to the JSON property `transcript`
1234
+ # @return [String]
1235
+ attr_accessor :transcript
1236
+
1237
+ # A list of word-specific information for each recognized word.
1238
+ # Corresponds to the JSON property `words`
1239
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2WordInfo>]
1240
+ attr_accessor :words
1241
+
1242
+ def initialize(**args)
1243
+ update!(**args)
1244
+ end
1245
+
1246
+ # Update properties of this object
1247
+ def update!(**args)
1248
+ @confidence = args[:confidence] if args.key?(:confidence)
1249
+ @transcript = args[:transcript] if args.key?(:transcript)
1250
+ @words = args[:words] if args.key?(:words)
1251
+ end
1252
+ end
1253
+
1254
+ # A speech recognition result corresponding to a portion of the audio.
1255
+ class GoogleCloudVideointelligenceV1beta2SpeechTranscription
1256
+ include Google::Apis::Core::Hashable
1257
+
1258
+ # May contain one or more recognition hypotheses (up to the maximum specified
1259
+ # in `max_alternatives`). These alternatives are ordered in terms of
1260
+ # accuracy, with the top (first) alternative being the most probable, as
1261
+ # ranked by the recognizer.
1262
+ # Corresponds to the JSON property `alternatives`
1263
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2SpeechRecognitionAlternative>]
1264
+ attr_accessor :alternatives
1265
+
1266
+ # Output only. The
1267
+ # [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the
1268
+ # language in this result. This language code was detected to have the most
1269
+ # likelihood of being spoken in the audio.
1270
+ # Corresponds to the JSON property `languageCode`
1271
+ # @return [String]
1272
+ attr_accessor :language_code
1273
+
1274
+ def initialize(**args)
1275
+ update!(**args)
1276
+ end
1277
+
1278
+ # Update properties of this object
1279
+ def update!(**args)
1280
+ @alternatives = args[:alternatives] if args.key?(:alternatives)
1281
+ @language_code = args[:language_code] if args.key?(:language_code)
1282
+ end
1283
+ end
1284
+
1285
+ # Annotations related to one detected OCR text snippet. This will contain the
1286
+ # corresponding text, confidence value, and frame level information for each
1287
+ # detection.
1288
+ class GoogleCloudVideointelligenceV1beta2TextAnnotation
1289
+ include Google::Apis::Core::Hashable
1290
+
1291
+ # All video segments where OCR detected text appears.
1292
+ # Corresponds to the JSON property `segments`
1293
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2TextSegment>]
1294
+ attr_accessor :segments
1295
+
1296
+ # The detected text.
1297
+ # Corresponds to the JSON property `text`
1298
+ # @return [String]
1299
+ attr_accessor :text
1300
+
1301
+ def initialize(**args)
1302
+ update!(**args)
1303
+ end
1304
+
1305
+ # Update properties of this object
1306
+ def update!(**args)
1307
+ @segments = args[:segments] if args.key?(:segments)
1308
+ @text = args[:text] if args.key?(:text)
1309
+ end
1310
+ end
1311
+
1312
+ # Video frame level annotation results for text annotation (OCR).
1313
+ # Contains information regarding timestamp and bounding box locations for the
1314
+ # frames containing detected OCR text snippets.
1315
+ class GoogleCloudVideointelligenceV1beta2TextFrame
1316
+ include Google::Apis::Core::Hashable
1317
+
1318
+ # Normalized bounding polygon for text (that might not be aligned with axis).
1319
+ # Contains list of the corner points in clockwise order starting from
1320
+ # top-left corner. For example, for a rectangular bounding box:
1321
+ # When the text is horizontal it might look like:
1322
+ # 0----1
1323
+ # | |
1324
+ # 3----2
1325
+ # When it's clockwise rotated 180 degrees around the top-left corner it
1326
+ # becomes:
1327
+ # 2----3
1328
+ # | |
1329
+ # 1----0
1330
+ # and the vertex order will still be (0, 1, 2, 3). Note that values can be less
1331
+ # than 0, or greater than 1 due to trignometric calculations for location of
1332
+ # the box.
1333
+ # Corresponds to the JSON property `rotatedBoundingBox`
1334
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2NormalizedBoundingPoly]
1335
+ attr_accessor :rotated_bounding_box
1336
+
1337
+ # Timestamp of this frame.
1338
+ # Corresponds to the JSON property `timeOffset`
1339
+ # @return [String]
1340
+ attr_accessor :time_offset
1341
+
1342
+ def initialize(**args)
1343
+ update!(**args)
1344
+ end
1345
+
1346
+ # Update properties of this object
1347
+ def update!(**args)
1348
+ @rotated_bounding_box = args[:rotated_bounding_box] if args.key?(:rotated_bounding_box)
1349
+ @time_offset = args[:time_offset] if args.key?(:time_offset)
1350
+ end
1351
+ end
1352
+
1353
+ # Video segment level annotation results for text detection.
1354
+ class GoogleCloudVideointelligenceV1beta2TextSegment
1355
+ include Google::Apis::Core::Hashable
1356
+
1357
+ # Confidence for the track of detected text. It is calculated as the highest
1358
+ # over all frames where OCR detected text appears.
1359
+ # Corresponds to the JSON property `confidence`
1360
+ # @return [Float]
1361
+ attr_accessor :confidence
1362
+
1363
+ # Information related to the frames where OCR detected text appears.
1364
+ # Corresponds to the JSON property `frames`
1365
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2TextFrame>]
1366
+ attr_accessor :frames
1367
+
1368
+ # Video segment.
1369
+ # Corresponds to the JSON property `segment`
1370
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment]
1371
+ attr_accessor :segment
1372
+
1373
+ def initialize(**args)
1374
+ update!(**args)
1375
+ end
1376
+
1377
+ # Update properties of this object
1378
+ def update!(**args)
1379
+ @confidence = args[:confidence] if args.key?(:confidence)
1380
+ @frames = args[:frames] if args.key?(:frames)
1381
+ @segment = args[:segment] if args.key?(:segment)
1382
+ end
1383
+ end
1384
+
1385
+ # Annotation progress for a single video.
1386
+ class GoogleCloudVideointelligenceV1beta2VideoAnnotationProgress
1387
+ include Google::Apis::Core::Hashable
1388
+
1389
+ # Video file location in
1390
+ # [Google Cloud Storage](https://cloud.google.com/storage/).
1391
+ # Corresponds to the JSON property `inputUri`
1392
+ # @return [String]
1393
+ attr_accessor :input_uri
1394
+
1395
+ # Approximate percentage processed thus far. Guaranteed to be
1396
+ # 100 when fully processed.
1397
+ # Corresponds to the JSON property `progressPercent`
1398
+ # @return [Fixnum]
1399
+ attr_accessor :progress_percent
1400
+
1401
+ # Time when the request was received.
1402
+ # Corresponds to the JSON property `startTime`
1403
+ # @return [String]
1404
+ attr_accessor :start_time
1405
+
1406
+ # Time of the most recent update.
1407
+ # Corresponds to the JSON property `updateTime`
1408
+ # @return [String]
1409
+ attr_accessor :update_time
1410
+
1411
+ def initialize(**args)
1412
+ update!(**args)
1413
+ end
1414
+
1415
+ # Update properties of this object
1416
+ def update!(**args)
1417
+ @input_uri = args[:input_uri] if args.key?(:input_uri)
1418
+ @progress_percent = args[:progress_percent] if args.key?(:progress_percent)
1419
+ @start_time = args[:start_time] if args.key?(:start_time)
1420
+ @update_time = args[:update_time] if args.key?(:update_time)
1421
+ end
1422
+ end
1423
+
1424
+ # Annotation results for a single video.
1425
+ class GoogleCloudVideointelligenceV1beta2VideoAnnotationResults
1426
+ include Google::Apis::Core::Hashable
1427
+
1428
+ # The `Status` type defines a logical error model that is suitable for
1429
+ # different programming environments, including REST APIs and RPC APIs. It is
1430
+ # used by [gRPC](https://github.com/grpc). The error model is designed to be:
1431
+ # - Simple to use and understand for most users
1432
+ # - Flexible enough to meet unexpected needs
1433
+ # # Overview
1434
+ # The `Status` message contains three pieces of data: error code, error
1435
+ # message, and error details. The error code should be an enum value of
1436
+ # google.rpc.Code, but it may accept additional error codes if needed. The
1437
+ # error message should be a developer-facing English message that helps
1438
+ # developers *understand* and *resolve* the error. If a localized user-facing
1439
+ # error message is needed, put the localized message in the error details or
1440
+ # localize it in the client. The optional error details may contain arbitrary
1441
+ # information about the error. There is a predefined set of error detail types
1442
+ # in the package `google.rpc` that can be used for common error conditions.
1443
+ # # Language mapping
1444
+ # The `Status` message is the logical representation of the error model, but it
1445
+ # is not necessarily the actual wire format. When the `Status` message is
1446
+ # exposed in different client libraries and different wire protocols, it can be
1447
+ # mapped differently. For example, it will likely be mapped to some exceptions
1448
+ # in Java, but more likely mapped to some error codes in C.
1449
+ # # Other uses
1450
+ # The error model and the `Status` message can be used in a variety of
1451
+ # environments, either with or without APIs, to provide a
1452
+ # consistent developer experience across different environments.
1453
+ # Example uses of this error model include:
1454
+ # - Partial errors. If a service needs to return partial errors to the client,
1455
+ # it may embed the `Status` in the normal response to indicate the partial
1456
+ # errors.
1457
+ # - Workflow errors. A typical workflow has multiple steps. Each step may
1458
+ # have a `Status` message for error reporting.
1459
+ # - Batch operations. If a client uses batch request and batch response, the
1460
+ # `Status` message should be used directly inside batch response, one for
1461
+ # each error sub-response.
1462
+ # - Asynchronous operations. If an API call embeds asynchronous operation
1463
+ # results in its response, the status of those operations should be
1464
+ # represented directly using the `Status` message.
1465
+ # - Logging. If some API errors are stored in logs, the message `Status` could
1466
+ # be used directly after any stripping needed for security/privacy reasons.
1467
+ # Corresponds to the JSON property `error`
1468
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus]
1469
+ attr_accessor :error
1470
+
1471
+ # Explicit content annotation (based on per-frame visual signals only).
1472
+ # If no explicit content has been detected in a frame, no annotations are
1473
+ # present for that frame.
1474
+ # Corresponds to the JSON property `explicitAnnotation`
1475
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2ExplicitContentAnnotation]
1476
+ attr_accessor :explicit_annotation
1477
+
1478
+ # Label annotations on frame level.
1479
+ # There is exactly one element for each unique label.
1480
+ # Corresponds to the JSON property `frameLabelAnnotations`
1481
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelAnnotation>]
1482
+ attr_accessor :frame_label_annotations
1483
+
1484
+ # Video file location in
1485
+ # [Google Cloud Storage](https://cloud.google.com/storage/).
1486
+ # Corresponds to the JSON property `inputUri`
1487
+ # @return [String]
1488
+ attr_accessor :input_uri
1489
+
1490
+ # Annotations for list of objects detected and tracked in video.
1491
+ # Corresponds to the JSON property `objectAnnotations`
1492
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2ObjectTrackingAnnotation>]
1493
+ attr_accessor :object_annotations
1494
+
1495
+ # Label annotations on video level or user specified segment level.
1496
+ # There is exactly one element for each unique label.
1497
+ # Corresponds to the JSON property `segmentLabelAnnotations`
1498
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelAnnotation>]
1499
+ attr_accessor :segment_label_annotations
1500
+
1501
+ # Shot annotations. Each shot is represented as a video segment.
1502
+ # Corresponds to the JSON property `shotAnnotations`
1503
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2VideoSegment>]
1504
+ attr_accessor :shot_annotations
1505
+
1506
+ # Label annotations on shot level.
1507
+ # There is exactly one element for each unique label.
1508
+ # Corresponds to the JSON property `shotLabelAnnotations`
1509
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2LabelAnnotation>]
1510
+ attr_accessor :shot_label_annotations
1511
+
1512
+ # Speech transcription.
1513
+ # Corresponds to the JSON property `speechTranscriptions`
1514
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2SpeechTranscription>]
1515
+ attr_accessor :speech_transcriptions
1516
+
1517
+ # OCR text detection and tracking.
1518
+ # Annotations for list of detected text snippets. Each will have list of
1519
+ # frame information associated with it.
1520
+ # Corresponds to the JSON property `textAnnotations`
1521
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1beta2TextAnnotation>]
1522
+ attr_accessor :text_annotations
1523
+
1524
+ def initialize(**args)
1525
+ update!(**args)
1526
+ end
1527
+
1528
+ # Update properties of this object
1529
+ def update!(**args)
1530
+ @error = args[:error] if args.key?(:error)
1531
+ @explicit_annotation = args[:explicit_annotation] if args.key?(:explicit_annotation)
1532
+ @frame_label_annotations = args[:frame_label_annotations] if args.key?(:frame_label_annotations)
1533
+ @input_uri = args[:input_uri] if args.key?(:input_uri)
1534
+ @object_annotations = args[:object_annotations] if args.key?(:object_annotations)
1535
+ @segment_label_annotations = args[:segment_label_annotations] if args.key?(:segment_label_annotations)
1536
+ @shot_annotations = args[:shot_annotations] if args.key?(:shot_annotations)
1537
+ @shot_label_annotations = args[:shot_label_annotations] if args.key?(:shot_label_annotations)
1538
+ @speech_transcriptions = args[:speech_transcriptions] if args.key?(:speech_transcriptions)
1539
+ @text_annotations = args[:text_annotations] if args.key?(:text_annotations)
1540
+ end
1541
+ end
1542
+
1543
+ # Video segment.
1544
+ class GoogleCloudVideointelligenceV1beta2VideoSegment
1545
+ include Google::Apis::Core::Hashable
1546
+
1547
+ # Time-offset, relative to the beginning of the video,
1548
+ # corresponding to the end of the segment (inclusive).
1549
+ # Corresponds to the JSON property `endTimeOffset`
1550
+ # @return [String]
1551
+ attr_accessor :end_time_offset
1552
+
1553
+ # Time-offset, relative to the beginning of the video,
1554
+ # corresponding to the start of the segment (inclusive).
1555
+ # Corresponds to the JSON property `startTimeOffset`
1556
+ # @return [String]
1557
+ attr_accessor :start_time_offset
1558
+
1559
+ def initialize(**args)
1560
+ update!(**args)
1561
+ end
1562
+
1563
+ # Update properties of this object
1564
+ def update!(**args)
1565
+ @end_time_offset = args[:end_time_offset] if args.key?(:end_time_offset)
1566
+ @start_time_offset = args[:start_time_offset] if args.key?(:start_time_offset)
1567
+ end
1568
+ end
1569
+
1570
+ # Word-specific information for recognized words. Word information is only
1571
+ # included in the response when certain request parameters are set, such
1572
+ # as `enable_word_time_offsets`.
1573
+ class GoogleCloudVideointelligenceV1beta2WordInfo
1574
+ include Google::Apis::Core::Hashable
1575
+
1576
+ # Output only. The confidence estimate between 0.0 and 1.0. A higher number
1577
+ # indicates an estimated greater likelihood that the recognized words are
1578
+ # correct. This field is set only for the top alternative.
1579
+ # This field is not guaranteed to be accurate and users should not rely on it
1580
+ # to be always provided.
1581
+ # The default of 0.0 is a sentinel value indicating `confidence` was not set.
1582
+ # Corresponds to the JSON property `confidence`
1583
+ # @return [Float]
1584
+ attr_accessor :confidence
1585
+
1586
+ # Time offset relative to the beginning of the audio, and
1587
+ # corresponding to the end of the spoken word. This field is only set if
1588
+ # `enable_word_time_offsets=true` and only in the top hypothesis. This is an
1589
+ # experimental feature and the accuracy of the time offset can vary.
1590
+ # Corresponds to the JSON property `endTime`
1591
+ # @return [String]
1592
+ attr_accessor :end_time
1593
+
1594
+ # Output only. A distinct integer value is assigned for every speaker within
1595
+ # the audio. This field specifies which one of those speakers was detected to
1596
+ # have spoken this word. Value ranges from 1 up to diarization_speaker_count,
1597
+ # and is only set if speaker diarization is enabled.
1598
+ # Corresponds to the JSON property `speakerTag`
1599
+ # @return [Fixnum]
1600
+ attr_accessor :speaker_tag
1601
+
1602
+ # Time offset relative to the beginning of the audio, and
1603
+ # corresponding to the start of the spoken word. This field is only set if
1604
+ # `enable_word_time_offsets=true` and only in the top hypothesis. This is an
1605
+ # experimental feature and the accuracy of the time offset can vary.
1606
+ # Corresponds to the JSON property `startTime`
1607
+ # @return [String]
1608
+ attr_accessor :start_time
1609
+
1610
+ # The word corresponding to this set of information.
1611
+ # Corresponds to the JSON property `word`
1612
+ # @return [String]
1613
+ attr_accessor :word
1614
+
1615
+ def initialize(**args)
1616
+ update!(**args)
1617
+ end
1618
+
1619
+ # Update properties of this object
1620
+ def update!(**args)
1621
+ @confidence = args[:confidence] if args.key?(:confidence)
1622
+ @end_time = args[:end_time] if args.key?(:end_time)
1623
+ @speaker_tag = args[:speaker_tag] if args.key?(:speaker_tag)
1624
+ @start_time = args[:start_time] if args.key?(:start_time)
1625
+ @word = args[:word] if args.key?(:word)
1626
+ end
1627
+ end
1628
+
1629
+ # Video annotation progress. Included in the `metadata`
1630
+ # field of the `Operation` returned by the `GetOperation`
1631
+ # call of the `google::longrunning::Operations` service.
1632
+ class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoProgress
1633
+ include Google::Apis::Core::Hashable
1634
+
1635
+ # Progress metadata for all videos specified in `AnnotateVideoRequest`.
1636
+ # Corresponds to the JSON property `annotationProgress`
1637
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress>]
1638
+ attr_accessor :annotation_progress
1639
+
1640
+ def initialize(**args)
1641
+ update!(**args)
1642
+ end
1643
+
1644
+ # Update properties of this object
1645
+ def update!(**args)
1646
+ @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress)
1647
+ end
1648
+ end
1649
+
1650
+ # Video annotation response. Included in the `response`
1651
+ # field of the `Operation` returned by the `GetOperation`
1652
+ # call of the `google::longrunning::Operations` service.
1653
+ class GoogleCloudVideointelligenceV1p1beta1AnnotateVideoResponse
1654
+ include Google::Apis::Core::Hashable
1655
+
1656
+ # Annotation results for all videos specified in `AnnotateVideoRequest`.
1657
+ # Corresponds to the JSON property `annotationResults`
1658
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults>]
1659
+ attr_accessor :annotation_results
1660
+
1661
+ def initialize(**args)
1662
+ update!(**args)
1663
+ end
1664
+
1665
+ # Update properties of this object
1666
+ def update!(**args)
1667
+ @annotation_results = args[:annotation_results] if args.key?(:annotation_results)
1668
+ end
1669
+ end
1670
+
1671
+ # Detected entity from video analysis.
1672
+ class GoogleCloudVideointelligenceV1p1beta1Entity
1673
+ include Google::Apis::Core::Hashable
1674
+
1675
+ # Textual description, e.g. `Fixed-gear bicycle`.
1676
+ # Corresponds to the JSON property `description`
1677
+ # @return [String]
1678
+ attr_accessor :description
1679
+
1680
+ # Opaque entity ID. Some IDs may be available in
1681
+ # [Google Knowledge Graph Search
1682
+ # API](https://developers.google.com/knowledge-graph/).
1683
+ # Corresponds to the JSON property `entityId`
1684
+ # @return [String]
1685
+ attr_accessor :entity_id
1686
+
1687
+ # Language code for `description` in BCP-47 format.
1688
+ # Corresponds to the JSON property `languageCode`
1689
+ # @return [String]
1690
+ attr_accessor :language_code
1691
+
1692
+ def initialize(**args)
1693
+ update!(**args)
1694
+ end
1695
+
1696
+ # Update properties of this object
1697
+ def update!(**args)
1698
+ @description = args[:description] if args.key?(:description)
1699
+ @entity_id = args[:entity_id] if args.key?(:entity_id)
1700
+ @language_code = args[:language_code] if args.key?(:language_code)
1701
+ end
1702
+ end
1703
+
1704
+ # Explicit content annotation (based on per-frame visual signals only).
1705
+ # If no explicit content has been detected in a frame, no annotations are
1706
+ # present for that frame.
1707
+ class GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation
1708
+ include Google::Apis::Core::Hashable
1709
+
1710
+ # All video frames where explicit content was detected.
1711
+ # Corresponds to the JSON property `frames`
1712
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame>]
1713
+ attr_accessor :frames
1714
+
1715
+ def initialize(**args)
1716
+ update!(**args)
1717
+ end
1718
+
1719
+ # Update properties of this object
1720
+ def update!(**args)
1721
+ @frames = args[:frames] if args.key?(:frames)
1722
+ end
1723
+ end
1724
+
1725
+ # Video frame level annotation results for explicit content.
1726
+ class GoogleCloudVideointelligenceV1p1beta1ExplicitContentFrame
1727
+ include Google::Apis::Core::Hashable
1728
+
1729
+ # Likelihood of the pornography content..
1730
+ # Corresponds to the JSON property `pornographyLikelihood`
1731
+ # @return [String]
1732
+ attr_accessor :pornography_likelihood
1733
+
1734
+ # Time-offset, relative to the beginning of the video, corresponding to the
1735
+ # video frame for this location.
1736
+ # Corresponds to the JSON property `timeOffset`
1737
+ # @return [String]
1738
+ attr_accessor :time_offset
1739
+
1740
+ def initialize(**args)
1741
+ update!(**args)
1742
+ end
1743
+
1744
+ # Update properties of this object
1745
+ def update!(**args)
1746
+ @pornography_likelihood = args[:pornography_likelihood] if args.key?(:pornography_likelihood)
1747
+ @time_offset = args[:time_offset] if args.key?(:time_offset)
1748
+ end
1749
+ end
1750
+
1751
+ # Label annotation.
1752
+ class GoogleCloudVideointelligenceV1p1beta1LabelAnnotation
1753
+ include Google::Apis::Core::Hashable
1754
+
1755
+ # Common categories for the detected entity.
1756
+ # E.g. when the label is `Terrier` the category is likely `dog`. And in some
1757
+ # cases there might be more than one categories e.g. `Terrier` could also be
1758
+ # a `pet`.
1759
+ # Corresponds to the JSON property `categoryEntities`
1760
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1Entity>]
1761
+ attr_accessor :category_entities
1762
+
1763
+ # Detected entity from video analysis.
1764
+ # Corresponds to the JSON property `entity`
1765
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1Entity]
1766
+ attr_accessor :entity
1767
+
1768
+ # All video frames where a label was detected.
1769
+ # Corresponds to the JSON property `frames`
1770
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelFrame>]
1771
+ attr_accessor :frames
1772
+
1773
+ # All video segments where a label was detected.
1774
+ # Corresponds to the JSON property `segments`
1775
+ # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelSegment>]
1776
+ attr_accessor :segments
1777
+
1778
+ def initialize(**args)
1779
+ update!(**args)
1780
+ end
1781
+
1782
+ # Update properties of this object
1783
+ def update!(**args)
1784
+ @category_entities = args[:category_entities] if args.key?(:category_entities)
1785
+ @entity = args[:entity] if args.key?(:entity)
1786
+ @frames = args[:frames] if args.key?(:frames)
1787
+ @segments = args[:segments] if args.key?(:segments)
1788
+ end
1789
+ end
1790
+
1791
+ # Video frame level annotation results for label detection.
1792
+ class GoogleCloudVideointelligenceV1p1beta1LabelFrame
1793
+ include Google::Apis::Core::Hashable
1794
+
1795
+ # Confidence that the label is accurate. Range: [0, 1].
1796
+ # Corresponds to the JSON property `confidence`
1797
+ # @return [Float]
1798
+ attr_accessor :confidence
1799
+
1800
+ # Time-offset, relative to the beginning of the video, corresponding to the
1801
+ # video frame for this location.
1802
+ # Corresponds to the JSON property `timeOffset`
1803
+ # @return [String]
1804
+ attr_accessor :time_offset
1805
+
1806
+ def initialize(**args)
1807
+ update!(**args)
1808
+ end
1809
+
1810
+ # Update properties of this object
1811
+ def update!(**args)
1812
+ @confidence = args[:confidence] if args.key?(:confidence)
1813
+ @time_offset = args[:time_offset] if args.key?(:time_offset)
1814
+ end
1815
+ end
1816
+
1817
+ # Video segment level annotation results for label detection.
1818
+ class GoogleCloudVideointelligenceV1p1beta1LabelSegment
1819
+ include Google::Apis::Core::Hashable
1820
+
1821
+ # Confidence that the label is accurate. Range: [0, 1].
1822
+ # Corresponds to the JSON property `confidence`
1823
+ # @return [Float]
1824
+ attr_accessor :confidence
1825
+
1826
+ # Video segment.
1827
+ # Corresponds to the JSON property `segment`
1828
+ # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment]
1829
+ attr_accessor :segment
1830
+
1831
+ def initialize(**args)
1832
+ update!(**args)
1833
+ end
1834
+
1835
+ # Update properties of this object
1836
+ def update!(**args)
1837
+ @confidence = args[:confidence] if args.key?(:confidence)
1838
+ @segment = args[:segment] if args.key?(:segment)
1839
+ end
1840
+ end
1841
+
1842
# Normalized bounding box. Vertex coordinates are expressed relative to the
# original image and lie in the range [0, 1].
class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox
  include Google::Apis::Core::Hashable

  # Bottom Y coordinate.
  # Corresponds to the JSON property `bottom`
  # @return [Float]
  attr_accessor :bottom

  # Left X coordinate.
  # Corresponds to the JSON property `left`
  # @return [Float]
  attr_accessor :left

  # Right X coordinate.
  # Corresponds to the JSON property `right`
  # @return [Float]
  attr_accessor :right

  # Top Y coordinate.
  # Corresponds to the JSON property `top`
  # @return [Float]
  attr_accessor :top

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[bottom left right top].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
1880
+
1881
# Normalized bounding polygon for text (possibly not axis-aligned).
# Holds the corner points in clockwise order starting from the top-left
# corner; the vertex order is preserved under rotation, so for a box
# rotated 180 degrees the same vertices appear as (0, 1, 2, 3) but in
# flipped positions. Values may fall below 0 or above 1 because of the
# trigonometric calculations used to locate the box.
class GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingPoly
  include Google::Apis::Core::Hashable

  # Normalized vertices of the bounding polygon.
  # Corresponds to the JSON property `vertices`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1NormalizedVertex>]
  attr_accessor :vertices

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[vertices].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
1913
+
1914
# A 2D point in the image. NOTE: coordinates are normalized relative to
# the original image and range from 0 to 1.
class GoogleCloudVideointelligenceV1p1beta1NormalizedVertex
  include Google::Apis::Core::Hashable

  # X coordinate.
  # Corresponds to the JSON property `x`
  # @return [Float]
  attr_accessor :x

  # Y coordinate.
  # Corresponds to the JSON property `y`
  # @return [Float]
  attr_accessor :y

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[x y].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
1940
+
1941
# Annotations for a single tracked object.
class GoogleCloudVideointelligenceV1p1beta1ObjectTrackingAnnotation
  include Google::Apis::Core::Hashable

  # Labeling confidence for this track's object category.
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Detected entity from video analysis.
  # Corresponds to the JSON property `entity`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1Entity]
  attr_accessor :entity

  # Per-frame information for every frame in which this object track appears.
  # Non-streaming batch mode: may hold one or more ObjectTrackingFrame
  # messages. Streaming mode: holds exactly one ObjectTrackingFrame message.
  # Corresponds to the JSON property `frames`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1ObjectTrackingFrame>]
  attr_accessor :frames

  # Video segment.
  # Corresponds to the JSON property `segment`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment]
  attr_accessor :segment

  # Streaming mode ONLY. In streaming mode the end time of a tracked object
  # is unknown until the track completes, so no VideoSegment is returned.
  # Instead a unique integer track_id is provided so callers can correlate
  # ongoing ObjectTrackAnnotation results for the same track over time.
  # Corresponds to the JSON property `trackId`
  # @return [Fixnum]
  attr_accessor :track_id

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[confidence entity frames segment track_id].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
1991
+
1992
# Frame-level object detection and tracking annotation, holding the
# per-frame location, time offset, and confidence.
class GoogleCloudVideointelligenceV1p1beta1ObjectTrackingFrame
  include Google::Apis::Core::Hashable

  # Normalized bounding box, with vertex coordinates relative to the
  # original image in the range [0, 1].
  # Corresponds to the JSON property `normalizedBoundingBox`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingBox]
  attr_accessor :normalized_bounding_box

  # The timestamp of the frame in microseconds.
  # Corresponds to the JSON property `timeOffset`
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[normalized_bounding_box time_offset].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2019
+
2020
# Alternative hypotheses (a.k.a. n-best list).
class GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative
  include Google::Apis::Core::Hashable

  # Confidence estimate between 0.0 and 1.0; higher values indicate a
  # greater estimated likelihood that the recognized words are correct.
  # Typically provided only for the top hypothesis and only for
  # `is_final=true` results. Clients should not rely on `confidence`: it is
  # not guaranteed to be accurate or consistent, and the default of 0.0 is
  # a sentinel meaning `confidence` was not set.
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Transcript text representing the words that the user spoke.
  # Corresponds to the JSON property `transcript`
  # @return [String]
  attr_accessor :transcript

  # Word-specific information for each recognized word.
  # Corresponds to the JSON property `words`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1WordInfo>]
  attr_accessor :words

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[confidence transcript words].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2055
+
2056
# A speech recognition result covering a portion of the audio.
class GoogleCloudVideointelligenceV1p1beta1SpeechTranscription
  include Google::Apis::Core::Hashable

  # One or more recognition hypotheses (up to the `max_alternatives`
  # limit), ordered by accuracy with the most probable (top) alternative
  # first, as ranked by the recognizer.
  # Corresponds to the JSON property `alternatives`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1SpeechRecognitionAlternative>]
  attr_accessor :alternatives

  # Output only. The
  # [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of
  # the language in this result, detected as the most likely language
  # spoken in the audio.
  # Corresponds to the JSON property `languageCode`
  # @return [String]
  attr_accessor :language_code

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[alternatives language_code].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2086
+
2087
# Annotations for one detected OCR text snippet: the corresponding text,
# confidence value, and frame-level information for each detection.
class GoogleCloudVideointelligenceV1p1beta1TextAnnotation
  include Google::Apis::Core::Hashable

  # All video segments in which the OCR-detected text appears.
  # Corresponds to the JSON property `segments`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1TextSegment>]
  attr_accessor :segments

  # The detected text.
  # Corresponds to the JSON property `text`
  # @return [String]
  attr_accessor :text

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[segments text].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2113
+
2114
# Frame-level text annotation (OCR) result: the timestamp and bounding-box
# location of a frame containing detected OCR text.
class GoogleCloudVideointelligenceV1p1beta1TextFrame
  include Google::Apis::Core::Hashable

  # Normalized bounding polygon for the text (possibly not axis-aligned),
  # listing corner points clockwise from the top-left corner. The vertex
  # order (0, 1, 2, 3) is preserved under rotation, and values may fall
  # below 0 or above 1 because of the trigonometric calculations used to
  # locate the box.
  # Corresponds to the JSON property `rotatedBoundingBox`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1NormalizedBoundingPoly]
  attr_accessor :rotated_bounding_box

  # Timestamp of this frame.
  # Corresponds to the JSON property `timeOffset`
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[rotated_bounding_box time_offset].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2154
+
2155
# Segment-level text detection result for a video.
class GoogleCloudVideointelligenceV1p1beta1TextSegment
  include Google::Apis::Core::Hashable

  # Confidence for the track of detected text, computed as the highest
  # confidence over all frames in which the OCR-detected text appears.
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Information about the frames in which the OCR-detected text appears.
  # Corresponds to the JSON property `frames`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1TextFrame>]
  attr_accessor :frames

  # Video segment.
  # Corresponds to the JSON property `segment`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment]
  attr_accessor :segment

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[confidence frames segment].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2186
+
2187
# Annotation progress for a single video.
class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationProgress
  include Google::Apis::Core::Hashable

  # Video file location in
  # [Google Cloud Storage](https://cloud.google.com/storage/).
  # Corresponds to the JSON property `inputUri`
  # @return [String]
  attr_accessor :input_uri

  # Approximate percentage processed so far; guaranteed to be 100 once
  # processing has fully completed.
  # Corresponds to the JSON property `progressPercent`
  # @return [Fixnum]
  attr_accessor :progress_percent

  # Time when the request was received.
  # Corresponds to the JSON property `startTime`
  # @return [String]
  attr_accessor :start_time

  # Time of the most recent update.
  # Corresponds to the JSON property `updateTime`
  # @return [String]
  attr_accessor :update_time

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[input_uri progress_percent start_time update_time].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2225
+
2226
# Annotation results for a single video.
class GoogleCloudVideointelligenceV1p1beta1VideoAnnotationResults
  include Google::Apis::Core::Hashable

  # The `Status` type defines a logical error model suitable for different
  # programming environments, including REST APIs and RPC APIs, and is used
  # by [gRPC](https://github.com/grpc). It carries three pieces of data: an
  # error code (an enum value of google.rpc.Code, though additional codes
  # may be accepted), a developer-facing English error message, and
  # optional error details (arbitrary structured information; a predefined
  # set of detail types lives in the `google.rpc` package). Localized,
  # user-facing messages belong in the error details or in the client. The
  # `Status` message is a logical representation rather than a fixed wire
  # format and maps differently across client libraries and protocols —
  # e.g. to exceptions in Java or error codes in C. Typical uses include
  # partial errors embedded in normal responses, per-step workflow errors,
  # per-sub-response errors in batch responses, statuses of asynchronous
  # operations embedded in their results, and logged errors (after any
  # stripping needed for security/privacy reasons).
  # Corresponds to the JSON property `error`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus]
  attr_accessor :error

  # Explicit content annotation (based on per-frame visual signals only).
  # Frames with no detected explicit content carry no annotations.
  # Corresponds to the JSON property `explicitAnnotation`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1ExplicitContentAnnotation]
  attr_accessor :explicit_annotation

  # Frame-level label annotations; exactly one element per unique label.
  # Corresponds to the JSON property `frameLabelAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation>]
  attr_accessor :frame_label_annotations

  # Video file location in
  # [Google Cloud Storage](https://cloud.google.com/storage/).
  # Corresponds to the JSON property `inputUri`
  # @return [String]
  attr_accessor :input_uri

  # Annotations for the objects detected and tracked in the video.
  # Corresponds to the JSON property `objectAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1ObjectTrackingAnnotation>]
  attr_accessor :object_annotations

  # Video-level or user-specified segment-level label annotations; exactly
  # one element per unique label.
  # Corresponds to the JSON property `segmentLabelAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation>]
  attr_accessor :segment_label_annotations

  # Shot annotations; each shot is represented as a video segment.
  # Corresponds to the JSON property `shotAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1VideoSegment>]
  attr_accessor :shot_annotations

  # Shot-level label annotations; exactly one element per unique label.
  # Corresponds to the JSON property `shotLabelAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1LabelAnnotation>]
  attr_accessor :shot_label_annotations

  # Speech transcription.
  # Corresponds to the JSON property `speechTranscriptions`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1SpeechTranscription>]
  attr_accessor :speech_transcriptions

  # OCR text detection and tracking: annotations for each detected text
  # snippet, each with its associated frame information.
  # Corresponds to the JSON property `textAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p1beta1TextAnnotation>]
  attr_accessor :text_annotations

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[error explicit_annotation frame_label_annotations input_uri
       object_annotations segment_label_annotations shot_annotations
       shot_label_annotations speech_transcriptions
       text_annotations].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2344
+
2345
# Video segment.
class GoogleCloudVideointelligenceV1p1beta1VideoSegment
  include Google::Apis::Core::Hashable

  # Time-offset, relative to the beginning of the video, marking the end
  # of the segment (inclusive).
  # Corresponds to the JSON property `endTimeOffset`
  # @return [String]
  attr_accessor :end_time_offset

  # Time-offset, relative to the beginning of the video, marking the start
  # of the segment (inclusive).
  # Corresponds to the JSON property `startTimeOffset`
  # @return [String]
  attr_accessor :start_time_offset

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[end_time_offset start_time_offset].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2371
+
2372
# Word-specific information for recognized words. Only included in the
# response when certain request parameters are set, such as
# `enable_word_time_offsets`.
class GoogleCloudVideointelligenceV1p1beta1WordInfo
  include Google::Apis::Core::Hashable

  # Output only. Confidence estimate between 0.0 and 1.0; higher values
  # indicate a greater estimated likelihood that the recognized words are
  # correct. Set only for the top alternative, not guaranteed to be
  # accurate, and not always provided. The default of 0.0 is a sentinel
  # meaning `confidence` was not set.
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Time offset, relative to the beginning of the audio, marking the end of
  # the spoken word. Set only when `enable_word_time_offsets=true` and only
  # in the top hypothesis. This is an experimental feature and the accuracy
  # of the time offset can vary.
  # Corresponds to the JSON property `endTime`
  # @return [String]
  attr_accessor :end_time

  # Output only. A distinct integer value assigned to every speaker within
  # the audio, identifying which speaker was detected to have spoken this
  # word. Ranges from 1 up to diarization_speaker_count, and is only set
  # when speaker diarization is enabled.
  # Corresponds to the JSON property `speakerTag`
  # @return [Fixnum]
  attr_accessor :speaker_tag

  # Time offset, relative to the beginning of the audio, marking the start
  # of the spoken word. Set only when `enable_word_time_offsets=true` and
  # only in the top hypothesis. This is an experimental feature and the
  # accuracy of the time offset can vary.
  # Corresponds to the JSON property `startTime`
  # @return [String]
  attr_accessor :start_time

  # The word corresponding to this set of information.
  # Corresponds to the JSON property `word`
  # @return [String]
  attr_accessor :word

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[confidence end_time speaker_tag start_time word].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2430
+
2431
# Video annotation progress, included in the `metadata` field of the
# `Operation` returned by the `GetOperation` call of the
# `google::longrunning::Operations` service.
class GoogleCloudVideointelligenceV1p2beta1AnnotateVideoProgress
  include Google::Apis::Core::Hashable

  # Progress metadata for all videos specified in `AnnotateVideoRequest`.
  # Corresponds to the JSON property `annotationProgress`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoAnnotationProgress>]
  attr_accessor :annotation_progress

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[annotation_progress].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2451
+
2452
# Video annotation response, included in the `response` field of the
# `Operation` returned by the `GetOperation` call of the
# `google::longrunning::Operations` service.
class GoogleCloudVideointelligenceV1p2beta1AnnotateVideoResponse
  include Google::Apis::Core::Hashable

  # Annotation results for all videos specified in `AnnotateVideoRequest`.
  # Corresponds to the JSON property `annotationResults`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoAnnotationResults>]
  attr_accessor :annotation_results

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[annotation_results].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2472
+
2473
# Detected entity from video analysis.
class GoogleCloudVideointelligenceV1p2beta1Entity
  include Google::Apis::Core::Hashable

  # Textual description, e.g. `Fixed-gear bicycle`.
  # Corresponds to the JSON property `description`
  # @return [String]
  attr_accessor :description

  # Opaque entity ID. Some IDs may be available in
  # [Google Knowledge Graph Search
  # API](https://developers.google.com/knowledge-graph/).
  # Corresponds to the JSON property `entityId`
  # @return [String]
  attr_accessor :entity_id

  # Language code for `description` in BCP-47 format.
  # Corresponds to the JSON property `languageCode`
  # @return [String]
  attr_accessor :language_code

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[description entity_id language_code].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2505
+
2506
# Explicit content annotation (based on per-frame visual signals only).
# Frames with no detected explicit content carry no annotations.
class GoogleCloudVideointelligenceV1p2beta1ExplicitContentAnnotation
  include Google::Apis::Core::Hashable

  # All video frames in which explicit content was detected.
  # Corresponds to the JSON property `frames`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1ExplicitContentFrame>]
  attr_accessor :frames

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[frames].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2526
+
2527
# Frame-level explicit content annotation result.
class GoogleCloudVideointelligenceV1p2beta1ExplicitContentFrame
  include Google::Apis::Core::Hashable

  # Likelihood of the pornography content.
  # Corresponds to the JSON property `pornographyLikelihood`
  # @return [String]
  attr_accessor :pornography_likelihood

  # Time-offset, relative to the beginning of the video, corresponding to
  # the video frame for this location.
  # Corresponds to the JSON property `timeOffset`
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[pornography_likelihood time_offset].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2552
+
2553
# Label annotation.
class GoogleCloudVideointelligenceV1p2beta1LabelAnnotation
  include Google::Apis::Core::Hashable

  # Common categories for the detected entity, e.g. for the label `Terrier`
  # the category is likely `dog`; in some cases there may be more than one
  # category, e.g. `Terrier` could also be a `pet`.
  # Corresponds to the JSON property `categoryEntities`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1Entity>]
  attr_accessor :category_entities

  # Detected entity from video analysis.
  # Corresponds to the JSON property `entity`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1Entity]
  attr_accessor :entity

  # All video frames in which a label was detected.
  # Corresponds to the JSON property `frames`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelFrame>]
  attr_accessor :frames

  # All video segments in which a label was detected.
  # Corresponds to the JSON property `segments`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelSegment>]
  attr_accessor :segments

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[category_entities entity frames segments].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2592
+
2593
# Frame-level label detection result for a video.
class GoogleCloudVideointelligenceV1p2beta1LabelFrame
  include Google::Apis::Core::Hashable

  # Confidence that the label is accurate, in the range [0, 1].
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Time-offset, relative to the beginning of the video, corresponding to
  # the video frame for this location.
  # Corresponds to the JSON property `timeOffset`
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    %i[confidence time_offset].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2618
+
2619
# Video segment level annotation results for label detection.
class GoogleCloudVideointelligenceV1p2beta1LabelSegment
  include Google::Apis::Core::Hashable

  # Confidence that the label is accurate. Range: [0, 1].
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Video segment.
  # Corresponds to the JSON property `segment`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment]
  attr_accessor :segment

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[confidence segment].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2643
+
2644
# Normalized bounding box.
# The normalized vertex coordinates are relative to the original image.
# Range: [0, 1].
class GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingBox
  include Google::Apis::Core::Hashable

  # Bottom Y coordinate.
  # Corresponds to the JSON property `bottom`
  # @return [Float]
  attr_accessor :bottom

  # Left X coordinate.
  # Corresponds to the JSON property `left`
  # @return [Float]
  attr_accessor :left

  # Right X coordinate.
  # Corresponds to the JSON property `right`
  # @return [Float]
  attr_accessor :right

  # Top Y coordinate.
  # Corresponds to the JSON property `top`
  # @return [Float]
  attr_accessor :top

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[bottom left right top].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2682
+
2683
# Normalized bounding polygon for text (that might not be aligned with axis).
# Contains the corner points in clockwise order starting from the top-left
# corner, e.g. (0, 1, 2, 3) for an upright rectangle; the same vertex order
# is kept when the box is rotated. Values may fall outside [0, 1] due to
# trigonometric calculations for the location of the box.
class GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingPoly
  include Google::Apis::Core::Hashable

  # Normalized vertices of the bounding polygon.
  # Corresponds to the JSON property `vertices`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1NormalizedVertex>]
  attr_accessor :vertices

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    @vertices = args[:vertices] if args.key?(:vertices)
  end
end
2715
+
2716
# A vertex represents a 2D point in the image.
# NOTE: the normalized vertex coordinates are relative to the original image
# and range from 0 to 1.
class GoogleCloudVideointelligenceV1p2beta1NormalizedVertex
  include Google::Apis::Core::Hashable

  # X coordinate.
  # Corresponds to the JSON property `x`
  # @return [Float]
  attr_accessor :x

  # Y coordinate.
  # Corresponds to the JSON property `y`
  # @return [Float]
  attr_accessor :y

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[x y].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2742
+
2743
# Annotations corresponding to one tracked object.
class GoogleCloudVideointelligenceV1p2beta1ObjectTrackingAnnotation
  include Google::Apis::Core::Hashable

  # Object category's labeling confidence of this track.
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Detected entity from video analysis.
  # Corresponds to the JSON property `entity`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1Entity]
  attr_accessor :entity

  # Information for all frames where this object track appears.
  # Non-streaming batch mode: may hold one or more ObjectTrackingFrame
  # messages. Streaming mode: holds exactly one ObjectTrackingFrame message.
  # Corresponds to the JSON property `frames`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1ObjectTrackingFrame>]
  attr_accessor :frames

  # Video segment.
  # Corresponds to the JSON property `segment`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment]
  attr_accessor :segment

  # Streaming mode ONLY. The end time of a tracked object is unknown while
  # streaming, so no VideoSegment is returned; instead this unique integer
  # track_id lets callers correlate ongoing ObjectTrackAnnotation results for
  # the same track over time.
  # Corresponds to the JSON property `trackId`
  # @return [Fixnum]
  attr_accessor :track_id

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[confidence entity frames segment track_id].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2793
+
2794
# Video frame level annotations for object detection and tracking. This field
# stores per frame location, time offset, and confidence.
class GoogleCloudVideointelligenceV1p2beta1ObjectTrackingFrame
  include Google::Apis::Core::Hashable

  # Normalized bounding box.
  # The normalized vertex coordinates are relative to the original image.
  # Range: [0, 1].
  # Corresponds to the JSON property `normalizedBoundingBox`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingBox]
  attr_accessor :normalized_bounding_box

  # The timestamp of the frame in microseconds.
  # Corresponds to the JSON property `timeOffset`
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[normalized_bounding_box time_offset].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2821
+
2822
# Alternative hypotheses (a.k.a. n-best list).
class GoogleCloudVideointelligenceV1p2beta1SpeechRecognitionAlternative
  include Google::Apis::Core::Hashable

  # The confidence estimate between 0.0 and 1.0. A higher number indicates an
  # estimated greater likelihood that the recognized words are correct. This
  # field is typically provided only for the top hypothesis, and only for
  # `is_final=true` results. Clients should not rely on the `confidence` field
  # as it is not guaranteed to be accurate or consistent.
  # The default of 0.0 is a sentinel value indicating `confidence` was not set.
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Transcript text representing the words that the user spoke.
  # Corresponds to the JSON property `transcript`
  # @return [String]
  attr_accessor :transcript

  # A list of word-specific information for each recognized word.
  # Corresponds to the JSON property `words`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1WordInfo>]
  attr_accessor :words

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[confidence transcript words].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2857
+
2858
# A speech recognition result corresponding to a portion of the audio.
class GoogleCloudVideointelligenceV1p2beta1SpeechTranscription
  include Google::Apis::Core::Hashable

  # May contain one or more recognition hypotheses (up to the maximum specified
  # in `max_alternatives`), ordered by accuracy with the top (first)
  # alternative being the most probable, as ranked by the recognizer.
  # Corresponds to the JSON property `alternatives`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1SpeechRecognitionAlternative>]
  attr_accessor :alternatives

  # Output only. The
  # [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the
  # language in this result, detected as the most likely language spoken in
  # the audio.
  # Corresponds to the JSON property `languageCode`
  # @return [String]
  attr_accessor :language_code

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[alternatives language_code].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2888
+
2889
# Annotations related to one detected OCR text snippet. This will contain the
# corresponding text, confidence value, and frame level information for each
# detection.
class GoogleCloudVideointelligenceV1p2beta1TextAnnotation
  include Google::Apis::Core::Hashable

  # All video segments where OCR detected text appears.
  # Corresponds to the JSON property `segments`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1TextSegment>]
  attr_accessor :segments

  # The detected text.
  # Corresponds to the JSON property `text`
  # @return [String]
  attr_accessor :text

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[segments text].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2915
+
2916
# Video frame level annotation results for text annotation (OCR).
# Contains information regarding timestamp and bounding box locations for the
# frames containing detected OCR text snippets.
class GoogleCloudVideointelligenceV1p2beta1TextFrame
  include Google::Apis::Core::Hashable

  # Normalized bounding polygon for text (that might not be aligned with
  # axis): the corner points in clockwise order starting from the top-left
  # corner, with the vertex order preserved under rotation. Values may fall
  # outside [0, 1] due to trigonometric calculations for the location of the
  # box.
  # Corresponds to the JSON property `rotatedBoundingBox`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1NormalizedBoundingPoly]
  attr_accessor :rotated_bounding_box

  # Timestamp of this frame.
  # Corresponds to the JSON property `timeOffset`
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[rotated_bounding_box time_offset].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2956
+
2957
# Video segment level annotation results for text detection.
class GoogleCloudVideointelligenceV1p2beta1TextSegment
  include Google::Apis::Core::Hashable

  # Confidence for the track of detected text. It is calculated as the highest
  # over all frames where OCR detected text appears.
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Information related to the frames where OCR detected text appears.
  # Corresponds to the JSON property `frames`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1TextFrame>]
  attr_accessor :frames

  # Video segment.
  # Corresponds to the JSON property `segment`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment]
  attr_accessor :segment

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[confidence frames segment].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
2988
+
2989
# Annotation progress for a single video.
class GoogleCloudVideointelligenceV1p2beta1VideoAnnotationProgress
  include Google::Apis::Core::Hashable

  # Video file location in
  # [Google Cloud Storage](https://cloud.google.com/storage/).
  # Corresponds to the JSON property `inputUri`
  # @return [String]
  attr_accessor :input_uri

  # Approximate percentage processed thus far. Guaranteed to be
  # 100 when fully processed.
  # Corresponds to the JSON property `progressPercent`
  # @return [Fixnum]
  attr_accessor :progress_percent

  # Time when the request was received.
  # Corresponds to the JSON property `startTime`
  # @return [String]
  attr_accessor :start_time

  # Time of the most recent update.
  # Corresponds to the JSON property `updateTime`
  # @return [String]
  attr_accessor :update_time

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[input_uri progress_percent start_time update_time].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3027
+
3028
# Annotation results for a single video.
class GoogleCloudVideointelligenceV1p2beta1VideoAnnotationResults
  include Google::Apis::Core::Hashable

  # The `Status` type defines a logical error model suitable for different
  # programming environments, including REST APIs and RPC APIs, and is used by
  # [gRPC](https://github.com/grpc). It carries three pieces of data: an error
  # code (normally an enum value of google.rpc.Code), a developer-facing
  # English error message, and optional error details (a predefined set of
  # detail types lives in the package `google.rpc`). The message is a logical
  # representation, not necessarily the wire format, and is used for partial
  # errors, workflow errors, batch responses, asynchronous operations, and
  # logging.
  # Corresponds to the JSON property `error`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus]
  attr_accessor :error

  # Explicit content annotation (based on per-frame visual signals only).
  # If no explicit content has been detected in a frame, no annotations are
  # present for that frame.
  # Corresponds to the JSON property `explicitAnnotation`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1ExplicitContentAnnotation]
  attr_accessor :explicit_annotation

  # Label annotations on frame level.
  # There is exactly one element for each unique label.
  # Corresponds to the JSON property `frameLabelAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelAnnotation>]
  attr_accessor :frame_label_annotations

  # Video file location in
  # [Google Cloud Storage](https://cloud.google.com/storage/).
  # Corresponds to the JSON property `inputUri`
  # @return [String]
  attr_accessor :input_uri

  # Annotations for list of objects detected and tracked in video.
  # Corresponds to the JSON property `objectAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1ObjectTrackingAnnotation>]
  attr_accessor :object_annotations

  # Label annotations on video level or user specified segment level.
  # There is exactly one element for each unique label.
  # Corresponds to the JSON property `segmentLabelAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelAnnotation>]
  attr_accessor :segment_label_annotations

  # Shot annotations. Each shot is represented as a video segment.
  # Corresponds to the JSON property `shotAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1VideoSegment>]
  attr_accessor :shot_annotations

  # Label annotations on shot level.
  # There is exactly one element for each unique label.
  # Corresponds to the JSON property `shotLabelAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1LabelAnnotation>]
  attr_accessor :shot_label_annotations

  # Speech transcription.
  # Corresponds to the JSON property `speechTranscriptions`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1SpeechTranscription>]
  attr_accessor :speech_transcriptions

  # OCR text detection and tracking.
  # Annotations for list of detected text snippets. Each will have list of
  # frame information associated with it.
  # Corresponds to the JSON property `textAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p2beta1TextAnnotation>]
  attr_accessor :text_annotations

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[error explicit_annotation frame_label_annotations input_uri
       object_annotations segment_label_annotations shot_annotations
       shot_label_annotations speech_transcriptions
       text_annotations].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3146
+
3147
# Video segment.
class GoogleCloudVideointelligenceV1p2beta1VideoSegment
  include Google::Apis::Core::Hashable

  # Time-offset, relative to the beginning of the video,
  # corresponding to the end of the segment (inclusive).
  # Corresponds to the JSON property `endTimeOffset`
  # @return [String]
  attr_accessor :end_time_offset

  # Time-offset, relative to the beginning of the video,
  # corresponding to the start of the segment (inclusive).
  # Corresponds to the JSON property `startTimeOffset`
  # @return [String]
  attr_accessor :start_time_offset

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[end_time_offset start_time_offset].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3173
+
3174
# Word-specific information for recognized words. Word information is only
# included in the response when certain request parameters are set, such
# as `enable_word_time_offsets`.
class GoogleCloudVideointelligenceV1p2beta1WordInfo
  include Google::Apis::Core::Hashable

  # Output only. The confidence estimate between 0.0 and 1.0. A higher number
  # indicates an estimated greater likelihood that the recognized words are
  # correct. This field is set only for the top alternative.
  # This field is not guaranteed to be accurate and users should not rely on
  # it to be always provided.
  # The default of 0.0 is a sentinel value indicating `confidence` was not
  # set.
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Time offset relative to the beginning of the audio, and corresponding to
  # the end of the spoken word. This field is only set if
  # `enable_word_time_offsets=true` and only in the top hypothesis. This is an
  # experimental feature and the accuracy of the time offset can vary.
  # Corresponds to the JSON property `endTime`
  # @return [String]
  attr_accessor :end_time

  # Output only. A distinct integer value is assigned for every speaker within
  # the audio. This field specifies which one of those speakers was detected
  # to have spoken this word. Value ranges from 1 up to
  # diarization_speaker_count, and is only set if speaker diarization is
  # enabled.
  # Corresponds to the JSON property `speakerTag`
  # @return [Fixnum]
  attr_accessor :speaker_tag

  # Time offset relative to the beginning of the audio, and corresponding to
  # the start of the spoken word. This field is only set if
  # `enable_word_time_offsets=true` and only in the top hypothesis. This is an
  # experimental feature and the accuracy of the time offset can vary.
  # Corresponds to the JSON property `startTime`
  # @return [String]
  attr_accessor :start_time

  # The word corresponding to this set of information.
  # Corresponds to the JSON property `word`
  # @return [String]
  attr_accessor :word

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[confidence end_time speaker_tag start_time word].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3232
+
3233
# Video annotation progress. Included in the `metadata`
# field of the `Operation` returned by the `GetOperation`
# call of the `google::longrunning::Operations` service.
class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoProgress
  include Google::Apis::Core::Hashable

  # Progress metadata for all videos specified in `AnnotateVideoRequest`.
  # Corresponds to the JSON property `annotationProgress`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress>]
  attr_accessor :annotation_progress

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    @annotation_progress = args[:annotation_progress] if args.key?(:annotation_progress)
  end
end
3253
+
3254
# Video annotation request.
class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoRequest
  include Google::Apis::Core::Hashable

  # Requested video annotation features.
  # Corresponds to the JSON property `features`
  # @return [Array<String>]
  attr_accessor :features

  # The video data bytes.
  # If unset, the input video(s) should be specified via `input_uri`.
  # If set, `input_uri` should be unset.
  # Corresponds to the JSON property `inputContent`
  # NOTE: Values are automatically base64 encoded/decoded in the client library.
  # @return [String]
  attr_accessor :input_content

  # Input video location. Currently, only
  # [Google Cloud Storage](https://cloud.google.com/storage/) URIs are
  # supported, which must be specified in the following format:
  # `gs://bucket-id/object-id` (other URI formats return
  # google.rpc.Code.INVALID_ARGUMENT). For more information, see
  # [Request URIs](/storage/docs/reference-uris).
  # A video URI may include wildcards in `object-id`, and thus identify
  # multiple videos. Supported wildcards: '*' to match 0 or more characters;
  # '?' to match 1 character. If unset, the input video should be embedded
  # in the request as `input_content`. If set, `input_content` should be
  # unset.
  # Corresponds to the JSON property `inputUri`
  # @return [String]
  attr_accessor :input_uri

  # Optional cloud region where annotation should take place. Supported cloud
  # regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no
  # region is specified, a region will be determined based on video file
  # location.
  # Corresponds to the JSON property `locationId`
  # @return [String]
  attr_accessor :location_id

  # Optional location where the output (in JSON format) should be stored.
  # Currently, only [Google Cloud Storage](https://cloud.google.com/storage/)
  # URIs are supported, which must be specified in the following format:
  # `gs://bucket-id/object-id` (other URI formats return
  # google.rpc.Code.INVALID_ARGUMENT). For more information, see
  # [Request URIs](/storage/docs/reference-uris).
  # Corresponds to the JSON property `outputUri`
  # @return [String]
  attr_accessor :output_uri

  # Video context and/or feature-specific parameters.
  # Corresponds to the JSON property `videoContext`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoContext]
  attr_accessor :video_context

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    %i[features input_content input_uri location_id output_uri
       video_context].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3321
+
3322
# Video annotation response. Included in the `response`
# field of the `Operation` returned by the `GetOperation`
# call of the `google::longrunning::Operations` service.
class GoogleCloudVideointelligenceV1p3beta1AnnotateVideoResponse
  include Google::Apis::Core::Hashable

  # Annotation results for all videos specified in `AnnotateVideoRequest`.
  # Corresponds to the JSON property `annotationResults`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults>]
  attr_accessor :annotation_results

  def initialize(**args)
    update!(**args)
  end

  # Assign every recognized property that is present in +args+.
  def update!(**args)
    @annotation_results = args[:annotation_results] if args.key?(:annotation_results)
  end
end
3342
+
3343
# Detected entity from video analysis.
class GoogleCloudVideointelligenceV1p3beta1Entity
  include Google::Apis::Core::Hashable

  # Textual description, e.g. `Fixed-gear bicycle`.
  # JSON property: `description`
  # @return [String]
  attr_accessor :description

  # Opaque entity ID. Some IDs may be available in
  # [Google Knowledge Graph Search
  # API](https://developers.google.com/knowledge-graph/).
  # JSON property: `entityId`
  # @return [String]
  attr_accessor :entity_id

  # Language code for `description` in BCP-47 format.
  # JSON property: `languageCode`
  # @return [String]
  attr_accessor :language_code

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[description entity_id language_code].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3375
+
3376
# Explicit content annotation (based on per-frame visual signals only).
# If no explicit content has been detected in a frame, no annotations are
# present for that frame.
class GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation
  include Google::Apis::Core::Hashable

  # All video frames where explicit content was detected.
  # JSON property: `frames`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame>]
  attr_accessor :frames

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    @frames = args.fetch(:frames) if args.key?(:frames)
  end
end
3396
+
3397
# Config for EXPLICIT_CONTENT_DETECTION.
class GoogleCloudVideointelligenceV1p3beta1ExplicitContentDetectionConfig
  include Google::Apis::Core::Hashable

  # Model to use for explicit content detection.
  # Supported values: "builtin/stable" (the default if unset) and
  # "builtin/latest".
  # JSON property: `model`
  # @return [String]
  attr_accessor :model

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    @model = args.fetch(:model) if args.key?(:model)
  end
end
3417
+
3418
# Video frame level annotation results for explicit content.
class GoogleCloudVideointelligenceV1p3beta1ExplicitContentFrame
  include Google::Apis::Core::Hashable

  # Likelihood of the pornography content.
  # JSON property: `pornographyLikelihood`
  # @return [String]
  attr_accessor :pornography_likelihood

  # Time-offset, relative to the beginning of the video, corresponding to the
  # video frame for this location.
  # JSON property: `timeOffset`
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[pornography_likelihood time_offset].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3443
+
3444
# Label annotation.
class GoogleCloudVideointelligenceV1p3beta1LabelAnnotation
  include Google::Apis::Core::Hashable

  # Common categories for the detected entity.
  # E.g. when the label is `Terrier` the category is likely `dog`. And in some
  # cases there might be more than one category, e.g. `Terrier` could also be
  # a `pet`.
  # JSON property: `categoryEntities`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1Entity>]
  attr_accessor :category_entities

  # Detected entity from video analysis.
  # JSON property: `entity`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1Entity]
  attr_accessor :entity

  # All video frames where a label was detected.
  # JSON property: `frames`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelFrame>]
  attr_accessor :frames

  # All video segments where a label was detected.
  # JSON property: `segments`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelSegment>]
  attr_accessor :segments

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[category_entities entity frames segments].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3483
+
3484
# Config for LABEL_DETECTION.
class GoogleCloudVideointelligenceV1p3beta1LabelDetectionConfig
  include Google::Apis::Core::Hashable

  # The confidence threshold used to filter labels from frame-level
  # detection. If not set, it is set to 0.4 by default. The valid range for
  # this threshold is [0.1, 0.9]; any value outside of this range will be
  # clipped.
  # Note: for best results please follow the default threshold. We will update
  # the default threshold every time a new model is released.
  # JSON property: `frameConfidenceThreshold`
  # @return [Float]
  attr_accessor :frame_confidence_threshold

  # What labels should be detected with LABEL_DETECTION, in addition to
  # video-level labels or segment-level labels.
  # If unspecified, defaults to `SHOT_MODE`.
  # JSON property: `labelDetectionMode`
  # @return [String]
  attr_accessor :label_detection_mode

  # Model to use for label detection.
  # Supported values: "builtin/stable" (the default if unset) and
  # "builtin/latest".
  # JSON property: `model`
  # @return [String]
  attr_accessor :model

  # Whether the video has been shot from a stationary (i.e. non-moving) camera.
  # When set to true, might improve detection accuracy for moving objects.
  # Should be used with `SHOT_AND_FRAME_MODE` enabled.
  # JSON property: `stationaryCamera`
  # @return [Boolean]
  attr_accessor :stationary_camera
  alias_method :stationary_camera?, :stationary_camera

  # The confidence threshold used to filter labels from video-level and
  # shot-level detections. If not set, it is set to 0.3 by default. The valid
  # range for this threshold is [0.1, 0.9]; any value outside of this range
  # will be clipped.
  # Note: for best results please follow the default threshold. We will update
  # the default threshold every time a new model is released.
  # JSON property: `videoConfidenceThreshold`
  # @return [Float]
  attr_accessor :video_confidence_threshold

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[frame_confidence_threshold label_detection_mode model
       stationary_camera video_confidence_threshold].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3543
+
3544
# Video frame level annotation results for label detection.
class GoogleCloudVideointelligenceV1p3beta1LabelFrame
  include Google::Apis::Core::Hashable

  # Confidence that the label is accurate. Range: [0, 1].
  # JSON property: `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Time-offset, relative to the beginning of the video, corresponding to the
  # video frame for this location.
  # JSON property: `timeOffset`
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[confidence time_offset].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3569
+
3570
# Video segment level annotation results for label detection.
class GoogleCloudVideointelligenceV1p3beta1LabelSegment
  include Google::Apis::Core::Hashable

  # Confidence that the label is accurate. Range: [0, 1].
  # JSON property: `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Video segment.
  # JSON property: `segment`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment]
  attr_accessor :segment

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[confidence segment].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3594
+
3595
# Normalized bounding box.
# The normalized vertex coordinates are relative to the original image.
# Range: [0, 1].
class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox
  include Google::Apis::Core::Hashable

  # Bottom Y coordinate.
  # JSON property: `bottom`
  # @return [Float]
  attr_accessor :bottom

  # Left X coordinate.
  # JSON property: `left`
  # @return [Float]
  attr_accessor :left

  # Right X coordinate.
  # JSON property: `right`
  # @return [Float]
  attr_accessor :right

  # Top Y coordinate.
  # JSON property: `top`
  # @return [Float]
  attr_accessor :top

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[bottom left right top].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3633
+
3634
# Normalized bounding polygon for text (that might not be aligned with axis).
# Contains list of the corner points in clockwise order starting from
# top-left corner. For example, for a rectangular bounding box:
# When the text is horizontal it might look like:
#         0----1
#         |    |
#         3----2
# When it's clockwise rotated 180 degrees around the top-left corner it
# becomes:
#         2----3
#         |    |
#         1----0
# and the vertex order will still be (0, 1, 2, 3). Note that values can be less
# than 0, or greater than 1 due to trigonometric calculations for location of
# the box.
class GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly
  include Google::Apis::Core::Hashable

  # Normalized vertices of the bounding polygon.
  # JSON property: `vertices`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedVertex>]
  attr_accessor :vertices

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    @vertices = args.fetch(:vertices) if args.key?(:vertices)
  end
end
3666
+
3667
# A vertex represents a 2D point in the image.
# NOTE: the normalized vertex coordinates are relative to the original image
# and range from 0 to 1.
class GoogleCloudVideointelligenceV1p3beta1NormalizedVertex
  include Google::Apis::Core::Hashable

  # X coordinate.
  # JSON property: `x`
  # @return [Float]
  attr_accessor :x

  # Y coordinate.
  # JSON property: `y`
  # @return [Float]
  attr_accessor :y

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[x y].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3693
+
3694
# Annotations corresponding to one tracked object.
class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation
  include Google::Apis::Core::Hashable

  # Object category's labeling confidence of this track.
  # JSON property: `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Detected entity from video analysis.
  # JSON property: `entity`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1Entity]
  attr_accessor :entity

  # Information corresponding to all frames where this object track appears.
  # Non-streaming batch mode: it may be one or multiple ObjectTrackingFrame
  # messages in frames.
  # Streaming mode: it can only be one ObjectTrackingFrame message in frames.
  # JSON property: `frames`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame>]
  attr_accessor :frames

  # Video segment.
  # JSON property: `segment`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment]
  attr_accessor :segment

  # Streaming mode ONLY.
  # In streaming mode, we do not know the end time of a tracked object
  # before it is completed. Hence, there is no VideoSegment info returned.
  # Instead, we provide a unique identifiable integer track_id so that
  # the customers can correlate the results of the ongoing
  # ObjectTrackAnnotation of the same track_id over time.
  # JSON property: `trackId`
  # @return [Fixnum]
  attr_accessor :track_id

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[confidence entity frames segment track_id].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3744
+
3745
# Video frame level annotations for object detection and tracking. This field
# stores per frame location, time offset, and confidence.
class GoogleCloudVideointelligenceV1p3beta1ObjectTrackingFrame
  include Google::Apis::Core::Hashable

  # Normalized bounding box.
  # The normalized vertex coordinates are relative to the original image.
  # Range: [0, 1].
  # JSON property: `normalizedBoundingBox`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingBox]
  attr_accessor :normalized_bounding_box

  # The timestamp of the frame in microseconds.
  # JSON property: `timeOffset`
  # @return [String]
  attr_accessor :time_offset

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[normalized_bounding_box time_offset].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3772
+
3773
# Config for SHOT_CHANGE_DETECTION.
class GoogleCloudVideointelligenceV1p3beta1ShotChangeDetectionConfig
  include Google::Apis::Core::Hashable

  # Model to use for shot change detection.
  # Supported values: "builtin/stable" (the default if unset) and
  # "builtin/latest".
  # JSON property: `model`
  # @return [String]
  attr_accessor :model

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    @model = args.fetch(:model) if args.key?(:model)
  end
end
3793
+
3794
# Provides "hints" to the speech recognizer to favor specific words and phrases
# in the results.
class GoogleCloudVideointelligenceV1p3beta1SpeechContext
  include Google::Apis::Core::Hashable

  # *Optional* A list of strings containing words and phrases "hints" so that
  # the speech recognition is more likely to recognize them. This can be used
  # to improve the accuracy for specific words and phrases, for example, if
  # specific commands are typically spoken by the user. This can also be used
  # to add additional words to the vocabulary of the recognizer. See
  # [usage limits](https://cloud.google.com/speech/limits#content).
  # JSON property: `phrases`
  # @return [Array<String>]
  attr_accessor :phrases

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    @phrases = args.fetch(:phrases) if args.key?(:phrases)
  end
end
3818
+
3819
# Alternative hypotheses (a.k.a. n-best list).
class GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative
  include Google::Apis::Core::Hashable

  # The confidence estimate between 0.0 and 1.0. A higher number
  # indicates an estimated greater likelihood that the recognized words are
  # correct. This field is typically provided only for the top hypothesis, and
  # only for `is_final=true` results. Clients should not rely on the
  # `confidence` field as it is not guaranteed to be accurate or consistent.
  # The default of 0.0 is a sentinel value indicating `confidence` was not set.
  # JSON property: `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Transcript text representing the words that the user spoke.
  # JSON property: `transcript`
  # @return [String]
  attr_accessor :transcript

  # A list of word-specific information for each recognized word.
  # JSON property: `words`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1WordInfo>]
  attr_accessor :words

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[confidence transcript words].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3854
+
3855
# A speech recognition result corresponding to a portion of the audio.
class GoogleCloudVideointelligenceV1p3beta1SpeechTranscription
  include Google::Apis::Core::Hashable

  # May contain one or more recognition hypotheses (up to the maximum specified
  # in `max_alternatives`). These alternatives are ordered in terms of
  # accuracy, with the top (first) alternative being the most probable, as
  # ranked by the recognizer.
  # JSON property: `alternatives`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechRecognitionAlternative>]
  attr_accessor :alternatives

  # Output only. The
  # [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the
  # language in this result. This language code was detected to have the most
  # likelihood of being spoken in the audio.
  # JSON property: `languageCode`
  # @return [String]
  attr_accessor :language_code

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[alternatives language_code].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3885
+
3886
# Config for SPEECH_TRANSCRIPTION.
class GoogleCloudVideointelligenceV1p3beta1SpeechTranscriptionConfig
  include Google::Apis::Core::Hashable

  # *Optional* For file formats, such as MXF or MKV, supporting multiple audio
  # tracks, specify up to two tracks. Default: track 0.
  # JSON property: `audioTracks`
  # @return [Array<Fixnum>]
  attr_accessor :audio_tracks

  # *Optional*
  # If set, specifies the estimated number of speakers in the conversation.
  # If not set, defaults to '2'.
  # Ignored unless enable_speaker_diarization is set to true.
  # JSON property: `diarizationSpeakerCount`
  # @return [Fixnum]
  attr_accessor :diarization_speaker_count

  # *Optional* If 'true', adds punctuation to recognition result hypotheses.
  # This feature is only available in select languages. Setting this for
  # requests in other languages has no effect at all. The default 'false' value
  # does not add punctuation to result hypotheses. NOTE: "This is currently
  # offered as an experimental service, complimentary to all users. In the
  # future this may be exclusively available as a premium feature."
  # JSON property: `enableAutomaticPunctuation`
  # @return [Boolean]
  attr_accessor :enable_automatic_punctuation
  alias_method :enable_automatic_punctuation?, :enable_automatic_punctuation

  # *Optional* If 'true', enables speaker detection for each recognized word in
  # the top alternative of the recognition result using a speaker_tag provided
  # in the WordInfo.
  # Note: When this is true, we send all the words from the beginning of the
  # audio for the top alternative in every consecutive responses.
  # This is done in order to improve our speaker tags as our models learn to
  # identify the speakers in the conversation over time.
  # JSON property: `enableSpeakerDiarization`
  # @return [Boolean]
  attr_accessor :enable_speaker_diarization
  alias_method :enable_speaker_diarization?, :enable_speaker_diarization

  # *Optional* If `true`, the top result includes a list of words and the
  # confidence for those words. If `false`, no word-level confidence
  # information is returned. The default is `false`.
  # JSON property: `enableWordConfidence`
  # @return [Boolean]
  attr_accessor :enable_word_confidence
  alias_method :enable_word_confidence?, :enable_word_confidence

  # *Optional* If set to `true`, the server will attempt to filter out
  # profanities, replacing all but the initial character in each filtered word
  # with asterisks, e.g. "f***". If set to `false` or omitted, profanities
  # won't be filtered out.
  # JSON property: `filterProfanity`
  # @return [Boolean]
  attr_accessor :filter_profanity
  alias_method :filter_profanity?, :filter_profanity

  # *Required* The language of the supplied audio as a
  # [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag.
  # Example: "en-US".
  # See [Language Support](https://cloud.google.com/speech/docs/languages)
  # for a list of the currently supported language codes.
  # JSON property: `languageCode`
  # @return [String]
  attr_accessor :language_code

  # *Optional* Maximum number of recognition hypotheses to be returned.
  # Specifically, the maximum number of `SpeechRecognitionAlternative` messages
  # within each `SpeechTranscription`. The server may return fewer than
  # `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1` will
  # return a maximum of one. If omitted, will return a maximum of one.
  # JSON property: `maxAlternatives`
  # @return [Fixnum]
  attr_accessor :max_alternatives

  # *Optional* A means to provide context to assist the speech recognition.
  # JSON property: `speechContexts`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechContext>]
  attr_accessor :speech_contexts

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[audio_tracks diarization_speaker_count enable_automatic_punctuation
       enable_speaker_diarization enable_word_confidence filter_profanity
       language_code max_alternatives speech_contexts].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
3984
+
3985
# `StreamingAnnotateVideoResponse` is the only message returned to the client
# by `StreamingAnnotateVideo`. A series of zero or more
# `StreamingAnnotateVideoResponse` messages are streamed back to the client.
class GoogleCloudVideointelligenceV1p3beta1StreamingAnnotateVideoResponse
  include Google::Apis::Core::Hashable

  # Streaming annotation results corresponding to a portion of the video
  # that is currently being processed.
  # JSON property: `annotationResults`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults]
  attr_accessor :annotation_results

  # GCS URI that stores annotation results of one streaming session.
  # It is a directory that can hold multiple files in JSON format.
  # Example uri format:
  # gs://bucket_id/object_id/cloud_project_name-session_id
  # JSON property: `annotationResultsUri`
  # @return [String]
  attr_accessor :annotation_results_uri

  # The `Status` type defines a logical error model that is suitable for
  # different programming environments, including REST APIs and RPC APIs. It is
  # used by [gRPC](https://github.com/grpc). The error model is designed to be:
  # - Simple to use and understand for most users
  # - Flexible enough to meet unexpected needs
  # The `Status` message contains three pieces of data: error code, error
  # message, and error details. The error code should be an enum value of
  # google.rpc.Code, but it may accept additional error codes if needed. The
  # error message should be a developer-facing English message that helps
  # developers *understand* and *resolve* the error. If a localized user-facing
  # error message is needed, put the localized message in the error details or
  # localize it in the client. The optional error details may contain arbitrary
  # information about the error. There is a predefined set of error detail types
  # in the package `google.rpc` that can be used for common error conditions.
  # JSON property: `error`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus]
  attr_accessor :error

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[annotation_results annotation_results_uri error].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
4059
+
4060
# Streaming annotation results corresponding to a portion of the video
# that is currently being processed.
class GoogleCloudVideointelligenceV1p3beta1StreamingVideoAnnotationResults
  include Google::Apis::Core::Hashable

  # Explicit content annotation (based on per-frame visual signals only).
  # If no explicit content has been detected in a frame, no annotations are
  # present for that frame.
  # JSON property: `explicitAnnotation`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation]
  attr_accessor :explicit_annotation

  # Label annotation results.
  # JSON property: `labelAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation>]
  attr_accessor :label_annotations

  # Object tracking results.
  # JSON property: `objectAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation>]
  attr_accessor :object_annotations

  # Shot annotation results. Each shot is represented as a video segment.
  # JSON property: `shotAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment>]
  attr_accessor :shot_annotations

  def initialize(**args)
    update!(**args)
  end

  # Copy any supplied keyword arguments into their instance variables.
  def update!(**args)
    %i[explicit_annotation label_annotations object_annotations
       shot_annotations].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
4099
+
4100
# Annotations for one OCR-detected text snippet: the text itself plus the
# segment-level detections (confidence and frame information) for it.
class GoogleCloudVideointelligenceV1p3beta1TextAnnotation
  include Google::Apis::Core::Hashable

  # All video segments in which the detected text appears.
  # Corresponds to the JSON property `segments`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextSegment>]
  attr_accessor :segments

  # The detected text.
  # Corresponds to the JSON property `text`
  # @return [String]
  attr_accessor :text

  def initialize(**attrs)
    update!(**attrs)
  end

  # Copy any supplied attributes onto this object; absent keys are left untouched.
  def update!(**attrs)
    %i[segments text].each do |key|
      instance_variable_set(:"@#{key}", attrs[key]) if attrs.key?(key)
    end
  end
end
4126
+
4127
# Config for TEXT_DETECTION.
class GoogleCloudVideointelligenceV1p3beta1TextDetectionConfig
  include Google::Apis::Core::Hashable

  # Optional BCP-47 language codes hinting at the language(s) to detect,
  # which can improve detection accuracy when known a priori. When no hint
  # is given the language is detected automatically.
  # Corresponds to the JSON property `languageHints`
  # @return [Array<String>]
  attr_accessor :language_hints

  def initialize(**attrs)
    update!(**attrs)
  end

  # Copy any supplied attributes onto this object; absent keys are left untouched.
  def update!(**attrs)
    @language_hints = attrs[:language_hints] if attrs.key?(:language_hints)
  end
end
4148
+
4149
# Frame-level annotation for text detection (OCR): the timestamp and
# bounding-polygon location of a text snippet within a single frame.
class GoogleCloudVideointelligenceV1p3beta1TextFrame
  include Google::Apis::Core::Hashable

  # Normalized bounding polygon for the text, which may not be axis-aligned.
  # Vertices are listed clockwise starting from the top-left corner of the
  # text, so for a horizontal rectangle the order is:
  #   0----1
  #   |    |
  #   3----2
  # and after a 180-degree clockwise rotation around the top-left corner:
  #   2----3
  #   |    |
  #   1----0
  # with the vertex order still (0, 1, 2, 3). Coordinates can fall below 0
  # or above 1 due to the trigonometric computation of the box location.
  # Corresponds to the JSON property `rotatedBoundingBox`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1NormalizedBoundingPoly]
  attr_accessor :rotated_bounding_box

  # Timestamp of this frame.
  # Corresponds to the JSON property `timeOffset`
  # @return [String]
  attr_accessor :time_offset

  def initialize(**attrs)
    update!(**attrs)
  end

  # Copy any supplied attributes onto this object; absent keys are left untouched.
  def update!(**attrs)
    %i[rotated_bounding_box time_offset].each do |key|
      instance_variable_set(:"@#{key}", attrs[key]) if attrs.key?(key)
    end
  end
end
4189
+
4190
# Segment-level annotation for text detection.
class GoogleCloudVideointelligenceV1p3beta1TextSegment
  include Google::Apis::Core::Hashable

  # Confidence for the detected text track: the highest confidence across
  # all frames in which the OCR text appears.
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Frame information for each frame in which the OCR text appears.
  # Corresponds to the JSON property `frames`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextFrame>]
  attr_accessor :frames

  # Video segment.
  # Corresponds to the JSON property `segment`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment]
  attr_accessor :segment

  def initialize(**attrs)
    update!(**attrs)
  end

  # Copy any supplied attributes onto this object; absent keys are left untouched.
  def update!(**attrs)
    %i[confidence frames segment].each do |key|
      instance_variable_set(:"@#{key}", attrs[key]) if attrs.key?(key)
    end
  end
end
4221
+
4222
# Annotation progress for a single video.
class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationProgress
  include Google::Apis::Core::Hashable

  # Video file location in
  # [Google Cloud Storage](https://cloud.google.com/storage/).
  # Corresponds to the JSON property `inputUri`
  # @return [String]
  attr_accessor :input_uri

  # Approximate percentage processed so far; guaranteed to be 100 once
  # processing completes.
  # Corresponds to the JSON property `progressPercent`
  # @return [Fixnum]
  attr_accessor :progress_percent

  # Time when the request was received.
  # Corresponds to the JSON property `startTime`
  # @return [String]
  attr_accessor :start_time

  # Time of the most recent update.
  # Corresponds to the JSON property `updateTime`
  # @return [String]
  attr_accessor :update_time

  def initialize(**attrs)
    update!(**attrs)
  end

  # Copy any supplied attributes onto this object; absent keys are left untouched.
  def update!(**attrs)
    %i[input_uri progress_percent start_time update_time].each do |key|
      instance_variable_set(:"@#{key}", attrs[key]) if attrs.key?(key)
    end
  end
end
4260
+
4261
# Annotation results for a single video.
class GoogleCloudVideointelligenceV1p3beta1VideoAnnotationResults
  include Google::Apis::Core::Hashable

  # The `google.rpc.Status` error model used by REST and RPC APIs (and by
  # [gRPC](https://github.com/grpc)): an error code (a google.rpc.Code enum
  # value), a developer-facing English message, and optional structured
  # detail messages from the `google.rpc` package. Present here when
  # processing this video failed.
  # Corresponds to the JSON property `error`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus]
  attr_accessor :error

  # Per-frame explicit content annotation (visual signals only); frames
  # without detected explicit content carry no annotation.
  # Corresponds to the JSON property `explicitAnnotation`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentAnnotation]
  attr_accessor :explicit_annotation

  # Frame-level label annotations, exactly one element per unique label.
  # Corresponds to the JSON property `frameLabelAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation>]
  attr_accessor :frame_label_annotations

  # Video file location in
  # [Google Cloud Storage](https://cloud.google.com/storage/).
  # Corresponds to the JSON property `inputUri`
  # @return [String]
  attr_accessor :input_uri

  # Annotations for the objects detected and tracked in the video.
  # Corresponds to the JSON property `objectAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ObjectTrackingAnnotation>]
  attr_accessor :object_annotations

  # Video-level or user-specified-segment-level label annotations, exactly
  # one element per unique label.
  # Corresponds to the JSON property `segmentLabelAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation>]
  attr_accessor :segment_label_annotations

  # Shot annotations; each detected shot is one video segment.
  # Corresponds to the JSON property `shotAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment>]
  attr_accessor :shot_annotations

  # Shot-level label annotations, exactly one element per unique label.
  # Corresponds to the JSON property `shotLabelAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelAnnotation>]
  attr_accessor :shot_label_annotations

  # Speech transcription.
  # Corresponds to the JSON property `speechTranscriptions`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechTranscription>]
  attr_accessor :speech_transcriptions

  # OCR text detection and tracking: one annotation per detected text
  # snippet, each carrying its per-frame information.
  # Corresponds to the JSON property `textAnnotations`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextAnnotation>]
  attr_accessor :text_annotations

  def initialize(**attrs)
    update!(**attrs)
  end

  # Copy any supplied attributes onto this object; absent keys are left untouched.
  def update!(**attrs)
    %i[error explicit_annotation frame_label_annotations input_uri
       object_annotations segment_label_annotations shot_annotations
       shot_label_annotations speech_transcriptions text_annotations].each do |key|
      instance_variable_set(:"@#{key}", attrs[key]) if attrs.key?(key)
    end
  end
end
4379
+
4380
# Video context and/or feature-specific parameters.
class GoogleCloudVideointelligenceV1p3beta1VideoContext
  include Google::Apis::Core::Hashable

  # Config for EXPLICIT_CONTENT_DETECTION.
  # Corresponds to the JSON property `explicitContentDetectionConfig`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ExplicitContentDetectionConfig]
  attr_accessor :explicit_content_detection_config

  # Config for LABEL_DETECTION.
  # Corresponds to the JSON property `labelDetectionConfig`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1LabelDetectionConfig]
  attr_accessor :label_detection_config

  # Video segments to annotate. Segments may overlap and need not be
  # contiguous or cover the whole video; when unspecified, each video is
  # treated as a single segment.
  # Corresponds to the JSON property `segments`
  # @return [Array<Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1VideoSegment>]
  attr_accessor :segments

  # Config for SHOT_CHANGE_DETECTION.
  # Corresponds to the JSON property `shotChangeDetectionConfig`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1ShotChangeDetectionConfig]
  attr_accessor :shot_change_detection_config

  # Config for SPEECH_TRANSCRIPTION.
  # Corresponds to the JSON property `speechTranscriptionConfig`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1SpeechTranscriptionConfig]
  attr_accessor :speech_transcription_config

  # Config for TEXT_DETECTION.
  # Corresponds to the JSON property `textDetectionConfig`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleCloudVideointelligenceV1p3beta1TextDetectionConfig]
  attr_accessor :text_detection_config

  def initialize(**attrs)
    update!(**attrs)
  end

  # Copy any supplied attributes onto this object; absent keys are left untouched.
  def update!(**attrs)
    %i[explicit_content_detection_config label_detection_config segments
       shot_change_detection_config speech_transcription_config
       text_detection_config].each do |key|
      instance_variable_set(:"@#{key}", attrs[key]) if attrs.key?(key)
    end
  end
end
4430
+
4431
# Video segment.
class GoogleCloudVideointelligenceV1p3beta1VideoSegment
  include Google::Apis::Core::Hashable

  # Time-offset, relative to the beginning of the video, corresponding to
  # the end of the segment (inclusive).
  # Corresponds to the JSON property `endTimeOffset`
  # @return [String]
  attr_accessor :end_time_offset

  # Time-offset, relative to the beginning of the video, corresponding to
  # the start of the segment (inclusive).
  # Corresponds to the JSON property `startTimeOffset`
  # @return [String]
  attr_accessor :start_time_offset

  def initialize(**attrs)
    update!(**attrs)
  end

  # Copy any supplied attributes onto this object; absent keys are left untouched.
  def update!(**attrs)
    %i[end_time_offset start_time_offset].each do |key|
      instance_variable_set(:"@#{key}", attrs[key]) if attrs.key?(key)
    end
  end
end
4457
+
4458
# Word-specific information for recognized words. Word information is only
# included in the response when certain request parameters are set, such as
# `enable_word_time_offsets`.
class GoogleCloudVideointelligenceV1p3beta1WordInfo
  include Google::Apis::Core::Hashable

  # Output only. Confidence estimate in [0.0, 1.0]; higher values indicate
  # a greater estimated likelihood that the recognized words are correct.
  # Set only for the top alternative, not guaranteed to be accurate, and
  # not always provided. The default 0.0 is a sentinel meaning `confidence`
  # was not set.
  # Corresponds to the JSON property `confidence`
  # @return [Float]
  attr_accessor :confidence

  # Time offset relative to the beginning of the audio for the end of the
  # spoken word. Set only when `enable_word_time_offsets=true` and only in
  # the top hypothesis; this is an experimental feature and the accuracy of
  # the offset can vary.
  # Corresponds to the JSON property `endTime`
  # @return [String]
  attr_accessor :end_time

  # Output only. A distinct integer assigned to each speaker in the audio,
  # identifying which speaker spoke this word. Ranges from 1 up to
  # diarization_speaker_count and is set only when speaker diarization is
  # enabled.
  # Corresponds to the JSON property `speakerTag`
  # @return [Fixnum]
  attr_accessor :speaker_tag

  # Time offset relative to the beginning of the audio for the start of the
  # spoken word. Set only when `enable_word_time_offsets=true` and only in
  # the top hypothesis; this is an experimental feature and the accuracy of
  # the offset can vary.
  # Corresponds to the JSON property `startTime`
  # @return [String]
  attr_accessor :start_time

  # The word corresponding to this set of information.
  # Corresponds to the JSON property `word`
  # @return [String]
  attr_accessor :word

  def initialize(**attrs)
    update!(**attrs)
  end

  # Copy any supplied attributes onto this object; absent keys are left untouched.
  def update!(**attrs)
    %i[confidence end_time speaker_tag start_time word].each do |key|
      instance_variable_set(:"@#{key}", attrs[key]) if attrs.key?(key)
    end
  end
end
4516
+
4517
# This resource represents a long-running operation that is the result of a
# network API call.
class GoogleLongrunningOperation
  include Google::Apis::Core::Hashable

  # `false` while the operation is still in progress; `true` once it has
  # completed, at which point either `error` or `response` is available.
  # Corresponds to the JSON property `done`
  # @return [Boolean]
  attr_accessor :done
  alias done? done

  # The `google.rpc.Status` error model used by REST and RPC APIs (and by
  # [gRPC](https://github.com/grpc)): an error code (a google.rpc.Code enum
  # value), a developer-facing English message, and optional structured
  # detail messages from the `google.rpc` package. Present here when the
  # operation failed.
  # Corresponds to the JSON property `error`
  # @return [Google::Apis::VideointelligenceV1p3beta1::GoogleRpcStatus]
  attr_accessor :error

  # Service-specific metadata associated with the operation, typically
  # progress information and common metadata such as create time. Some
  # services might not provide such metadata; any method returning a
  # long-running operation should document the metadata type, if any.
  # Corresponds to the JSON property `metadata`
  # @return [Hash<String,Object>]
  attr_accessor :metadata

  # The server-assigned name, unique only within the service that
  # originally returns it. With the default HTTP mapping the `name` should
  # have the format `operations/some/unique/name`.
  # Corresponds to the JSON property `name`
  # @return [String]
  attr_accessor :name

  # The normal response of the operation on success. When the original
  # method returns no data on success (such as `Delete`) this is
  # `google.protobuf.Empty`; for standard `Get`/`Create`/`Update` it is the
  # resource itself; otherwise it has type `XxxResponse` where `Xxx` is the
  # original method name (e.g. `TakeSnapshot()` implies
  # `TakeSnapshotResponse`).
  # Corresponds to the JSON property `response`
  # @return [Hash<String,Object>]
  attr_accessor :response

  def initialize(**attrs)
    update!(**attrs)
  end

  # Copy any supplied attributes onto this object; absent keys are left untouched.
  def update!(**attrs)
    %i[done error metadata name response].each do |key|
      instance_variable_set(:"@#{key}", attrs[key]) if attrs.key?(key)
    end
  end
end
4613
+
4614
# The `Status` type defines a logical error model suitable for different
# programming environments, including REST APIs and RPC APIs, and is used
# by [gRPC](https://github.com/grpc). It carries three pieces of data: an
# error code (an enum value of google.rpc.Code), a developer-facing English
# error message, and optional error details (a predefined set of detail
# types lives in the `google.rpc` package). Localized user-facing messages
# belong in the details or are localized by the client. The message is a
# logical representation, not necessarily the wire format, and may map
# differently across client libraries and protocols (e.g. exceptions in
# Java, error codes in C). Typical uses include partial errors embedded in
# normal responses, per-step workflow errors, per-sub-response batch
# errors, statuses of asynchronous operations, and logging (after any
# stripping needed for security/privacy).
class GoogleRpcStatus
  include Google::Apis::Core::Hashable

  # The status code, which should be an enum value of google.rpc.Code.
  # Corresponds to the JSON property `code`
  # @return [Fixnum]
  attr_accessor :code

  # Messages carrying the error details; there is a common set of message
  # types for APIs to use.
  # Corresponds to the JSON property `details`
  # @return [Array<Hash<String,Object>>]
  attr_accessor :details

  # A developer-facing error message in English. Any user-facing message
  # should be localized and sent in the google.rpc.Status.details field, or
  # localized by the client.
  # Corresponds to the JSON property `message`
  # @return [String]
  attr_accessor :message

  def initialize(**attrs)
    update!(**attrs)
  end

  # Copy any supplied attributes onto this object; absent keys are left untouched.
  def update!(**attrs)
    %i[code details message].each do |key|
      instance_variable_set(:"@#{key}", attrs[key]) if attrs.key?(key)
    end
  end
end
4685
+ end
4686
+ end
4687
+ end