@milaboratories/pl-drivers 1.9.0 → 1.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (331)
  1. package/dist/clients/constructors.cjs +45 -0
  2. package/dist/clients/constructors.cjs.map +1 -0
  3. package/dist/clients/constructors.d.ts +0 -1
  4. package/dist/clients/constructors.js +39 -0
  5. package/dist/clients/constructors.js.map +1 -0
  6. package/dist/clients/download.cjs +149 -0
  7. package/dist/clients/download.cjs.map +1 -0
  8. package/dist/clients/download.d.ts +0 -1
  9. package/dist/clients/download.js +121 -0
  10. package/dist/clients/download.js.map +1 -0
  11. package/dist/clients/logs.cjs +44 -0
  12. package/dist/clients/logs.cjs.map +1 -0
  13. package/dist/clients/logs.d.ts +0 -1
  14. package/dist/clients/logs.js +42 -0
  15. package/dist/clients/logs.js.map +1 -0
  16. package/dist/clients/ls_api.cjs +23 -0
  17. package/dist/clients/ls_api.cjs.map +1 -0
  18. package/dist/clients/ls_api.d.ts +0 -1
  19. package/dist/clients/ls_api.js +21 -0
  20. package/dist/clients/ls_api.js.map +1 -0
  21. package/dist/clients/progress.cjs +58 -0
  22. package/dist/clients/progress.cjs.map +1 -0
  23. package/dist/clients/progress.d.ts +1 -3
  24. package/dist/clients/progress.js +56 -0
  25. package/dist/clients/progress.js.map +1 -0
  26. package/dist/clients/upload.cjs +188 -0
  27. package/dist/clients/upload.cjs.map +1 -0
  28. package/dist/clients/upload.d.ts +1 -3
  29. package/dist/clients/upload.js +163 -0
  30. package/dist/clients/upload.js.map +1 -0
  31. package/dist/drivers/download_blob/blob_key.cjs +34 -0
  32. package/dist/drivers/download_blob/blob_key.cjs.map +1 -0
  33. package/dist/drivers/download_blob/blob_key.d.ts +0 -1
  34. package/dist/drivers/download_blob/blob_key.js +12 -0
  35. package/dist/drivers/download_blob/blob_key.js.map +1 -0
  36. package/dist/drivers/download_blob/download_blob.cjs +442 -0
  37. package/dist/drivers/download_blob/download_blob.cjs.map +1 -0
  38. package/dist/drivers/download_blob/download_blob.d.ts +0 -1
  39. package/dist/drivers/download_blob/download_blob.js +417 -0
  40. package/dist/drivers/download_blob/download_blob.js.map +1 -0
  41. package/dist/drivers/download_blob/download_blob_task.cjs +170 -0
  42. package/dist/drivers/download_blob/download_blob_task.cjs.map +1 -0
  43. package/dist/drivers/download_blob/download_blob_task.d.ts +0 -1
  44. package/dist/drivers/download_blob/download_blob_task.js +146 -0
  45. package/dist/drivers/download_blob/download_blob_task.js.map +1 -0
  46. package/dist/drivers/download_blob/sparse_cache/cache.cjs +202 -0
  47. package/dist/drivers/download_blob/sparse_cache/cache.cjs.map +1 -0
  48. package/dist/drivers/download_blob/sparse_cache/cache.d.ts +0 -1
  49. package/dist/drivers/download_blob/sparse_cache/cache.js +197 -0
  50. package/dist/drivers/download_blob/sparse_cache/cache.js.map +1 -0
  51. package/dist/drivers/download_blob/sparse_cache/file.cjs +61 -0
  52. package/dist/drivers/download_blob/sparse_cache/file.cjs.map +1 -0
  53. package/dist/drivers/download_blob/sparse_cache/file.d.ts +0 -1
  54. package/dist/drivers/download_blob/sparse_cache/file.js +39 -0
  55. package/dist/drivers/download_blob/sparse_cache/file.js.map +1 -0
  56. package/dist/drivers/download_blob/sparse_cache/ranges.cjs +104 -0
  57. package/dist/drivers/download_blob/sparse_cache/ranges.cjs.map +1 -0
  58. package/dist/drivers/download_blob/sparse_cache/ranges.d.ts +0 -1
  59. package/dist/drivers/download_blob/sparse_cache/ranges.js +76 -0
  60. package/dist/drivers/download_blob/sparse_cache/ranges.js.map +1 -0
  61. package/dist/drivers/download_blob_url/driver.cjs +169 -0
  62. package/dist/drivers/download_blob_url/driver.cjs.map +1 -0
  63. package/dist/drivers/download_blob_url/driver.d.ts +0 -1
  64. package/dist/drivers/download_blob_url/driver.js +148 -0
  65. package/dist/drivers/download_blob_url/driver.js.map +1 -0
  66. package/dist/drivers/download_blob_url/driver_id.cjs +9 -0
  67. package/dist/drivers/download_blob_url/driver_id.cjs.map +1 -0
  68. package/dist/drivers/download_blob_url/driver_id.d.ts +0 -1
  69. package/dist/drivers/download_blob_url/driver_id.js +7 -0
  70. package/dist/drivers/download_blob_url/driver_id.js.map +1 -0
  71. package/dist/drivers/download_blob_url/snapshot.cjs +18 -0
  72. package/dist/drivers/download_blob_url/snapshot.cjs.map +1 -0
  73. package/dist/drivers/download_blob_url/snapshot.d.ts +2 -3
  74. package/dist/drivers/download_blob_url/snapshot.js +15 -0
  75. package/dist/drivers/download_blob_url/snapshot.js.map +1 -0
  76. package/dist/drivers/download_blob_url/task.cjs +209 -0
  77. package/dist/drivers/download_blob_url/task.cjs.map +1 -0
  78. package/dist/drivers/download_blob_url/task.d.ts +0 -1
  79. package/dist/drivers/download_blob_url/task.js +184 -0
  80. package/dist/drivers/download_blob_url/task.js.map +1 -0
  81. package/dist/drivers/download_blob_url/url.d.ts +1 -1
  82. package/dist/drivers/download_url/driver.cjs +149 -0
  83. package/dist/drivers/download_url/driver.cjs.map +1 -0
  84. package/dist/drivers/download_url/driver.d.ts +0 -1
  85. package/dist/drivers/download_url/driver.js +128 -0
  86. package/dist/drivers/download_url/driver.js.map +1 -0
  87. package/dist/drivers/download_url/task.cjs +150 -0
  88. package/dist/drivers/download_url/task.cjs.map +1 -0
  89. package/dist/drivers/download_url/task.d.ts +0 -1
  90. package/dist/drivers/download_url/task.js +124 -0
  91. package/dist/drivers/download_url/task.js.map +1 -0
  92. package/dist/drivers/helpers/download_local_handle.cjs +26 -0
  93. package/dist/drivers/helpers/download_local_handle.cjs.map +1 -0
  94. package/dist/drivers/helpers/download_local_handle.d.ts +0 -1
  95. package/dist/drivers/helpers/download_local_handle.js +22 -0
  96. package/dist/drivers/helpers/download_local_handle.js.map +1 -0
  97. package/dist/drivers/helpers/download_remote_handle.cjs +36 -0
  98. package/dist/drivers/helpers/download_remote_handle.cjs.map +1 -0
  99. package/dist/drivers/helpers/download_remote_handle.d.ts +0 -1
  100. package/dist/drivers/helpers/download_remote_handle.js +32 -0
  101. package/dist/drivers/helpers/download_remote_handle.js.map +1 -0
  102. package/dist/drivers/helpers/files_cache.cjs +68 -0
  103. package/dist/drivers/helpers/files_cache.cjs.map +1 -0
  104. package/dist/drivers/helpers/files_cache.d.ts +0 -1
  105. package/dist/drivers/helpers/files_cache.js +66 -0
  106. package/dist/drivers/helpers/files_cache.js.map +1 -0
  107. package/dist/drivers/helpers/helpers.cjs +34 -0
  108. package/dist/drivers/helpers/helpers.cjs.map +1 -0
  109. package/dist/drivers/helpers/helpers.d.ts +0 -1
  110. package/dist/drivers/helpers/helpers.js +31 -0
  111. package/dist/drivers/helpers/helpers.js.map +1 -0
  112. package/dist/drivers/helpers/logs_handle.cjs +50 -0
  113. package/dist/drivers/helpers/logs_handle.cjs.map +1 -0
  114. package/dist/drivers/helpers/logs_handle.d.ts +0 -1
  115. package/dist/drivers/helpers/logs_handle.js +43 -0
  116. package/dist/drivers/helpers/logs_handle.js.map +1 -0
  117. package/dist/drivers/helpers/ls_remote_import_handle.cjs +34 -0
  118. package/dist/drivers/helpers/ls_remote_import_handle.cjs.map +1 -0
  119. package/dist/drivers/helpers/ls_remote_import_handle.d.ts +0 -1
  120. package/dist/drivers/helpers/ls_remote_import_handle.js +29 -0
  121. package/dist/drivers/helpers/ls_remote_import_handle.js.map +1 -0
  122. package/dist/drivers/helpers/ls_storage_entry.cjs +64 -0
  123. package/dist/drivers/helpers/ls_storage_entry.cjs.map +1 -0
  124. package/dist/drivers/helpers/ls_storage_entry.d.ts +0 -1
  125. package/dist/drivers/helpers/ls_storage_entry.js +58 -0
  126. package/dist/drivers/helpers/ls_storage_entry.js.map +1 -0
  127. package/dist/drivers/helpers/polling_ops.d.ts +0 -1
  128. package/dist/drivers/helpers/read_file.cjs +54 -0
  129. package/dist/drivers/helpers/read_file.cjs.map +1 -0
  130. package/dist/drivers/helpers/read_file.d.ts +0 -1
  131. package/dist/drivers/helpers/read_file.js +33 -0
  132. package/dist/drivers/helpers/read_file.js.map +1 -0
  133. package/dist/drivers/helpers/test_helpers.d.ts +0 -1
  134. package/dist/drivers/logs.cjs +118 -0
  135. package/dist/drivers/logs.cjs.map +1 -0
  136. package/dist/drivers/logs.d.ts +0 -1
  137. package/dist/drivers/logs.js +116 -0
  138. package/dist/drivers/logs.js.map +1 -0
  139. package/dist/drivers/logs_stream.cjs +238 -0
  140. package/dist/drivers/logs_stream.cjs.map +1 -0
  141. package/dist/drivers/logs_stream.d.ts +0 -1
  142. package/dist/drivers/logs_stream.js +236 -0
  143. package/dist/drivers/logs_stream.js.map +1 -0
  144. package/dist/drivers/ls.cjs +236 -0
  145. package/dist/drivers/ls.cjs.map +1 -0
  146. package/dist/drivers/ls.d.ts +0 -1
  147. package/dist/drivers/ls.js +214 -0
  148. package/dist/drivers/ls.js.map +1 -0
  149. package/dist/drivers/types.cjs +72 -0
  150. package/dist/drivers/types.cjs.map +1 -0
  151. package/dist/drivers/types.d.ts +4 -5
  152. package/dist/drivers/types.js +64 -0
  153. package/dist/drivers/types.js.map +1 -0
  154. package/dist/drivers/upload.cjs +154 -0
  155. package/dist/drivers/upload.cjs.map +1 -0
  156. package/dist/drivers/upload.d.ts +0 -1
  157. package/dist/drivers/upload.js +151 -0
  158. package/dist/drivers/upload.js.map +1 -0
  159. package/dist/drivers/upload_task.cjs +293 -0
  160. package/dist/drivers/upload_task.cjs.map +1 -0
  161. package/dist/drivers/upload_task.d.ts +0 -1
  162. package/dist/drivers/upload_task.js +285 -0
  163. package/dist/drivers/upload_task.js.map +1 -0
  164. package/dist/drivers/urls/url.cjs +59 -0
  165. package/dist/drivers/urls/url.cjs.map +1 -0
  166. package/dist/drivers/urls/url.d.ts +0 -1
  167. package/dist/drivers/urls/url.js +54 -0
  168. package/dist/drivers/urls/url.js.map +1 -0
  169. package/dist/drivers/virtual_storages.cjs +53 -0
  170. package/dist/drivers/virtual_storages.cjs.map +1 -0
  171. package/dist/drivers/virtual_storages.d.ts +0 -1
  172. package/dist/drivers/virtual_storages.js +51 -0
  173. package/dist/drivers/virtual_storages.js.map +1 -0
  174. package/dist/helpers/download.cjs +63 -0
  175. package/dist/helpers/download.cjs.map +1 -0
  176. package/dist/helpers/download.d.ts +0 -1
  177. package/dist/helpers/download.js +60 -0
  178. package/dist/helpers/download.js.map +1 -0
  179. package/dist/helpers/validate.cjs +12 -0
  180. package/dist/helpers/validate.cjs.map +1 -0
  181. package/dist/helpers/validate.d.ts +0 -1
  182. package/dist/helpers/validate.js +10 -0
  183. package/dist/helpers/validate.js.map +1 -0
  184. package/dist/index.cjs +72 -0
  185. package/dist/index.cjs.map +1 -0
  186. package/dist/index.d.ts +0 -1
  187. package/dist/index.js +19 -2
  188. package/dist/index.js.map +1 -1
  189. package/dist/proto/github.com/googleapis/googleapis/google/rpc/status.d.ts +0 -1
  190. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.cjs +261 -0
  191. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.cjs.map +1 -0
  192. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.client.cjs +31 -0
  193. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.client.cjs.map +1 -0
  194. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.client.d.ts +3 -5
  195. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.client.js +29 -0
  196. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.client.js.map +1 -0
  197. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.d.ts +0 -1
  198. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.js +256 -0
  199. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.js.map +1 -0
  200. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.cjs +301 -0
  201. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.cjs.map +1 -0
  202. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.client.cjs +34 -0
  203. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.client.cjs.map +1 -0
  204. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.client.d.ts +3 -5
  205. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.client.js +32 -0
  206. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.client.js.map +1 -0
  207. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.d.ts +0 -1
  208. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.js +296 -0
  209. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.js.map +1 -0
  210. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.cjs +410 -0
  211. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.cjs.map +1 -0
  212. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.client.cjs +38 -0
  213. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.client.cjs.map +1 -0
  214. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.client.d.ts +3 -5
  215. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.client.js +36 -0
  216. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.client.js.map +1 -0
  217. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.d.ts +0 -1
  218. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.js +403 -0
  219. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.js.map +1 -0
  220. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.cjs +486 -0
  221. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.cjs.map +1 -0
  222. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.client.cjs +86 -0
  223. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.client.cjs.map +1 -0
  224. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.client.d.ts +3 -5
  225. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.client.js +84 -0
  226. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.client.js.map +1 -0
  227. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.d.ts +0 -1
  228. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.js +478 -0
  229. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.js.map +1 -0
  230. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.cjs +712 -0
  231. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.cjs.map +1 -0
  232. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.client.cjs +71 -0
  233. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.client.cjs.map +1 -0
  234. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.client.d.ts +3 -5
  235. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.client.js +69 -0
  236. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.client.js.map +1 -0
  237. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.d.ts +0 -1
  238. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.js +701 -0
  239. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.js.map +1 -0
  240. package/dist/proto/github.com/milaboratory/pl/plapi/plapiproto/api.client.d.ts +3 -5
  241. package/dist/proto/github.com/milaboratory/pl/plapi/plapiproto/api.d.ts +0 -1
  242. package/dist/proto/github.com/milaboratory/pl/plapi/plapiproto/api_types.d.ts +0 -1
  243. package/dist/proto/github.com/milaboratory/pl/plapi/plapiproto/base_types.d.ts +0 -1
  244. package/dist/proto/github.com/milaboratory/pl/plapi/plapiproto/import.d.ts +0 -1
  245. package/dist/proto/github.com/milaboratory/pl/plapi/plapiproto/resource_types.d.ts +0 -1
  246. package/dist/proto/google/api/http.d.ts +0 -1
  247. package/dist/proto/google/protobuf/any.d.ts +0 -1
  248. package/dist/proto/google/protobuf/descriptor.d.ts +0 -1
  249. package/dist/proto/google/protobuf/duration.cjs +105 -0
  250. package/dist/proto/google/protobuf/duration.cjs.map +1 -0
  251. package/dist/proto/google/protobuf/duration.d.ts +0 -1
  252. package/dist/proto/google/protobuf/duration.js +103 -0
  253. package/dist/proto/google/protobuf/duration.js.map +1 -0
  254. package/dist/proto/google/protobuf/empty.d.ts +0 -1
  255. package/dist/proto/google/protobuf/struct.d.ts +0 -1
  256. package/dist/proto/google/protobuf/timestamp.cjs +133 -0
  257. package/dist/proto/google/protobuf/timestamp.cjs.map +1 -0
  258. package/dist/proto/google/protobuf/timestamp.d.ts +0 -1
  259. package/dist/proto/google/protobuf/timestamp.js +131 -0
  260. package/dist/proto/google/protobuf/timestamp.js.map +1 -0
  261. package/dist/proto/google/protobuf/wrappers.d.ts +0 -1
  262. package/dist/test_env.d.ts +0 -1
  263. package/package.json +16 -15
  264. package/dist/clients/constructors.d.ts.map +0 -1
  265. package/dist/clients/download.d.ts.map +0 -1
  266. package/dist/clients/logs.d.ts.map +0 -1
  267. package/dist/clients/ls_api.d.ts.map +0 -1
  268. package/dist/clients/progress.d.ts.map +0 -1
  269. package/dist/clients/upload.d.ts.map +0 -1
  270. package/dist/drivers/download_blob/blob_key.d.ts.map +0 -1
  271. package/dist/drivers/download_blob/download_blob.d.ts.map +0 -1
  272. package/dist/drivers/download_blob/download_blob_task.d.ts.map +0 -1
  273. package/dist/drivers/download_blob/sparse_cache/cache.d.ts.map +0 -1
  274. package/dist/drivers/download_blob/sparse_cache/file.d.ts.map +0 -1
  275. package/dist/drivers/download_blob/sparse_cache/ranges.d.ts.map +0 -1
  276. package/dist/drivers/download_blob_url/driver.d.ts.map +0 -1
  277. package/dist/drivers/download_blob_url/driver_id.d.ts.map +0 -1
  278. package/dist/drivers/download_blob_url/snapshot.d.ts.map +0 -1
  279. package/dist/drivers/download_blob_url/task.d.ts.map +0 -1
  280. package/dist/drivers/download_blob_url/url.d.ts.map +0 -1
  281. package/dist/drivers/download_url/driver.d.ts.map +0 -1
  282. package/dist/drivers/download_url/task.d.ts.map +0 -1
  283. package/dist/drivers/helpers/download_local_handle.d.ts.map +0 -1
  284. package/dist/drivers/helpers/download_remote_handle.d.ts.map +0 -1
  285. package/dist/drivers/helpers/files_cache.d.ts.map +0 -1
  286. package/dist/drivers/helpers/helpers.d.ts.map +0 -1
  287. package/dist/drivers/helpers/logs_handle.d.ts.map +0 -1
  288. package/dist/drivers/helpers/ls_remote_import_handle.d.ts.map +0 -1
  289. package/dist/drivers/helpers/ls_storage_entry.d.ts.map +0 -1
  290. package/dist/drivers/helpers/polling_ops.d.ts.map +0 -1
  291. package/dist/drivers/helpers/read_file.d.ts.map +0 -1
  292. package/dist/drivers/helpers/test_helpers.d.ts.map +0 -1
  293. package/dist/drivers/logs.d.ts.map +0 -1
  294. package/dist/drivers/logs_stream.d.ts.map +0 -1
  295. package/dist/drivers/ls.d.ts.map +0 -1
  296. package/dist/drivers/types.d.ts.map +0 -1
  297. package/dist/drivers/upload.d.ts.map +0 -1
  298. package/dist/drivers/upload_task.d.ts.map +0 -1
  299. package/dist/drivers/urls/url.d.ts.map +0 -1
  300. package/dist/drivers/virtual_storages.d.ts.map +0 -1
  301. package/dist/helpers/download.d.ts.map +0 -1
  302. package/dist/helpers/validate.d.ts.map +0 -1
  303. package/dist/index.d.ts.map +0 -1
  304. package/dist/index.mjs +0 -4892
  305. package/dist/index.mjs.map +0 -1
  306. package/dist/proto/github.com/googleapis/googleapis/google/rpc/status.d.ts.map +0 -1
  307. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.client.d.ts.map +0 -1
  308. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.d.ts.map +0 -1
  309. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.client.d.ts.map +0 -1
  310. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.d.ts.map +0 -1
  311. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.client.d.ts.map +0 -1
  312. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.d.ts.map +0 -1
  313. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.client.d.ts.map +0 -1
  314. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.d.ts.map +0 -1
  315. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.client.d.ts.map +0 -1
  316. package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.d.ts.map +0 -1
  317. package/dist/proto/github.com/milaboratory/pl/plapi/plapiproto/api.client.d.ts.map +0 -1
  318. package/dist/proto/github.com/milaboratory/pl/plapi/plapiproto/api.d.ts.map +0 -1
  319. package/dist/proto/github.com/milaboratory/pl/plapi/plapiproto/api_types.d.ts.map +0 -1
  320. package/dist/proto/github.com/milaboratory/pl/plapi/plapiproto/base_types.d.ts.map +0 -1
  321. package/dist/proto/github.com/milaboratory/pl/plapi/plapiproto/import.d.ts.map +0 -1
  322. package/dist/proto/github.com/milaboratory/pl/plapi/plapiproto/resource_types.d.ts.map +0 -1
  323. package/dist/proto/google/api/http.d.ts.map +0 -1
  324. package/dist/proto/google/protobuf/any.d.ts.map +0 -1
  325. package/dist/proto/google/protobuf/descriptor.d.ts.map +0 -1
  326. package/dist/proto/google/protobuf/duration.d.ts.map +0 -1
  327. package/dist/proto/google/protobuf/empty.d.ts.map +0 -1
  328. package/dist/proto/google/protobuf/struct.d.ts.map +0 -1
  329. package/dist/proto/google/protobuf/timestamp.d.ts.map +0 -1
  330. package/dist/proto/google/protobuf/wrappers.d.ts.map +0 -1
  331. package/dist/test_env.d.ts.map +0 -1
package/dist/helpers/download.cjs ADDED
@@ -0,0 +1,63 @@
+ 'use strict';
+
+ var undici = require('undici');
+ var node_stream = require('node:stream');
+ var consumers = require('node:stream/consumers');
+
+ /** Throws when a status code of the downloading URL was in range [400, 500). */
+ class NetworkError400 extends Error {
+     name = 'NetworkError400';
+ }
+ class RemoteFileDownloader {
+     httpClient;
+     constructor(httpClient) {
+         this.httpClient = httpClient;
+     }
+     async withContent(url, reqHeaders, ops, handler) {
+         const headers = { ...reqHeaders };
+         // Add range header if specified
+         if (ops.range) {
+             headers['Range'] = `bytes=${ops.range.from}-${ops.range.to - 1}`;
+         }
+         const { statusCode, body, headers: responseHeaders } = await undici.request(url, {
+             dispatcher: this.httpClient,
+             headers,
+             signal: ops.signal,
+         });
+         const webBody = node_stream.Readable.toWeb(body);
+         let handlerSuccess = false;
+         try {
+             await checkStatusCodeOk(statusCode, webBody, url);
+             const size = Number(responseHeaders['content-length']);
+             const result = await handler(webBody, size);
+             handlerSuccess = true;
+             return result;
+         }
+         catch (error) {
+             // Cleanup on error (including handler errors)
+             if (!handlerSuccess && !webBody.locked) {
+                 try {
+                     await webBody.cancel();
+                 }
+                 catch {
+                     // Ignore cleanup errors
+                 }
+             }
+             throw error;
+         }
+     }
+ }
+ async function checkStatusCodeOk(statusCode, webBody, url) {
+     if (statusCode != 200 && statusCode != 206 /* partial content from range request */) {
+         const beginning = (await consumers.text(webBody)).substring(0, 1000);
+         if (400 <= statusCode && statusCode < 500) {
+             throw new NetworkError400(`Http error: statusCode: ${statusCode} `
+                 + `url: ${url.toString()}, beginning of body: ${beginning}`);
+         }
+         throw new Error(`Http error: statusCode: ${statusCode} url: ${url.toString()}`);
+     }
+ }
+
+ exports.NetworkError400 = NetworkError400;
+ exports.RemoteFileDownloader = RemoteFileDownloader;
+ //# sourceMappingURL=download.cjs.map
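
Note (not part of the published diff): a minimal usage sketch of the RemoteFileDownloader helper introduced above. It assumes an undici Agent as the Dispatcher and imports the class directly from this compiled module, since it is not re-exported from the package index in this release; the URL and byte range are hypothetical.

    import { Agent } from 'undici';
    import { text } from 'node:stream/consumers';
    // Assumption: imported straight from the compiled module shown above.
    import { RemoteFileDownloader } from './helpers/download.js';

    const downloader = new RemoteFileDownloader(new Agent());

    // Request the first kilobyte; withContent hands the handler a web ReadableStream
    // plus the reported content-length, and cancels the body if the handler or the
    // status check throws (a 4xx status surfaces as NetworkError400).
    const head = await downloader.withContent(
      'https://example.com/data.bin',     // hypothetical URL
      {},                                 // extra request headers
      { range: { from: 0, to: 1024 } },   // DownloadOps: optional range / AbortSignal
      async (content, size) => {
        console.log(`content-length: ${size}`);
        return text(content);
      },
    );

The range is half-open, [from, to): the code above translates it into the header Range: bytes=from-(to - 1).
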
package/dist/helpers/download.cjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"download.cjs","sources":["../../src/helpers/download.ts"],"sourcesContent":["// @TODO Gleb Zakharov\n/* eslint-disable n/no-unsupported-features/node-builtins */\nimport type { Dispatcher } from 'undici';\nimport { request } from 'undici';\nimport { Readable } from 'node:stream';\nimport type { ReadableStream } from 'node:stream/web';\nimport { text } from 'node:stream/consumers';\nimport type { RangeBytes } from '@milaboratories/pl-model-common';\n\nexport interface DownloadOps {\n signal?: AbortSignal;\n range?: RangeBytes;\n}\n\nexport type ContentHandler<T> = (content: ReadableStream, size: number) => Promise<T>;\n\n/** Throws when a status code of the downloading URL was in range [400, 500). */\nexport class NetworkError400 extends Error {\n name = 'NetworkError400';\n}\n\nexport class RemoteFileDownloader {\n constructor(public readonly httpClient: Dispatcher) {}\n\n async withContent<T>(\n url: string,\n reqHeaders: Record<string, string>,\n ops: DownloadOps,\n handler: ContentHandler<T>,\n ): Promise<T> {\n const headers = { ...reqHeaders };\n\n // Add range header if specified\n if (ops.range) {\n headers['Range'] = `bytes=${ops.range.from}-${ops.range.to - 1}`;\n }\n\n const { statusCode, body, headers: responseHeaders } = await request(url, {\n dispatcher: this.httpClient,\n headers,\n signal: ops.signal,\n });\n\n const webBody = Readable.toWeb(body);\n let handlerSuccess = false;\n\n try {\n await checkStatusCodeOk(statusCode, webBody, url);\n const size = Number(responseHeaders['content-length']);\n const result = await handler(webBody, size);\n handlerSuccess = true;\n return result;\n } catch (error) {\n // Cleanup on error (including handler errors)\n if (!handlerSuccess && !webBody.locked) {\n try {\n await webBody.cancel();\n } catch {\n // Ignore cleanup errors\n }\n }\n throw error;\n }\n }\n}\n\nasync function checkStatusCodeOk(statusCode: number, webBody: ReadableStream, url: string) {\n if (statusCode != 200 && statusCode != 206 /* partial content from range request */) {\n const beginning = (await text(webBody)).substring(0, 1000);\n\n if (400 <= statusCode && statusCode < 500) {\n throw new NetworkError400(\n `Http error: statusCode: ${statusCode} `\n + `url: ${url.toString()}, beginning of body: ${beginning}`);\n }\n\n throw new Error(`Http error: statusCode: ${statusCode} url: ${url.toString()}`);\n 
}\n}\n"],"names":["request","Readable","text"],"mappings":";;;;;;AAgBA;AACM,MAAO,eAAgB,SAAQ,KAAK,CAAA;IACxC,IAAI,GAAG,iBAAiB;AACzB;MAEY,oBAAoB,CAAA;AACH,IAAA,UAAA;AAA5B,IAAA,WAAA,CAA4B,UAAsB,EAAA;QAAtB,IAAA,CAAA,UAAU,GAAV,UAAU;IAAe;IAErD,MAAM,WAAW,CACf,GAAW,EACX,UAAkC,EAClC,GAAgB,EAChB,OAA0B,EAAA;AAE1B,QAAA,MAAM,OAAO,GAAG,EAAE,GAAG,UAAU,EAAE;;AAGjC,QAAA,IAAI,GAAG,CAAC,KAAK,EAAE;AACb,YAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAA,MAAA,EAAS,GAAG,CAAC,KAAK,CAAC,IAAI,CAAA,CAAA,EAAI,GAAG,CAAC,KAAK,CAAC,EAAE,GAAG,CAAC,EAAE;QAClE;AAEA,QAAA,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,OAAO,EAAE,eAAe,EAAE,GAAG,MAAMA,cAAO,CAAC,GAAG,EAAE;YACxE,UAAU,EAAE,IAAI,CAAC,UAAU;YAC3B,OAAO;YACP,MAAM,EAAE,GAAG,CAAC,MAAM;AACnB,SAAA,CAAC;QAEF,MAAM,OAAO,GAAGC,oBAAQ,CAAC,KAAK,CAAC,IAAI,CAAC;QACpC,IAAI,cAAc,GAAG,KAAK;AAE1B,QAAA,IAAI;YACF,MAAM,iBAAiB,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,CAAC;YACjD,MAAM,IAAI,GAAG,MAAM,CAAC,eAAe,CAAC,gBAAgB,CAAC,CAAC;YACtD,MAAM,MAAM,GAAG,MAAM,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC;YAC3C,cAAc,GAAG,IAAI;AACrB,YAAA,OAAO,MAAM;QACf;QAAE,OAAO,KAAK,EAAE;;YAEd,IAAI,CAAC,cAAc,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE;AACtC,gBAAA,IAAI;AACF,oBAAA,MAAM,OAAO,CAAC,MAAM,EAAE;gBACxB;AAAE,gBAAA,MAAM;;gBAER;YACF;AACA,YAAA,MAAM,KAAK;QACb;IACF;AACD;AAED,eAAe,iBAAiB,CAAC,UAAkB,EAAE,OAAuB,EAAE,GAAW,EAAA;IACvF,IAAI,UAAU,IAAI,GAAG,IAAI,UAAU,IAAI,GAAG,2CAA2C;AACnF,QAAA,MAAM,SAAS,GAAG,CAAC,MAAMC,cAAI,CAAC,OAAO,CAAC,EAAE,SAAS,CAAC,CAAC,EAAE,IAAI,CAAC;QAE1D,IAAI,GAAG,IAAI,UAAU,IAAI,UAAU,GAAG,GAAG,EAAE;AACzC,YAAA,MAAM,IAAI,eAAe,CACvB,CAAA,wBAAA,EAA2B,UAAU,CAAA,CAAA;kBACnC,CAAA,KAAA,EAAQ,GAAG,CAAC,QAAQ,EAAE,wBAAwB,SAAS,CAAA,CAAE,CAAC;QAChE;AAEA,QAAA,MAAM,IAAI,KAAK,CAAC,CAAA,wBAAA,EAA2B,UAAU,CAAA,MAAA,EAAS,GAAG,CAAC,QAAQ,EAAE,CAAA,CAAE,CAAC;IACjF;AACF;;;;;"}
package/dist/helpers/download.d.ts CHANGED
@@ -15,4 +15,3 @@ export declare class RemoteFileDownloader {
  constructor(httpClient: Dispatcher);
  withContent<T>(url: string, reqHeaders: Record<string, string>, ops: DownloadOps, handler: ContentHandler<T>): Promise<T>;
  }
- //# sourceMappingURL=download.d.ts.map
package/dist/helpers/download.js ADDED
@@ -0,0 +1,60 @@
+ import { request } from 'undici';
+ import { Readable } from 'node:stream';
+ import { text } from 'node:stream/consumers';
+
+ /** Throws when a status code of the downloading URL was in range [400, 500). */
+ class NetworkError400 extends Error {
+     name = 'NetworkError400';
+ }
+ class RemoteFileDownloader {
+     httpClient;
+     constructor(httpClient) {
+         this.httpClient = httpClient;
+     }
+     async withContent(url, reqHeaders, ops, handler) {
+         const headers = { ...reqHeaders };
+         // Add range header if specified
+         if (ops.range) {
+             headers['Range'] = `bytes=${ops.range.from}-${ops.range.to - 1}`;
+         }
+         const { statusCode, body, headers: responseHeaders } = await request(url, {
+             dispatcher: this.httpClient,
+             headers,
+             signal: ops.signal,
+         });
+         const webBody = Readable.toWeb(body);
+         let handlerSuccess = false;
+         try {
+             await checkStatusCodeOk(statusCode, webBody, url);
+             const size = Number(responseHeaders['content-length']);
+             const result = await handler(webBody, size);
+             handlerSuccess = true;
+             return result;
+         }
+         catch (error) {
+             // Cleanup on error (including handler errors)
+             if (!handlerSuccess && !webBody.locked) {
+                 try {
+                     await webBody.cancel();
+                 }
+                 catch {
+                     // Ignore cleanup errors
+                 }
+             }
+             throw error;
+         }
+     }
+ }
+ async function checkStatusCodeOk(statusCode, webBody, url) {
+     if (statusCode != 200 && statusCode != 206 /* partial content from range request */) {
+         const beginning = (await text(webBody)).substring(0, 1000);
+         if (400 <= statusCode && statusCode < 500) {
+             throw new NetworkError400(`Http error: statusCode: ${statusCode} `
+                 + `url: ${url.toString()}, beginning of body: ${beginning}`);
+         }
+         throw new Error(`Http error: statusCode: ${statusCode} url: ${url.toString()}`);
+     }
+ }
+
+ export { NetworkError400, RemoteFileDownloader };
+ //# sourceMappingURL=download.js.map
package/dist/helpers/download.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"download.js","sources":["../../src/helpers/download.ts"],"sourcesContent":["// @TODO Gleb Zakharov\n/* eslint-disable n/no-unsupported-features/node-builtins */\nimport type { Dispatcher } from 'undici';\nimport { request } from 'undici';\nimport { Readable } from 'node:stream';\nimport type { ReadableStream } from 'node:stream/web';\nimport { text } from 'node:stream/consumers';\nimport type { RangeBytes } from '@milaboratories/pl-model-common';\n\nexport interface DownloadOps {\n signal?: AbortSignal;\n range?: RangeBytes;\n}\n\nexport type ContentHandler<T> = (content: ReadableStream, size: number) => Promise<T>;\n\n/** Throws when a status code of the downloading URL was in range [400, 500). */\nexport class NetworkError400 extends Error {\n name = 'NetworkError400';\n}\n\nexport class RemoteFileDownloader {\n constructor(public readonly httpClient: Dispatcher) {}\n\n async withContent<T>(\n url: string,\n reqHeaders: Record<string, string>,\n ops: DownloadOps,\n handler: ContentHandler<T>,\n ): Promise<T> {\n const headers = { ...reqHeaders };\n\n // Add range header if specified\n if (ops.range) {\n headers['Range'] = `bytes=${ops.range.from}-${ops.range.to - 1}`;\n }\n\n const { statusCode, body, headers: responseHeaders } = await request(url, {\n dispatcher: this.httpClient,\n headers,\n signal: ops.signal,\n });\n\n const webBody = Readable.toWeb(body);\n let handlerSuccess = false;\n\n try {\n await checkStatusCodeOk(statusCode, webBody, url);\n const size = Number(responseHeaders['content-length']);\n const result = await handler(webBody, size);\n handlerSuccess = true;\n return result;\n } catch (error) {\n // Cleanup on error (including handler errors)\n if (!handlerSuccess && !webBody.locked) {\n try {\n await webBody.cancel();\n } catch {\n // Ignore cleanup errors\n }\n }\n throw error;\n }\n }\n}\n\nasync function checkStatusCodeOk(statusCode: number, webBody: ReadableStream, url: string) {\n if (statusCode != 200 && statusCode != 206 /* partial content from range request */) {\n const beginning = (await text(webBody)).substring(0, 1000);\n\n if (400 <= statusCode && statusCode < 500) {\n throw new NetworkError400(\n `Http error: statusCode: ${statusCode} `\n + `url: ${url.toString()}, beginning of body: ${beginning}`);\n }\n\n throw new Error(`Http error: statusCode: ${statusCode} url: ${url.toString()}`);\n 
}\n}\n"],"names":[],"mappings":";;;;AAgBA;AACM,MAAO,eAAgB,SAAQ,KAAK,CAAA;IACxC,IAAI,GAAG,iBAAiB;AACzB;MAEY,oBAAoB,CAAA;AACH,IAAA,UAAA;AAA5B,IAAA,WAAA,CAA4B,UAAsB,EAAA;QAAtB,IAAA,CAAA,UAAU,GAAV,UAAU;IAAe;IAErD,MAAM,WAAW,CACf,GAAW,EACX,UAAkC,EAClC,GAAgB,EAChB,OAA0B,EAAA;AAE1B,QAAA,MAAM,OAAO,GAAG,EAAE,GAAG,UAAU,EAAE;;AAGjC,QAAA,IAAI,GAAG,CAAC,KAAK,EAAE;AACb,YAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAA,MAAA,EAAS,GAAG,CAAC,KAAK,CAAC,IAAI,CAAA,CAAA,EAAI,GAAG,CAAC,KAAK,CAAC,EAAE,GAAG,CAAC,EAAE;QAClE;AAEA,QAAA,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,OAAO,EAAE,eAAe,EAAE,GAAG,MAAM,OAAO,CAAC,GAAG,EAAE;YACxE,UAAU,EAAE,IAAI,CAAC,UAAU;YAC3B,OAAO;YACP,MAAM,EAAE,GAAG,CAAC,MAAM;AACnB,SAAA,CAAC;QAEF,MAAM,OAAO,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC;QACpC,IAAI,cAAc,GAAG,KAAK;AAE1B,QAAA,IAAI;YACF,MAAM,iBAAiB,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,CAAC;YACjD,MAAM,IAAI,GAAG,MAAM,CAAC,eAAe,CAAC,gBAAgB,CAAC,CAAC;YACtD,MAAM,MAAM,GAAG,MAAM,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC;YAC3C,cAAc,GAAG,IAAI;AACrB,YAAA,OAAO,MAAM;QACf;QAAE,OAAO,KAAK,EAAE;;YAEd,IAAI,CAAC,cAAc,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE;AACtC,gBAAA,IAAI;AACF,oBAAA,MAAM,OAAO,CAAC,MAAM,EAAE;gBACxB;AAAE,gBAAA,MAAM;;gBAER;YACF;AACA,YAAA,MAAM,KAAK;QACb;IACF;AACD;AAED,eAAe,iBAAiB,CAAC,UAAkB,EAAE,OAAuB,EAAE,GAAW,EAAA;IACvF,IAAI,UAAU,IAAI,GAAG,IAAI,UAAU,IAAI,GAAG,2CAA2C;AACnF,QAAA,MAAM,SAAS,GAAG,CAAC,MAAM,IAAI,CAAC,OAAO,CAAC,EAAE,SAAS,CAAC,CAAC,EAAE,IAAI,CAAC;QAE1D,IAAI,GAAG,IAAI,UAAU,IAAI,UAAU,GAAG,GAAG,EAAE;AACzC,YAAA,MAAM,IAAI,eAAe,CACvB,CAAA,wBAAA,EAA2B,UAAU,CAAA,CAAA;kBACnC,CAAA,KAAA,EAAQ,GAAG,CAAC,QAAQ,EAAE,wBAAwB,SAAS,CAAA,CAAE,CAAC;QAChE;AAEA,QAAA,MAAM,IAAI,KAAK,CAAC,CAAA,wBAAA,EAA2B,UAAU,CAAA,MAAA,EAAS,GAAG,CAAC,QAAQ,EAAE,CAAA,CAAE,CAAC;IACjF;AACF;;;;"}
package/dist/helpers/validate.cjs ADDED
@@ -0,0 +1,12 @@
+ 'use strict';
+
+ var path = require('node:path');
+
+ function validateAbsolute(p) {
+     if (!path.isAbsolute(p))
+         throw new Error(`Path ${p} is not absolute.`);
+     return p;
+ }
+
+ exports.validateAbsolute = validateAbsolute;
+ //# sourceMappingURL=validate.cjs.map
package/dist/helpers/validate.cjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"validate.cjs","sources":["../../src/helpers/validate.ts"],"sourcesContent":["import path from 'node:path';\n\nexport function validateAbsolute(p: string): string {\n if (!path.isAbsolute(p)) throw new Error(`Path ${p} is not absolute.`);\n return p;\n}\n"],"names":[],"mappings":";;;;AAEM,SAAU,gBAAgB,CAAC,CAAS,EAAA;AACxC,IAAA,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC;AAAE,QAAA,MAAM,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAA,iBAAA,CAAmB,CAAC;AACtE,IAAA,OAAO,CAAC;AACV;;;;"}
package/dist/helpers/validate.d.ts CHANGED
@@ -1,2 +1 @@
  export declare function validateAbsolute(p: string): string;
- //# sourceMappingURL=validate.d.ts.map
package/dist/helpers/validate.js ADDED
@@ -0,0 +1,10 @@
+ import path__default from 'node:path';
+
+ function validateAbsolute(p) {
+     if (!path__default.isAbsolute(p))
+         throw new Error(`Path ${p} is not absolute.`);
+     return p;
+ }
+
+ export { validateAbsolute };
+ //# sourceMappingURL=validate.js.map
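
For reference (not part of the diff), a small usage sketch of the validateAbsolute helper added above; it is re-exported from the package index, so the import below uses the package root:

    import path from 'node:path';
    import { validateAbsolute } from '@milaboratories/pl-drivers';

    // Absolute paths are returned unchanged.
    const cacheDir = validateAbsolute(path.resolve('cache', 'blobs'));
    // Relative paths throw: Error: Path cache/blobs is not absolute.
    // validateAbsolute('cache/blobs');
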
package/dist/helpers/validate.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"validate.js","sources":["../../src/helpers/validate.ts"],"sourcesContent":["import path from 'node:path';\n\nexport function validateAbsolute(p: string): string {\n if (!path.isAbsolute(p)) throw new Error(`Path ${p} is not absolute.`);\n return p;\n}\n"],"names":["path"],"mappings":";;AAEM,SAAU,gBAAgB,CAAC,CAAS,EAAA;AACxC,IAAA,IAAI,CAACA,aAAI,CAAC,UAAU,CAAC,CAAC,CAAC;AAAE,QAAA,MAAM,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAA,iBAAA,CAAmB,CAAC;AACtE,IAAA,OAAO,CAAC;AACV;;;;"}
package/dist/index.cjs ADDED
@@ -0,0 +1,72 @@
+ 'use strict';
+
+ var upload = require('./clients/upload.cjs');
+ var progress = require('./clients/progress.cjs');
+ var download = require('./clients/download.cjs');
+ var ls_api = require('./clients/ls_api.cjs');
+ var logs = require('./clients/logs.cjs');
+ var constructors = require('./clients/constructors.cjs');
+ var download_blob = require('./drivers/download_blob/download_blob.cjs');
+ var driver = require('./drivers/download_blob_url/driver.cjs');
+ var snapshot = require('./drivers/download_blob_url/snapshot.cjs');
+ var upload$1 = require('./drivers/upload.cjs');
+ var upload_task = require('./drivers/upload_task.cjs');
+ var logs_stream = require('./drivers/logs_stream.cjs');
+ var logs$1 = require('./drivers/logs.cjs');
+ var driver$1 = require('./drivers/download_url/driver.cjs');
+ var ls = require('./drivers/ls.cjs');
+ var virtual_storages = require('./drivers/virtual_storages.cjs');
+ var helpers = require('./drivers/helpers/helpers.cjs');
+ var types = require('./drivers/types.cjs');
+ var validate = require('./helpers/validate.cjs');
+
+
+
+ exports.ClientUpload = upload.ClientUpload;
+ exports.MTimeError = upload.MTimeError;
+ exports.NetworkError = upload.NetworkError;
+ exports.NoFileForUploading = upload.NoFileForUploading;
+ exports.UnexpectedEOF = upload.UnexpectedEOF;
+ exports.ClientProgress = progress.ClientProgress;
+ exports.ClientDownload = download.ClientDownload;
+ exports.UnknownStorageError = download.UnknownStorageError;
+ exports.WrongLocalFileUrl = download.WrongLocalFileUrl;
+ exports.getFullPath = download.getFullPath;
+ exports.newLocalStorageIdsToRoot = download.newLocalStorageIdsToRoot;
+ exports.parseLocalUrl = download.parseLocalUrl;
+ exports.ClientLs = ls_api.ClientLs;
+ exports.ClientLogs = logs.ClientLogs;
+ exports.createDownloadClient = constructors.createDownloadClient;
+ exports.createLogsClient = constructors.createLogsClient;
+ exports.createLsFilesClient = constructors.createLsFilesClient;
+ exports.createUploadBlobClient = constructors.createUploadBlobClient;
+ exports.createUploadProgressClient = constructors.createUploadProgressClient;
+ exports.DownloadDriver = download_blob.DownloadDriver;
+ exports.DownloadBlobToURLDriver = driver.DownloadBlobToURLDriver;
+ exports.DownloadableBlobSnapshot = snapshot.DownloadableBlobSnapshot;
+ exports.makeDownloadableBlobSnapshot = snapshot.makeDownloadableBlobSnapshot;
+ exports.UploadDriver = upload$1.UploadDriver;
+ exports.makeBlobImportSnapshot = upload$1.makeBlobImportSnapshot;
+ exports.UploadTask = upload_task.UploadTask;
+ exports.isMyUpload = upload_task.isMyUpload;
+ exports.isResourceWasDeletedError = upload_task.isResourceWasDeletedError;
+ exports.isSignMatch = upload_task.isSignMatch;
+ exports.isUpload = upload_task.isUpload;
+ exports.nonRecoverableError = upload_task.nonRecoverableError;
+ exports.uploadBlob = upload_task.uploadBlob;
+ exports.LogsStreamDriver = logs_stream.LogsStreamDriver;
+ exports.LogsDriver = logs$1.LogsDriver;
+ exports.DownloadUrlDriver = driver$1.DownloadUrlDriver;
+ exports.LsDriver = ls.LsDriver;
+ exports.DefaultVirtualLocalStorages = virtual_storages.DefaultVirtualLocalStorages;
+ exports.Updater = helpers.Updater;
+ exports.WrongResourceTypeError = helpers.WrongResourceTypeError;
+ exports.ImportFileHandleData = types.ImportFileHandleData;
+ exports.ImportFileHandleIndexData = types.ImportFileHandleIndexData;
+ exports.ImportFileHandleUploadData = types.ImportFileHandleUploadData;
+ exports.IndexResourceSnapshot = types.IndexResourceSnapshot;
+ exports.OnDemandBlobResourceSnapshot = types.OnDemandBlobResourceSnapshot;
+ exports.UploadResourceSnapshot = types.UploadResourceSnapshot;
+ exports.getSize = types.getSize;
+ exports.validateAbsolute = validate.validateAbsolute;
+ //# sourceMappingURL=index.cjs.map
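
The hunk above is the new CommonJS entry that flat-re-exports the public API. A short consumption sketch (assumption: this release's package.json, whose hunk is not shown here, points require at dist/index.cjs and import at dist/index.js):

    // CommonJS consumers resolve to dist/index.cjs:
    const { LsDriver, UploadDriver, validateAbsolute } = require('@milaboratories/pl-drivers');

    // ESM / TypeScript consumers keep the same named exports via dist/index.js:
    // import { LsDriver, UploadDriver, validateAbsolute } from '@milaboratories/pl-drivers';
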
package/dist/index.cjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.cjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
package/dist/index.d.ts CHANGED
@@ -18,4 +18,3 @@ export * from './drivers/helpers/helpers';
  export * from './drivers/helpers/polling_ops';
  export * from './drivers/types';
  export { validateAbsolute } from './helpers/validate';
- //# sourceMappingURL=index.d.ts.map
package/dist/index.js CHANGED
@@ -1,3 +1,20 @@
- "use strict";var Tt=Object.defineProperty;var bt=(o,e,t)=>e in o?Tt(o,e,{enumerable:!0,configurable:!0,writable:!0,value:t}):o[e]=t;var h=(o,e,t)=>bt(o,typeof e!="symbol"?e+"":e,t);Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const p=require("@milaboratories/pl-client"),kt=require("node:fs/promises"),Ce=require("undici"),T=require("@protobuf-ts/runtime-rpc"),a=require("@protobuf-ts/runtime"),g=require("@milaboratories/ts-helpers"),S=require("node:fs"),U=require("node:path"),I=require("node:stream"),ue=require("node:stream/consumers"),f=require("@milaboratories/computable"),v=require("@milaboratories/pl-model-common"),m=require("@milaboratories/pl-tree"),Ut=require("denque"),R=require("node:crypto"),Fe=require("node:os"),Lt=require("node:readline/promises"),L=require("zod"),Pt=require("@milaboratories/helpers"),Rt=require("node:zlib"),It=require("tar-fs"),St=require("decompress"),Be=require("node:timers/promises"),Dt=require("node:assert"),Nt=require("node:util"),$t=require("node:child_process");function D(o){const e=Object.create(null,{[Symbol.toStringTag]:{value:"Module"}});if(o){for(const t in o)if(t!=="default"){const r=Object.getOwnPropertyDescriptor(o,t);Object.defineProperty(e,t,r.get?r:{enumerable:!0,get:()=>o[t]})}}return e.default=o,Object.freeze(e)}const w=D(kt),pe=D(S),k=D(U),Re=D(Fe),vt=D(Lt),Ee=D(Rt),le=D(It);class Ct extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}new Ct;class Ft extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi.Init",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}new Ft;class Bt extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi.Init.Request",[{no:1,name:"resource_id",kind:"scalar",T:4,L:0}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.resourceId=0n,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.resourceId=e.uint64().toBigInt();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.resourceId!==0n&&t.tag(1,a.WireType.Varint).uint64(e.resourceId);let 
n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const Et=new Bt;class Wt extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi.Init.Response",[{no:1,name:"parts_count",kind:"scalar",T:4,L:0},{no:2,name:"uploaded_parts",kind:"scalar",repeat:1,T:4,L:0}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.partsCount=0n,t.uploadedParts=[],e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.partsCount=e.uint64().toBigInt();break;case 2:if(c===a.WireType.LengthDelimited)for(let P=e.int32()+e.pos;e.pos<P;)i.uploadedParts.push(e.uint64().toBigInt());else i.uploadedParts.push(e.uint64().toBigInt());break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){if(e.partsCount!==0n&&t.tag(1,a.WireType.Varint).uint64(e.partsCount),e.uploadedParts.length){t.tag(2,a.WireType.LengthDelimited).fork();for(let i=0;i<e.uploadedParts.length;i++)t.uint64(e.uploadedParts[i]);t.join()}let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const _t=new Wt;class Mt extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi.UpdateProgress",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}new Mt;class Ot extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi.UpdateProgress.Request",[{no:1,name:"resource_id",kind:"scalar",T:4,L:0},{no:2,name:"bytes_processed",kind:"scalar",T:3,L:0}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.resourceId=0n,t.bytesProcessed=0n,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.resourceId=e.uint64().toBigInt();break;case 2:i.bytesProcessed=e.int64().toBigInt();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.resourceId!==0n&&t.tag(1,a.WireType.Varint).uint64(e.resourceId),e.bytesProcessed!==0n&&t.tag(2,a.WireType.Varint).int64(e.bytesProcessed);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const Ht=new Ot;class zt extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi.UpdateProgress.Response",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let 
i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const xt=new zt;class At extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi.GetPartURL",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}new At;class jt extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi.GetPartURL.Request",[{no:1,name:"resource_id",kind:"scalar",T:4,L:0},{no:2,name:"part_number",kind:"scalar",T:4,L:0},{no:3,name:"uploaded_part_size",kind:"scalar",T:4,L:0},{no:4,name:"is_internal_use",kind:"scalar",T:8}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.resourceId=0n,t.partNumber=0n,t.uploadedPartSize=0n,t.isInternalUse=!1,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.resourceId=e.uint64().toBigInt();break;case 2:i.partNumber=e.uint64().toBigInt();break;case 3:i.uploadedPartSize=e.uint64().toBigInt();break;case 4:i.isInternalUse=e.bool();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.resourceId!==0n&&t.tag(1,a.WireType.Varint).uint64(e.resourceId),e.partNumber!==0n&&t.tag(2,a.WireType.Varint).uint64(e.partNumber),e.uploadedPartSize!==0n&&t.tag(3,a.WireType.Varint).uint64(e.uploadedPartSize),e.isInternalUse!==!1&&t.tag(4,a.WireType.Varint).bool(e.isInternalUse);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const Gt=new jt;class Vt extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi.GetPartURL.HTTPHeader",[{no:1,name:"Name",kind:"scalar",jsonName:"Name",T:9},{no:2,name:"Value",kind:"scalar",jsonName:"Value",T:9}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.name="",t.value="",e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.name=e.string();break;case 2:i.value=e.string();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.name!==""&&t.tag(1,a.WireType.LengthDelimited).string(e.name),e.value!==""&&t.tag(2,a.WireType.LengthDelimited).string(e.value);let 
n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const Z=new Vt;class qt extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi.GetPartURL.Response",[{no:1,name:"upload_url",kind:"scalar",T:9},{no:2,name:"method",kind:"scalar",T:9},{no:3,name:"headers",kind:"message",repeat:2,T:()=>Z},{no:4,name:"chunk_start",kind:"scalar",T:4,L:0},{no:5,name:"chunk_end",kind:"scalar",T:4,L:0}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.uploadUrl="",t.method="",t.headers=[],t.chunkStart=0n,t.chunkEnd=0n,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.uploadUrl=e.string();break;case 2:i.method=e.string();break;case 3:i.headers.push(Z.internalBinaryRead(e,e.uint32(),r));break;case 4:i.chunkStart=e.uint64().toBigInt();break;case 5:i.chunkEnd=e.uint64().toBigInt();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.uploadUrl!==""&&t.tag(1,a.WireType.LengthDelimited).string(e.uploadUrl),e.method!==""&&t.tag(2,a.WireType.LengthDelimited).string(e.method);for(let i=0;i<e.headers.length;i++)Z.internalBinaryWrite(e.headers[i],t.tag(3,a.WireType.LengthDelimited).fork(),r).join();e.chunkStart!==0n&&t.tag(4,a.WireType.Varint).uint64(e.chunkStart),e.chunkEnd!==0n&&t.tag(5,a.WireType.Varint).uint64(e.chunkEnd);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const Jt=new qt;class Zt extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi.Finalize",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}new Zt;class Qt extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi.Finalize.Request",[{no:1,name:"resource_id",kind:"scalar",T:4,L:0}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.resourceId=0n,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.resourceId=e.uint64().toBigInt();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.resourceId!==0n&&t.tag(1,a.WireType.Varint).uint64(e.resourceId);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const Kt=new Qt;class Xt extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.uploadapi.Finalize.Response",[])}create(e){const 
t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const Yt=new Xt,Q=new T.ServiceType("MiLaboratories.Controller.Shared.Upload",[{name:"Init",options:{},I:Et,O:_t},{name:"GetPartURL",options:{},I:Gt,O:Jt},{name:"UpdateProgress",options:{},I:Ht,O:xt},{name:"Finalize",options:{},I:Kt,O:Yt}]);class er{constructor(e){h(this,"typeName",Q.typeName);h(this,"methods",Q.methods);h(this,"options",Q.options);this._transport=e}init(e,t){const r=this.methods[0],n=this._transport.mergeOptions(t);return T.stackIntercept("unary",this._transport,r,n,e)}getPartURL(e,t){const r=this.methods[1],n=this._transport.mergeOptions(t);return T.stackIntercept("unary",this._transport,r,n,e)}updateProgress(e,t){const r=this.methods[2],n=this._transport.mergeOptions(t);return T.stackIntercept("unary",this._transport,r,n,e)}finalize(e,t){const r=this.methods[3],n=this._transport.mergeOptions(t);return T.stackIntercept("unary",this._transport,r,n,e)}}class ge extends Error{constructor(){super(...arguments);h(this,"name","MTimeError")}}class fe extends Error{constructor(){super(...arguments);h(this,"name","UnexpectedEOF")}}class we extends Error{constructor(){super(...arguments);h(this,"name","NetworkError")}}class me extends Error{constructor(){super(...arguments);h(this,"name","NoFileForUploading")}}class We{constructor(e,t,r,n){h(this,"grpcClient");this.httpClient=t,this.logger=n,this.grpcClient=e.createGrpcClientProvider(i=>new er(i))}close(){}async initUpload({id:e,type:t},r){const n=await this.grpcInit(e,t,r);return{overall:n.partsCount,toUpload:this.partsToUpload(n.partsCount,n.uploadedParts)}}async partUpload({id:e,type:t},r,n,i,d){const s=await this.grpcGetPartUrl({id:e,type:t},i,0n,d),c=await tr(r,s.chunkStart,s.chunkEnd);await nr(r,n);const l=Number(s.chunkEnd-s.chunkStart);if(c.length!==l)throw new Error(`Chunk size mismatch: expected ${l} bytes, but read ${c.length} bytes from file`);const u=Object.fromEntries(s.headers.map(({name:y,value:C})=>[y,C])),P=Object.keys(u).find(y=>y.toLowerCase()==="content-length");if(P){const y=Number(u[P]);if(y!==l)throw new Error(`Content-Length mismatch: expected ${l}, but got ${y} in headers`)}try{const{body:y,statusCode:C,headers:J}=await Ce.request(s.uploadUrl,{dispatcher:this.httpClient,body:c,headersTimeout:6e4,bodyTimeout:6e4,reset:!0,headers:u,method:s.method.toUpperCase()}),yt=await y.text();ir(C,yt,J,s)}catch(y){throw y instanceof we?y:new Error(`partUpload: error ${JSON.stringify(y)} happened while trying to do part upload to the url ${s.uploadUrl}, headers: ${JSON.stringify(s.headers)}`)}await this.grpcUpdateProgress({id:e,type:t},BigInt(s.chunkEnd-s.chunkStart),d)}async finalize(e,t){return await this.grpcFinalize(e,t)}partsToUpload(e,t){const r=[],n=new Set(t);for(let i=1n;i<=e;i++)n.has(i)||r.push(i);return r}async grpcInit(e,t,r){return await this.grpcClient.get().init({resourceId:e},p.addRTypeToMetadata(t,r)).response}async grpcGetPartUrl({id:e,type:t},r,n,i){return await 
this.grpcClient.get().getPartURL({resourceId:e,partNumber:r,uploadedPartSize:n,isInternalUse:!1},p.addRTypeToMetadata(t,i)).response}async grpcUpdateProgress({id:e,type:t},r,n){await this.grpcClient.get().updateProgress({resourceId:e,bytesProcessed:r},p.addRTypeToMetadata(t,n)).response}async grpcFinalize({id:e,type:t},r){return await this.grpcClient.get().finalize({resourceId:e},p.addRTypeToMetadata(t,r)).response}}async function tr(o,e,t){let r;try{r=await w.open(o);const n=Number(t-e),i=Number(e),d=Buffer.alloc(n),s=await rr(r,d,n,i);return d.subarray(0,s)}catch(n){throw n&&typeof n=="object"&&"code"in n&&n.code=="ENOENT"?new me(`there is no file ${o} for uploading`):n}finally{await(r==null?void 0:r.close())}}async function rr(o,e,t,r){let n=0;for(;n<t;){const{bytesRead:i}=await o.read(e,n,t-n,r+n);if(i===0)throw new fe("file ended earlier than expected.");n+=i}return n}async function nr(o,e){const t=BigInt(Math.floor((await w.stat(o)).mtimeMs/1e3));if(t>e)throw new ge(`file was modified, expected mtime: ${e}, got: ${t}.`)}function ir(o,e,t,r){if(o!=200)throw new we(`response is not ok, status code: ${o}, body: ${e}, headers: ${JSON.stringify(t)}, url: ${r.uploadUrl}`)}class or extends a.MessageType{constructor(){super("google.protobuf.Duration",[{no:1,name:"seconds",kind:"scalar",T:3,L:0},{no:2,name:"nanos",kind:"scalar",T:5}])}internalJsonWrite(e,t){let r=a.PbLong.from(e.seconds).toNumber();if(r>315576e6||r<-315576e6)throw new Error("Duration value out of range.");let n=e.seconds.toString();if(r===0&&e.nanos<0&&(n="-"+n),e.nanos!==0){let i=Math.abs(e.nanos).toString();i="0".repeat(9-i.length)+i,i.substring(3)==="000000"?i=i.substring(0,3):i.substring(6)==="000"&&(i=i.substring(0,6)),n+="."+i}return n+"s"}internalJsonRead(e,t,r){if(typeof e!="string")throw new Error("Unable to parse Duration from JSON "+a.typeofJsonValue(e)+". Expected string.");let n=e.match(/^(-?)([0-9]+)(?:\.([0-9]+))?s/);if(n===null)throw new Error("Unable to parse Duration from JSON string. Invalid format.");r||(r=this.create());let[,i,d,s]=n,c=a.PbLong.from(i+d);if(c.toNumber()>315576e6||c.toNumber()<-315576e6)throw new Error("Unable to parse Duration from JSON string. 
Value out of range.");if(r.seconds=c.toBigInt(),typeof s=="string"){let l=i+s+"0".repeat(9-s.length);r.nanos=parseInt(l)}return r}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.seconds=0n,t.nanos=0,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.seconds=e.int64().toBigInt();break;case 2:i.nanos=e.int32();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.seconds!==0n&&t.tag(1,a.WireType.Varint).int64(e.seconds),e.nanos!==0&&t.tag(2,a.WireType.Varint).int32(e.nanos);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const H=new or;class sr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.ProgressAPI",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}new sr;class ar extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.ProgressAPI.Report",[{no:1,name:"progress",kind:"scalar",T:2},{no:2,name:"bytes_processed",kind:"scalar",T:4,L:0},{no:3,name:"bytes_total",kind:"scalar",T:4,L:0},{no:4,name:"done",kind:"scalar",T:8},{no:5,name:"name",kind:"scalar",T:9}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.progress=0,t.bytesProcessed=0n,t.bytesTotal=0n,t.done=!1,t.name="",e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.progress=e.float();break;case 2:i.bytesProcessed=e.uint64().toBigInt();break;case 3:i.bytesTotal=e.uint64().toBigInt();break;case 4:i.done=e.bool();break;case 5:i.name=e.string();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.progress!==0&&t.tag(1,a.WireType.Bit32).float(e.progress),e.bytesProcessed!==0n&&t.tag(2,a.WireType.Varint).uint64(e.bytesProcessed),e.bytesTotal!==0n&&t.tag(3,a.WireType.Varint).uint64(e.bytesTotal),e.done!==!1&&t.tag(4,a.WireType.Varint).bool(e.done),e.name!==""&&t.tag(5,a.WireType.LengthDelimited).string(e.name);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const $=new ar;class lr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.ProgressAPI.GetStatus",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new 
globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}new lr;class cr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.ProgressAPI.GetStatus.Request",[{no:1,name:"resource_id",kind:"scalar",T:4,L:0}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.resourceId=0n,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.resourceId=e.uint64().toBigInt();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.resourceId!==0n&&t.tag(1,a.WireType.Varint).uint64(e.resourceId);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const dr=new cr;class hr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.ProgressAPI.GetStatus.Response",[{no:1,name:"report",kind:"message",T:()=>$}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.report=$.internalBinaryRead(e,e.uint32(),r,i.report);break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.report&&$.internalBinaryWrite(e.report,t.tag(1,a.WireType.LengthDelimited).fork(),r).join();let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const ur=new hr;class pr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.ProgressAPI.RealtimeStatus",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}new pr;class gr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.ProgressAPI.RealtimeStatus.Request",[{no:1,name:"resource_id",kind:"scalar",T:4,L:0},{no:2,name:"update_interval",kind:"message",T:()=>H}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.resourceId=0n,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.resourceId=e.uint64().toBigInt();break;case 2:i.updateInterval=H.internalBinaryRead(e,e.uint32(),r,i.updateInterval);break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire 
type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.resourceId!==0n&&t.tag(1,a.WireType.Varint).uint64(e.resourceId),e.updateInterval&&H.internalBinaryWrite(e.updateInterval,t.tag(2,a.WireType.LengthDelimited).fork(),r).join();let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const fr=new gr;class wr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.ProgressAPI.RealtimeStatus.Response",[{no:1,name:"report",kind:"message",T:()=>$}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.report=$.internalBinaryRead(e,e.uint32(),r,i.report);break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.report&&$.internalBinaryWrite(e.report,t.tag(1,a.WireType.LengthDelimited).fork(),r).join();let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const mr=new wr,K=new T.ServiceType("MiLaboratories.Controller.Shared.Progress",[{name:"GetStatus",options:{"google.api.http":{get:"/resources/{resource_id}/get-progress"}},I:dr,O:ur},{name:"RealtimeStatus",serverStreaming:!0,options:{},I:fr,O:mr}]);class yr{constructor(e){h(this,"typeName",K.typeName);h(this,"methods",K.methods);h(this,"options",K.options);this._transport=e}getStatus(e,t){const r=this.methods[0],n=this._transport.mergeOptions(t);return T.stackIntercept("unary",this._transport,r,n,e)}realtimeStatus(e,t){const r=this.methods[1],n=this._transport.mergeOptions(t);return T.stackIntercept("serverStreaming",this._transport,r,n,e)}}class _e{constructor(e,t,r,n){h(this,"grpcClient");this.client=r,this.logger=n,this.grpcClient=e.createGrpcClientProvider(i=>new yr(i))}close(){}async getStatus({id:e,type:t},r){const n=await this.grpcClient.get().getStatus({resourceId:e},p.addRTypeToMetadata(t,r)),i=g.notEmpty(n.response.report);return{done:i.done,progress:i.progress,bytesProcessed:String(i.bytesProcessed),bytesTotal:String(i.bytesTotal)}}async*realtimeStatus({id:e,type:t},r=100,n){n=p.addRTypeToMetadata(t,n);const i=Math.floor(r/1e3),d=(r-i*1e3)*1e6,s=H.create({seconds:BigInt(i),nanos:d});try{const{responses:c}=this.grpcClient.get().realtimeStatus({resourceId:e,updateInterval:s},n);yield*c}catch(c){throw this.logger.warn("Failed to get realtime status"+String(c)),c}}}class j extends Error{constructor(){super(...arguments);h(this,"name","NetworkError400")}}class Me{constructor(e){this.httpClient=e}async withContent(e,t,r,n){const i={...t};r.range&&(i.Range=`bytes=${r.range.from}-${r.range.to-1}`);const{statusCode:d,body:s,headers:c}=await Ce.request(e,{dispatcher:this.httpClient,headers:i,signal:r.signal}),l=I.Readable.toWeb(s);let u=!1;try{await Tr(d,l,e);const P=Number(c["content-length"]),y=await n(l,P);return u=!0,y}catch(P){if(!u&&!l.locked)try{await l.cancel()}catch{}throw P}}}async function Tr(o,e,t){if(o!=200&&o!=206){const r=(await ue.text(e)).substring(0,1e3);throw 400<=o&&o<500?new j(`Http error: statusCode: ${o} url: ${t.toString()}, beginning of body: ${r}`):new Error(`Http error: statusCode: ${o} url: 
${t.toString()}`)}}function N(o){if(!U.isAbsolute(o))throw new Error(`Path ${o} is not absolute.`);return o}class br extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.DownloadAPI",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}new br;class kr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.DownloadAPI.GetDownloadURL",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}new kr;class Ur extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.DownloadAPI.GetDownloadURL.Request",[{no:1,name:"resource_id",kind:"scalar",T:4,L:0},{no:2,name:"is_internal_use",kind:"scalar",T:8}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.resourceId=0n,t.isInternalUse=!1,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.resourceId=e.uint64().toBigInt();break;case 2:i.isInternalUse=e.bool();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.resourceId!==0n&&t.tag(1,a.WireType.Varint).uint64(e.resourceId),e.isInternalUse!==!1&&t.tag(2,a.WireType.Varint).bool(e.isInternalUse);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const Lr=new Ur;class Pr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.DownloadAPI.GetDownloadURL.HTTPHeader",[{no:1,name:"Name",kind:"scalar",jsonName:"Name",T:9},{no:2,name:"Value",kind:"scalar",jsonName:"Value",T:9}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.name="",t.value="",e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.name=e.string();break;case 2:i.value=e.string();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.name!==""&&t.tag(1,a.WireType.LengthDelimited).string(e.name),e.value!==""&&t.tag(2,a.WireType.LengthDelimited).string(e.value);let 
n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const X=new Pr;class Rr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.DownloadAPI.GetDownloadURL.Response",[{no:1,name:"download_url",kind:"scalar",T:9},{no:2,name:"headers",kind:"message",repeat:2,T:()=>X}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.downloadUrl="",t.headers=[],e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.downloadUrl=e.string();break;case 2:i.headers.push(X.internalBinaryRead(e,e.uint32(),r));break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.downloadUrl!==""&&t.tag(1,a.WireType.LengthDelimited).string(e.downloadUrl);for(let i=0;i<e.headers.length;i++)X.internalBinaryWrite(e.headers[i],t.tag(2,a.WireType.LengthDelimited).fork(),r).join();let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const Ir=new Rr,Y=new T.ServiceType("MiLaboratories.Controller.Shared.Download",[{name:"GetDownloadURL",options:{"google.api.http":{get:"/resources/{resource_id}/get-download-url"}},I:Lr,O:Ir}]);class Sr{constructor(e){h(this,"typeName",Y.typeName);h(this,"methods",Y.methods);h(this,"options",Y.options);this._transport=e}getDownloadURL(e,t){const r=this.methods[0],n=this._transport.mergeOptions(t);return T.stackIntercept("unary",this._transport,r,n,e)}}class Oe{constructor(e,t,r,n){h(this,"grpcClient");h(this,"remoteFileDownloader");h(this,"localStorageIdsToRoot");h(this,"localFileReadLimiter",new g.ConcurrencyLimitingExecutor(32));this.httpClient=t,this.logger=r,this.grpcClient=e.createGrpcClientProvider(i=>new Sr(i)),this.remoteFileDownloader=new Me(t),this.localStorageIdsToRoot=xe(n)}close(){}async withBlobContent(e,t,r,n){const{downloadUrl:i,headers:d}=await this.grpcGetDownloadUrl(e,t,r.signal),s=Object.fromEntries(d.map(({name:c,value:l})=>[c,l]));return this.logger.info(`download blob ${p.stringifyWithResourceId(e)} from url ${i}, ops: ${JSON.stringify(r)}`),Nr(i)?await this.withLocalFileContent(i,r,n):await this.remoteFileDownloader.withContent(i,s,r,n)}async withLocalFileContent(e,t,r){const{storageId:n,relativePath:i}=He(e),d=ze(n,this.localStorageIdsToRoot,i);return await this.localFileReadLimiter.run(async()=>{var u,P;const s={start:(u=t.range)==null?void 0:u.from,end:((P=t.range)==null?void 0:P.to)!==void 0?t.range.to-1:void 0};let c,l=!1;try{const y=await w.stat(d);c=pe.createReadStream(d,s);const C=I.Readable.toWeb(c),J=await r(C,y.size);return l=!0,J}catch(y){throw!l&&c&&!c.destroyed&&c.destroy(),y}})}async grpcGetDownloadUrl({id:e,type:t},r,n){const i=r??{};return i.abort=n,await this.grpcClient.get().getDownloadURL({resourceId:e,isInternalUse:!1},p.addRTypeToMetadata(t,i)).response}}function He(o){const e=new URL(o);if(e.pathname=="")throw new G(`url for local filepath ${o} does not match url scheme`);return{storageId:e.host,relativePath:decodeURIComponent(e.pathname.slice(1))}}function ze(o,e,t){const r=e.get(o);if(r===void 0)throw new V(`Unknown storage location: ${o}`);return r===""?t:k.join(r,t)}const Dr="storage://";function Nr(o){return o.startsWith(Dr)}class G extends 
Error{constructor(){super(...arguments);h(this,"name","WrongLocalFileUrl")}}class V extends Error{constructor(){super(...arguments);h(this,"name","UnknownStorageError")}}function xe(o){const e=new Map;for(const t of o)t.localPath!==""&&N(t.localPath),e.set(t.storageId,t.localPath);return e}class $r extends a.MessageType{constructor(){super("google.protobuf.Timestamp",[{no:1,name:"seconds",kind:"scalar",T:3,L:0},{no:2,name:"nanos",kind:"scalar",T:5}])}now(){const e=this.create(),t=Date.now();return e.seconds=a.PbLong.from(Math.floor(t/1e3)).toBigInt(),e.nanos=t%1e3*1e6,e}toDate(e){return new Date(a.PbLong.from(e.seconds).toNumber()*1e3+Math.ceil(e.nanos/1e6))}fromDate(e){const t=this.create(),r=e.getTime();return t.seconds=a.PbLong.from(Math.floor(r/1e3)).toBigInt(),t.nanos=(r%1e3+(r<0&&r%1e3!==0?1e3:0))*1e6,t}internalJsonWrite(e,t){let r=a.PbLong.from(e.seconds).toNumber()*1e3;if(r<Date.parse("0001-01-01T00:00:00Z")||r>Date.parse("9999-12-31T23:59:59Z"))throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");if(e.nanos<0)throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");let n="Z";if(e.nanos>0){let i=(e.nanos+1e9).toString().substring(1);i.substring(3)==="000000"?n="."+i.substring(0,3)+"Z":i.substring(6)==="000"?n="."+i.substring(0,6)+"Z":n="."+i+"Z"}return new Date(r).toISOString().replace(".000Z",n)}internalJsonRead(e,t,r){if(typeof e!="string")throw new Error("Unable to parse Timestamp from JSON "+a.typeofJsonValue(e)+".");let n=e.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);if(!n)throw new Error("Unable to parse Timestamp from JSON. Invalid format.");let i=Date.parse(n[1]+"-"+n[2]+"-"+n[3]+"T"+n[4]+":"+n[5]+":"+n[6]+(n[8]?n[8]:"Z"));if(Number.isNaN(i))throw new Error("Unable to parse Timestamp from JSON. Invalid value.");if(i<Date.parse("0001-01-01T00:00:00Z")||i>Date.parse("9999-12-31T23:59:59Z"))throw new globalThis.Error("Unable to parse Timestamp from JSON. 
Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");return r||(r=this.create()),r.seconds=a.PbLong.from(i/1e3).toBigInt(),r.nanos=0,n[7]&&(r.nanos=parseInt("1"+n[7]+"0".repeat(9-n[7].length))-1e9),r}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.seconds=0n,t.nanos=0,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.seconds=e.int64().toBigInt();break;case 2:i.nanos=e.int32();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.seconds!==0n&&t.tag(1,a.WireType.Varint).int64(e.seconds),e.nanos!==0&&t.tag(2,a.WireType.Varint).int32(e.nanos);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const ee=new $r;class vr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.LsAPI",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}new vr;class Cr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.LsAPI.ListItem",[{no:1,name:"name",kind:"scalar",T:9},{no:2,name:"size",kind:"scalar",T:4,L:0},{no:3,name:"is_dir",kind:"scalar",T:8},{no:10,name:"full_name",kind:"scalar",T:9},{no:11,name:"directory",kind:"scalar",T:9},{no:12,name:"last_modified",kind:"message",T:()=>ee},{no:13,name:"version",kind:"scalar",T:9}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.name="",t.size=0n,t.isDir=!1,t.fullName="",t.directory="",t.version="",e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.name=e.string();break;case 2:i.size=e.uint64().toBigInt();break;case 3:i.isDir=e.bool();break;case 10:i.fullName=e.string();break;case 11:i.directory=e.string();break;case 12:i.lastModified=ee.internalBinaryRead(e,e.uint32(),r,i.lastModified);break;case 13:i.version=e.string();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.name!==""&&t.tag(1,a.WireType.LengthDelimited).string(e.name),e.size!==0n&&t.tag(2,a.WireType.Varint).uint64(e.size),e.isDir!==!1&&t.tag(3,a.WireType.Varint).bool(e.isDir),e.fullName!==""&&t.tag(10,a.WireType.LengthDelimited).string(e.fullName),e.directory!==""&&t.tag(11,a.WireType.LengthDelimited).string(e.directory),e.lastModified&&ee.internalBinaryWrite(e.lastModified,t.tag(12,a.WireType.LengthDelimited).fork(),r).join(),e.version!==""&&t.tag(13,a.WireType.LengthDelimited).string(e.version);let n=r.writeUnknownFields;return 
n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const te=new Cr;class Fr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.LsAPI.List",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}new Fr;class Br extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.LsAPI.List.Request",[{no:1,name:"resource_id",kind:"scalar",T:4,L:0},{no:2,name:"location",kind:"scalar",T:9}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.resourceId=0n,t.location="",e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.resourceId=e.uint64().toBigInt();break;case 2:i.location=e.string();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.resourceId!==0n&&t.tag(1,a.WireType.Varint).uint64(e.resourceId),e.location!==""&&t.tag(2,a.WireType.LengthDelimited).string(e.location);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const Er=new Br;class Wr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.LsAPI.List.Response",[{no:1,name:"items",kind:"message",repeat:2,T:()=>te},{no:2,name:"delimiter",kind:"scalar",T:9}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.items=[],t.delimiter="",e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.items.push(te.internalBinaryRead(e,e.uint32(),r));break;case 2:i.delimiter=e.string();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){for(let i=0;i<e.items.length;i++)te.internalBinaryWrite(e.items[i],t.tag(1,a.WireType.LengthDelimited).fork(),r).join();e.delimiter!==""&&t.tag(2,a.WireType.LengthDelimited).string(e.delimiter);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const _r=new Wr,re=new T.ServiceType("MiLaboratories.Controller.Shared.LS",[{name:"List",options:{},I:Er,O:_r}]);class Mr{constructor(e){h(this,"typeName",re.typeName);h(this,"methods",re.methods);h(this,"options",re.options);this._transport=e}list(e,t){const r=this.methods[0],n=this._transport.mergeOptions(t);return T.stackIntercept("unary",this._transport,r,n,e)}}class Ae{constructor(e,t){h(this,"grpcClient");this.logger=t,this.grpcClient=e.createGrpcClientProvider(r=>new Mr(r))}close(){}async list(e,t,r){return await 
this.grpcClient.get().list({resourceId:e.id,location:t},p.addRTypeToMetadata(e.type,r)).response}}class Or extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.StreamingAPI",[])}create(e){const t=globalThis.Object.create(this.messagePrototype);return e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}new Or;class Hr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.StreamingAPI.StreamBinary",[{no:1,name:"resource_id",kind:"scalar",T:4,L:0},{no:2,name:"offset",kind:"scalar",T:3,L:0},{no:11,name:"chunk_size",kind:"scalar",opt:!0,T:13},{no:20,name:"read_limit",kind:"scalar",opt:!0,T:3,L:0}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.resourceId=0n,t.offset=0n,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.resourceId=e.uint64().toBigInt();break;case 2:i.offset=e.int64().toBigInt();break;case 11:i.chunkSize=e.uint32();break;case 20:i.readLimit=e.int64().toBigInt();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.resourceId!==0n&&t.tag(1,a.WireType.Varint).uint64(e.resourceId),e.offset!==0n&&t.tag(2,a.WireType.Varint).int64(e.offset),e.chunkSize!==void 0&&t.tag(11,a.WireType.Varint).uint32(e.chunkSize),e.readLimit!==void 0&&t.tag(20,a.WireType.Varint).int64(e.readLimit);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const zr=new Hr;class xr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.StreamingAPI.ReadBinary",[{no:1,name:"resource_id",kind:"scalar",T:4,L:0},{no:2,name:"offset",kind:"scalar",T:3,L:0},{no:11,name:"chunk_size",kind:"scalar",opt:!0,T:13}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.resourceId=0n,t.offset=0n,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.resourceId=e.uint64().toBigInt();break;case 2:i.offset=e.int64().toBigInt();break;case 11:i.chunkSize=e.uint32();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.resourceId!==0n&&t.tag(1,a.WireType.Varint).uint64(e.resourceId),e.offset!==0n&&t.tag(2,a.WireType.Varint).int64(e.offset),e.chunkSize!==void 0&&t.tag(11,a.WireType.Varint).uint32(e.chunkSize);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const Ar=new xr;class jr extends 
a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.StreamingAPI.StreamText",[{no:1,name:"resource_id",kind:"scalar",T:4,L:0},{no:2,name:"offset",kind:"scalar",T:3,L:0},{no:20,name:"read_limit",kind:"scalar",opt:!0,T:3,L:0},{no:21,name:"search",kind:"scalar",opt:!0,T:9},{no:22,name:"search_re",kind:"scalar",opt:!0,T:9}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.resourceId=0n,t.offset=0n,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.resourceId=e.uint64().toBigInt();break;case 2:i.offset=e.int64().toBigInt();break;case 20:i.readLimit=e.int64().toBigInt();break;case 21:i.search=e.string();break;case 22:i.searchRe=e.string();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.resourceId!==0n&&t.tag(1,a.WireType.Varint).uint64(e.resourceId),e.offset!==0n&&t.tag(2,a.WireType.Varint).int64(e.offset),e.readLimit!==void 0&&t.tag(20,a.WireType.Varint).int64(e.readLimit),e.search!==void 0&&t.tag(21,a.WireType.LengthDelimited).string(e.search),e.searchRe!==void 0&&t.tag(22,a.WireType.LengthDelimited).string(e.searchRe);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const Gr=new jr;class Vr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.StreamingAPI.ReadText",[{no:1,name:"resource_id",kind:"scalar",T:4,L:0},{no:2,name:"offset",kind:"scalar",T:3,L:0},{no:20,name:"read_limit",kind:"scalar",opt:!0,T:3,L:0},{no:21,name:"search",kind:"scalar",opt:!0,T:9},{no:22,name:"search_re",kind:"scalar",opt:!0,T:9}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.resourceId=0n,t.offset=0n,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.resourceId=e.uint64().toBigInt();break;case 2:i.offset=e.int64().toBigInt();break;case 20:i.readLimit=e.int64().toBigInt();break;case 21:i.search=e.string();break;case 22:i.searchRe=e.string();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.resourceId!==0n&&t.tag(1,a.WireType.Varint).uint64(e.resourceId),e.offset!==0n&&t.tag(2,a.WireType.Varint).int64(e.offset),e.readLimit!==void 0&&t.tag(20,a.WireType.Varint).int64(e.readLimit),e.search!==void 0&&t.tag(21,a.WireType.LengthDelimited).string(e.search),e.searchRe!==void 0&&t.tag(22,a.WireType.LengthDelimited).string(e.searchRe);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const qr=new Vr;class Jr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.StreamingAPI.LastLines",[{no:1,name:"resource_id",kind:"scalar",T:4,L:0},{no:2,name:"offset",kind:"scalar",opt:!0,T:3,L:0},{no:3,name:"line_count",kind:"scalar",opt:!0,T:5},{no:21,name:"search",kind:"scalar",opt:!0,T:9},{no:22,name:"search_re",kind:"scalar",opt:!0,T:9}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.resourceId=0n,e!==void 
0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.resourceId=e.uint64().toBigInt();break;case 2:i.offset=e.int64().toBigInt();break;case 3:i.lineCount=e.int32();break;case 21:i.search=e.string();break;case 22:i.searchRe=e.string();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.resourceId!==0n&&t.tag(1,a.WireType.Varint).uint64(e.resourceId),e.offset!==void 0&&t.tag(2,a.WireType.Varint).int64(e.offset),e.lineCount!==void 0&&t.tag(3,a.WireType.Varint).int32(e.lineCount),e.search!==void 0&&t.tag(21,a.WireType.LengthDelimited).string(e.search),e.searchRe!==void 0&&t.tag(22,a.WireType.LengthDelimited).string(e.searchRe);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const Zr=new Jr;class Qr extends a.MessageType{constructor(){super("MiLaboratories.Controller.Shared.StreamingAPI.Response",[{no:1,name:"data",kind:"scalar",T:12},{no:2,name:"size",kind:"scalar",T:4,L:0},{no:3,name:"new_offset",kind:"scalar",T:4,L:0}])}create(e){const t=globalThis.Object.create(this.messagePrototype);return t.data=new Uint8Array(0),t.size=0n,t.newOffset=0n,e!==void 0&&a.reflectionMergePartial(this,t,e),t}internalBinaryRead(e,t,r,n){let i=n??this.create(),d=e.pos+t;for(;e.pos<d;){let[s,c]=e.tag();switch(s){case 1:i.data=e.bytes();break;case 2:i.size=e.uint64().toBigInt();break;case 3:i.newOffset=e.uint64().toBigInt();break;default:let l=r.readUnknownField;if(l==="throw")throw new globalThis.Error(`Unknown field ${s} (wire type ${c}) for ${this.typeName}`);let u=e.skip(c);l!==!1&&(l===!0?a.UnknownFieldHandler.onRead:l)(this.typeName,i,s,c,u)}}return i}internalBinaryWrite(e,t,r){e.data.length&&t.tag(1,a.WireType.LengthDelimited).bytes(e.data),e.size!==0n&&t.tag(2,a.WireType.Varint).uint64(e.size),e.newOffset!==0n&&t.tag(3,a.WireType.Varint).uint64(e.newOffset);let n=r.writeUnknownFields;return n!==!1&&(n==!0?a.UnknownFieldHandler.onWrite:n)(this.typeName,e,t),t}}const F=new Qr,ne=new T.ServiceType("MiLaboratories.Controller.Shared.Streaming",[{name:"StreamBinary",serverStreaming:!0,options:{},I:zr,O:F},{name:"ReadBinary",options:{},I:Ar,O:F},{name:"StreamText",serverStreaming:!0,options:{},I:Gr,O:F},{name:"ReadText",options:{},I:qr,O:F},{name:"LastLines",options:{},I:Zr,O:F}]);class Kr{constructor(e){h(this,"typeName",ne.typeName);h(this,"methods",ne.methods);h(this,"options",ne.options);this._transport=e}streamBinary(e,t){const r=this.methods[0],n=this._transport.mergeOptions(t);return T.stackIntercept("serverStreaming",this._transport,r,n,e)}readBinary(e,t){const r=this.methods[1],n=this._transport.mergeOptions(t);return T.stackIntercept("unary",this._transport,r,n,e)}streamText(e,t){const r=this.methods[2],n=this._transport.mergeOptions(t);return T.stackIntercept("serverStreaming",this._transport,r,n,e)}readText(e,t){const r=this.methods[3],n=this._transport.mergeOptions(t);return T.stackIntercept("unary",this._transport,r,n,e)}lastLines(e,t){const r=this.methods[4],n=this._transport.mergeOptions(t);return T.stackIntercept("unary",this._transport,r,n,e)}}class je{constructor(e,t,r){h(this,"grpcClient");this.httpClient=t,this.logger=r,this.grpcClient=e.createGrpcClientProvider(n=>new Kr(n))}close(){}async 
lastLines({id:e,type:t},r,n=0n,i,d){return(await this.grpcClient.get().lastLines({resourceId:e,lineCount:r,offset:n,search:i},p.addRTypeToMetadata(t,d))).response}async readText({id:e,type:t},r,n=0n,i,d){return(await this.grpcClient.get().readText({resourceId:g.notEmpty(e),readLimit:BigInt(r),offset:n,search:i},p.addRTypeToMetadata(t,d))).response}}function Xr(o,e,t){return e.getDriver({name:"DownloadBlob",init:(r,n,i)=>new Oe(n,i,o,t)})}function Yr(o,e){return o.getDriver({name:"StreamLogs",init:(t,r,n)=>new je(r,n,e)})}function en(o,e){return o.getDriver({name:"UploadProgress",init:(t,r,n)=>new _e(r,n,o,e)})}function tn(o,e){return o.getDriver({name:"UploadBlob",init:(t,r,n)=>new We(r,n,o,e)})}function Ge(o,e){return o.getDriver({name:"LsFiles",init:(t,r,n)=>new Ae(r,e)})}const rn=new g.ConcurrencyLimitingExecutor(32);async function Ie(o,e){return await rn.run(async()=>{const t={};e&&(t.start=e.from,t.end=e.to-1);let r;try{return r=pe.createReadStream(o,t),await ue.buffer(r)}catch(n){throw r&&!r.destroyed&&r.destroy(),n}})}const Ve=/^blob\+local:\/\/download\/(?<path>.*)#(?<signature>.*)$/;function nn(o,e){return`blob+local://download/${o}#${e.sign(o)}`}function on(o){return!!o.match(Ve)}function ie(o,e){const t=o.match(Ve);if(t===null)throw new Error(`Local handle is malformed: ${o}, matches: ${t}`);const{path:r,signature:n}=t.groups;return e.verify(r,n,`Signature verification failed for: ${o}`),{path:r,signature:n}}const qe=m.rsSchema({kv:{"ctl/file/blobInfo":L.z.object({sizeBytes:L.z.coerce.number()})}});function ye(o,e){const t=o.kv["ctl/file/blobInfo"].sizeBytes;if(e){const r=e.to-e.from;if(r>t)throw new Error(`getSize: range (${JSON.stringify(e)}, newSize: ${r}) is greater than size (${t})`);return r}return t}const E=L.z.object({localPath:L.z.string(),pathSignature:L.z.string(),sizeBytes:L.z.string(),modificationTime:L.z.string()}),Te=L.z.object({storageId:L.z.string(),path:L.z.string()}),sn=L.z.union([E,Te]),Je=m.rsSchema({data:E,fields:{blob:!1}}),Ze=m.rsSchema({fields:{incarnation:!1}}),Qe=/^blob\+remote:\/\/download\/(?<content>(?<resourceType>.+)\/(?<resourceVersion>.+?)\/(?<resourceId>\d+?)\/(?<size>\d+?))#(?<signature>.*)$/;function an(o,e){let t=`${o.type.name}/${o.type.version}/${BigInt(o.id)}/${ye(o)}`;return`blob+remote://download/${t}#${e.sign(t)}`}function ln(o){return!!o.match(Qe)}function cn(o,e){const t=o.match(Qe);if(t===null)throw new Error(`Remote handle is malformed: ${o}, matches: ${t}`);const{content:r,resourceType:n,resourceVersion:i,resourceId:d,size:s,signature:c}=t.groups;return e.verify(r,c,`Signature verification failed for ${o}`),{info:{id:p.bigintToResourceId(BigInt(d)),type:{name:n,version:i}},size:Number(s)}}class q extends Error{constructor(){super(...arguments);h(this,"name","WrongResourceTypeError")}}class Ke{constructor(e){h(this,"updating");this.onUpdate=e}schedule(){this.updating==null&&(this.updating=(async()=>{try{await this.onUpdate()}catch(e){console.log(`error while updating in Updater: ${e}`)}finally{this.updating=void 0}})())}}function Xe(o,e){return o?`log+live://log/${e.type.name}/${e.type.version}/${BigInt(e.id)}`:`log+ready://log/${e.type.name}/${e.type.version}/${BigInt(e.id)}`}const Ye=/^log\+live:\/\/log\/(?<resourceType>.*)\/(?<resourceVersion>.*)\/(?<resourceId>.*)$/;function z(o){return Ye.test(o)}const et=/^log\+ready:\/\/log\/(?<resourceType>.*)\/(?<resourceVersion>.*)\/(?<resourceId>.*)$/;function dn(o){return et.test(o)}function x(o){let e;if(z(o))e=o.match(Ye);else if(dn(o))e=o.match(et);else throw new Error(`Log handle is 
malformed: ${o}`);if(e==null)throw new Error(`Log handle wasn't parsed: ${o}`);const{resourceType:t,resourceVersion:r,resourceId:n}=e.groups;return{id:p.bigintToResourceId(BigInt(n)),type:{name:t,version:r}}}function b(o){return`${BigInt(o)}`}function Se(o){return k.basename(o)}class hn{constructor(e,t,r,n,i){h(this,"change",new f.ChangeSource);h(this,"signalCtl",new AbortController);h(this,"counter",new g.CallersCounter);h(this,"error");h(this,"done",!1);h(this,"size",0);h(this,"state",{});this.logger=e,this.clientDownload=t,this.rInfo=r,this.handle=n,this.path=i}info(){return{rInfo:this.rInfo,fPath:this.path,done:this.done,error:this.error,state:this.state}}attach(e,t){this.counter.inc(t),this.done||this.change.attachWatcher(e)}async download(){try{const e=await this.ensureDownloaded();this.setDone(e),this.change.markChanged(`blob download for ${p.resourceIdToString(this.rInfo.id)} finished`)}catch(e){throw this.logger.error(`download blob ${p.stringifyWithResourceId(this.rInfo)} failed: ${e}, state: ${JSON.stringify(this.state)}`),be(e)&&(this.setError(e),this.change.markChanged(`blob download for ${p.resourceIdToString(this.rInfo.id)} failed`),await w.rm(this.path,{force:!0})),e}}async ensureDownloaded(){this.signalCtl.signal.throwIfAborted(),this.state={},this.state.filePath=this.path,await g.ensureDirExists(k.dirname(this.state.filePath)),this.signalCtl.signal.throwIfAborted(),this.state.dirExists=!0;const e=await g.fileExists(this.state.filePath);if(this.signalCtl.signal.throwIfAborted(),e){this.state.fileExists=!0,this.logger.info(`a blob was already downloaded: ${this.state.filePath}`);const r=await w.stat(this.state.filePath);return this.signalCtl.signal.throwIfAborted(),this.state.fileSize=r.size,this.state.fileSize}return await this.clientDownload.withBlobContent(this.rInfo,{},{signal:this.signalCtl.signal},async(r,n)=>(this.state.fileSize=n,this.state.downloaded=!0,await g.createPathAtomically(this.logger,this.state.filePath,async i=>{const d=I.Writable.toWeb(S.createWriteStream(i,{flags:"wx"}));await r.pipeTo(d,{signal:this.signalCtl.signal}),this.state.tempWritten=!0}),this.state.done=!0,n))}abort(e){this.signalCtl.abort(new tt(e))}getBlob(){return this.done?{done:this.done,result:un(this.handle,this.size,this.error)}:{done:!1}}setDone(e){this.done=!0,this.size=e}setError(e){this.done=!0,this.error=e}}function be(o){return o instanceof tt||o instanceof j||o instanceof V||o instanceof G||(o==null?void 0:o.code)=="ENOENT"||o.name=="RpcError"&&(o.code=="NOT_FOUND"||o.code=="ABORTED")}class tt extends Error{constructor(){super(...arguments);h(this,"name","DownloadAborted")}}function un(o,e,t){return t?{ok:!1,error:t}:o?{ok:!0,value:{handle:o,size:e}}:{ok:!1,error:new Error("No file or handle provided")}}class ke{constructor(e){h(this,"cache",new Map);h(this,"totalSizeBytes",0);this.softSizeBytes=e}existsFile(e){return this.cache.get(e)!=null}getFile(e,t){const r=this.cache.get(e);return r!=null&&r.counter.inc(t),r}removeFile(e,t){return g.mapGet(this.cache,e).counter.dec(t),this.toDelete()}toDelete(){if(this.totalSizeBytes<=this.softSizeBytes)return[];const e=[];let t=0;return g.mapEntries(this.cache).filter(([r,n])=>n.counter.isZero()).forEach(([r,n])=>{if(this.totalSizeBytes-t<=this.softSizeBytes)return;const i=g.mapGet(this.cache,r);t+=i.size,e.push(i)}),e}addCache(e,t){const r=this.cache.get(e.path)==null;if(this.cache.set(e.path,e),e.counter.inc(t),e.size<0)throw new Error(`empty sizeBytes: 
${e}`);r&&(this.totalSizeBytes+=e.size)}removeCache(e){this.cache.delete(e.path),this.totalSizeBytes-=e.size}}const pn=L.z.object({ranges:L.z.array(v.RangeBytes)}),gn=".ranges.json";function fn(o){return o+gn}async function wn(o,e){let t={ranges:[]};try{const r=await w.readFile(e,"utf8");t=pn.parse(JSON.parse(r))}catch(r){if(r instanceof SyntaxError||r instanceof L.z.ZodError){const n=`readRangesFile: the file ${e} was corrupted: ${r}`;throw o.error(n),new nt(n)}if(!(r instanceof Error&&"code"in r&&r.code==="ENOENT"))throw r}return rt(t),t}async function mn(o,e,t){await g.createPathAtomically(o,e,async r=>{await w.writeFile(r,JSON.stringify(t,null,2),{flag:"wx"})})}function rt(o){o.ranges.sort((e,t)=>e.from-t.from);for(let e=0;e<o.ranges.length-1;e++)o.ranges[e].to>=o.ranges[e+1].from&&(yn(o,e),e--)}function yn(o,e){const t=Math.min(o.ranges[e].from,o.ranges[e+1].from),r=Math.max(o.ranges[e].to,o.ranges[e+1].to);o.ranges.splice(e,2,{from:t,to:r})}function W(o){return o.ranges.reduce((e,t)=>e+t.to-t.from,0)}function Tn(o,e){for(const t of o.ranges)if(t.from<=e.from&&e.to<=t.to)return!0;return!1}function bn(o,e){return o.ranges.push(e),rt(o),o}async function kn(o,e,t){try{await(await w.open(e,"w")).close(),await Un(e,t)}catch(r){o.error(`Error creating file ${e} on platform ${t}: ${r}`)}}async function Un(o,e){e==="win32"&&await g.spawnAsync("fsutil",["sparse","setflag",`"${o}"`],{stdio:"pipe"})}async function Ln(o,e,t,r,n){await g.fileExists(t)||await kn(o,t,e);const i=await w.open(t,"r+");await i.write(r,0,r.length,n),await i.close()}class nt extends Error{constructor(){super(...arguments);h(this,"name","CorruptedRangesError")}}class Pn{constructor(e,t){this.logger=e,this.cacheDir=t}fPath(e){return U.join(this.cacheDir,fn(e))}async get(e){return await wn(this.logger,this.fPath(e))}async set(e,t){return await mn(this.logger,this.fPath(e),t)}async delete(e){await S.promises.rm(this.fPath(e))}}class Rn{constructor(e,t){h(this,"suffix",".sparse.bin");this.logger=e,this.cacheDir=t}async all(){return await g.ensureDirExists(this.cacheDir),(await S.promises.readdir(this.cacheDir)).filter(t=>t.endsWith(this.suffix))}async exists(e){return await g.fileExists(this.path(e))}path(e){return U.join(this.cacheDir,e+this.suffix)}async write(e,t,r){await g.ensureDirExists(this.cacheDir),await Ln(this.logger,process.platform,this.path(e),t,r)}async delete(e){await S.promises.rm(this.path(e))}}class In{constructor(e,t,r,n){h(this,"lock",new Pt.functions.AwaitLock);h(this,"keyToLastAccessTime",new Map);h(this,"size",0);this.logger=e,this.maxSize=t,this.ranges=r,this.storage=n}async reset(){await oe(this.lock,async()=>{await this.resetUnsafe()})}async get(e,t){return await oe(this.lock,async()=>await this.getUnsafe(e,t))}async set(e,t,r){await oe(this.lock,async()=>{await this.setUnsafe(e,t,r)})}async resetUnsafe(){this.size=0,this.keyToLastAccessTime=new Map;const e=new Date;for(const t of await this.storage.all()){const r=await this.ranges.get(t);this.size+=W(r),this.keyToLastAccessTime.set(t,e)}}async getUnsafe(e,t){if(await this.storage.exists(e)){this.keyToLastAccessTime.set(e,new Date);const r=await this.getRanges(e);return Tn(r,t)?this.storage.path(e):void 0}}async setUnsafe(e,t,r){await this.setWithoutEviction(e,t,r),await this.ensureEvicted()}async setWithoutEviction(e,t,r){if(t.to-t.from!==r.length)throw new Error(`SparseCache.set: trying to set ${e} with wrong range length: range: ${JSON.stringify(t)}, data: ${r.length}`);this.keyToLastAccessTime.set(e,new Date);const n=await 
this.getRanges(e);this.size-=W(n),await this.storage.write(e,r,t.from);const i=bn(n,t);this.size+=W(i),await this.ranges.set(e,i)}async ensureEvicted(){const e=g.mapEntries(this.keyToLastAccessTime);for(e.sort(([t,r],[n,i])=>i.getTime()-r.getTime());this.size>this.maxSize;){const t=e.pop();if(!t)break;const[r,n]=t,i=await this.getRanges(r);this.size-=W(i),this.rmKey(r)}}async getRanges(e){try{return await this.ranges.get(e)}catch(t){if(t instanceof nt)return await this.rmKey(e),await this.resetUnsafe(),await this.ranges.get(e);throw t}}async rmKey(e){await this.storage.delete(e),await this.ranges.delete(e),this.keyToLastAccessTime.delete(e)}}async function oe(o,e){try{return await o.acquireAsync(),await e()}finally{o.release()}}class Ue{constructor(e,t,r,n,i,d,s){h(this,"keyToDownload",new Map);h(this,"cache");h(this,"rangesCache");h(this,"downloadQueue");h(this,"keyToOnDemand",new Map);h(this,"idToLastLines",new Map);h(this,"idToProgressLog",new Map);h(this,"saveDir");this.logger=e,this.clientDownload=t,this.clientLogs=r,this.rangesCacheDir=i,this.signer=d,this.ops=s,this.cache=new ke(this.ops.cacheSoftSizeBytes);const c=new Pn(this.logger,this.rangesCacheDir),l=new Rn(this.logger,this.rangesCacheDir);this.rangesCache=new In(this.logger,this.ops.rangesCacheMaxSizeBytes,c,l),this.downloadQueue=new g.TaskProcessor(this.logger,s.nConcurrentDownloads),this.saveDir=k.resolve(n)}static async init(e,t,r,n,i,d,s){const c=new Ue(e,t,r,n,i,d,s);return await c.rangesCache.reset(),c}getDownloadedBlob(e,t){if(t===void 0)return f.Computable.make(d=>this.getDownloadedBlob(e,d));const r=m.treeEntryToResourceInfo(e,t),n=R.randomUUID();t.addOnDestroy(()=>this.releaseBlob(r,n));const i=this.getDownloadedBlobNoCtx(t.watcher,r,n);return i==null&&t.markUnstable("download blob is still undefined"),i}getDownloadedBlobNoCtx(e,t,r){B("getDownloadedBlob",t.type);const n=this.getOrSetNewTask(t,r);n.attach(e,r);const i=n.getBlob();if(i.done){if(i.result.ok)return i.result.value;throw i.result.error}}getOrSetNewTask(e,t){const r=b(e.id),n=this.keyToDownload.get(r);if(n)return n;const i=k.resolve(this.saveDir,r),d=new hn(this.logger,this.clientDownload,e,nn(i,this.signer),i);return this.keyToDownload.set(r,d),this.downloadQueue.push({fn:()=>this.downloadBlob(d,t),recoverableErrorPredicate:s=>!be(s)}),d}async downloadBlob(e,t){await e.download();const r=e.getBlob();r.done&&r.result.ok&&this.cache.addCache(e,t)}getOnDemandBlob(e,t){if(t===void 0)return f.Computable.make(d=>this.getOnDemandBlob(e,d));const r=m.isPlTreeEntry(e)?m.makeResourceSnapshot(e,qe,t):e,n=R.randomUUID();return t.addOnDestroy(()=>this.releaseOnDemandBlob(r.id,n)),this.getOnDemandBlobNoCtx(r,n)}getOnDemandBlobNoCtx(e,t){B("getOnDemandBlob",e.type);let r=this.keyToOnDemand.get(b(e.id));return r===void 0&&(r=new Sn(ye(e),an(e,this.signer)),this.keyToOnDemand.set(b(e.id),r)),r.attach(t),r.getHandle()}getLocalPath(e){const{path:t}=ie(e,this.signer);return t}async getContent(e,t){if(t&&v.validateRangeBytes(t,"getContent"),on(e))return await Ie(this.getLocalPath(e),t);if(ln(e)){const r=cn(e,this.signer),n=b(r.info.id),i=await this.rangesCache.get(n,t??{from:0,to:r.size});if(i)return await Ie(i,t);const d=await this.clientDownload.withBlobContent({id:r.info.id,type:r.info.type},void 0,{range:t},async s=>await ue.buffer(s));return await this.rangesCache.set(n,t??{from:0,to:r.size},d),d}throw new Error("Malformed remote handle")}getComputableContent(e,t){return 
t&&v.validateRangeBytes(t,"getComputableContent"),f.Computable.make(r=>this.getDownloadedBlob(e,r),{postprocessValue:r=>r?this.getContent(r.handle,t):void 0}).withStableType()}getLastLogs(e,t,r){if(r==null)return f.Computable.make(s=>this.getLastLogs(e,t,s));const n=m.treeEntryToResourceInfo(e,r),i=R.randomUUID();r.addOnDestroy(()=>this.releaseBlob(n,i));const d=this.getLastLogsNoCtx(r.watcher,n,t,i);return d==null&&r.markUnstable("either a file was not downloaded or logs was not read"),d}getLastLogsNoCtx(e,t,r,n){B("getLastLogs",t.type);const i=this.getDownloadedBlobNoCtx(e,t,n);if(i==null)return;const{path:d}=ie(i.handle,this.signer);let s=this.idToLastLines.get(b(t.id));if(s==null){const l=new De(d,r);this.idToLastLines.set(b(t.id),l),s=l}const c=s.getOrSchedule(e);if(c.error)throw c.error;return c.log}getProgressLog(e,t,r){if(r==null)return f.Computable.make(s=>this.getProgressLog(e,t,s));const n=m.treeEntryToResourceInfo(e,r),i=R.randomUUID();r.addOnDestroy(()=>this.releaseBlob(n,i));const d=this.getProgressLogNoCtx(r.watcher,n,t,i);return d===void 0&&r.markUnstable("either a file was not downloaded or a progress log was not read"),d}getProgressLogNoCtx(e,t,r,n){B("getProgressLog",t.type);const i=this.getDownloadedBlobNoCtx(e,t,n);if(i==null)return;const{path:d}=ie(i.handle,this.signer);let s=this.idToProgressLog.get(b(t.id));if(s==null){const l=new De(d,1,r);this.idToProgressLog.set(b(t.id),l),s=l}const c=s.getOrSchedule(e);if(c.error)throw c.error;return c.log}getLogHandle(e,t){if(t==null)return f.Computable.make(n=>this.getLogHandle(e,n));const r=m.treeEntryToResourceInfo(e,t);return this.getLogHandleNoCtx(r)}getLogHandleNoCtx(e){return B("getLogHandle",e.type),Xe(!1,e)}async lastLines(e,t,r,n){const i=await this.clientLogs.lastLines(x(e),t,BigInt(r??0),n);return{live:!1,shouldUpdateHandle:!1,data:i.data,size:Number(i.size),newOffset:Number(i.newOffset)}}async readText(e,t,r,n){const i=await this.clientLogs.readText(x(e),t,BigInt(r??0),n);return{live:!1,shouldUpdateHandle:!1,data:i.data,size:Number(i.size),newOffset:Number(i.newOffset)}}async releaseBlob(e,t){const r=this.keyToDownload.get(b(e.id));if(r!=null)if(this.cache.existsFile(b(e.id))){const n=this.cache.removeFile(b(e.id),t);await Promise.all(n.map(async i=>{await w.rm(i.path),this.cache.removeCache(i),this.removeTask(g.mapGet(this.keyToDownload,Se(i.path)),`the task ${p.stringifyWithResourceId(i)} was removedfrom cache along with ${p.stringifyWithResourceId(n.map(d=>d.path))}`)}))}else r.counter.dec(t)&&this.removeTask(r,`the task ${p.stringifyWithResourceId(r.info())} was removed from cache`)}removeTask(e,t){e.abort(t),e.change.markChanged(`download task for ${e.path} removed: ${t}`),this.keyToDownload.delete(Se(e.path)),this.idToLastLines.delete(b(e.rInfo.id)),this.idToProgressLog.delete(b(e.rInfo.id))}async releaseOnDemandBlob(e,t){var n;(((n=this.keyToOnDemand.get(b(e)))==null?void 0:n.release(t))??!1)&&this.keyToOnDemand.delete(b(e))}async releaseAll(){this.downloadQueue.stop(),this.keyToDownload.forEach((e,t)=>{this.keyToDownload.delete(t),e.change.markChanged(`task ${p.resourceIdToString(e.rInfo.id)} released`)})}}class Sn{constructor(e,t){h(this,"counter",new g.CallersCounter);this.size=e,this.handle=t}getHandle(){return{handle:this.handle,size:this.size}}attach(e){this.counter.inc(e)}release(e){return this.counter.dec(e)}}class De{constructor(e,t,r){h(this,"updater");h(this,"log");h(this,"change",new f.ChangeSource);h(this,"error");this.path=e,this.lines=t,this.patternToSearch=r,this.updater=new 
Ke(async()=>this.update())}getOrSchedule(e){return this.change.attachWatcher(e),this.updater.schedule(),{log:this.log,error:this.error}}async update(){try{const e=await Dn(this.path,this.lines,this.patternToSearch);this.log!=e&&this.change.markChanged(`logs for ${this.path} updated`),this.log=e}catch(e){if(e.name=="RpcError"&&e.code=="NOT_FOUND"){this.log="",this.error=e,this.change.markChanged(`log update for ${this.path} failed, resource not found`);return}throw e}}}async function Dn(o,e,t){let r,n;try{r=pe.createReadStream(o),n=vt.createInterface({input:r,crlfDelay:1/0});const i=new Ut;for await(const d of n)t!=null&&!d.includes(t)||(i.push(d),i.length>e&&i.shift());return i.toArray().join(Re.EOL)+Re.EOL}finally{try{n&&n.close()}catch(i){console.error("Error closing readline interface:",i)}try{r&&!r.destroyed&&r.destroy()}catch(i){console.error("Error destroying read stream:",i)}}}function B(o,e){if(!e.name.startsWith("Blob/")){let t=`${o}: wrong resource type: ${e.name}, expected: a resource of type that starts with 'Blob/'.`;throw e.name=="Blob"&&(t+=" If it's called from workflow, should a file be exported with 'file.exportFile' function?"),new q(t)}}const it=m.rsSchema({});function ot(o,e){const t=m.isPlTreeEntry(o)?e.accessor(o).node():m.isPlTreeEntryAccessor(o)?o.node():o;return m.makeResourceSnapshot(t,it)}function Nn(o,e,t){const r=U.relative(e,t);return`plblob+folder://${o.sign(r)}.${r}.blob`}function $n(o,e,t){const r=U.relative(e,t);return`block-ui://${o.sign(r)}.${r}.uidir`}function vn(o,e,t){return st(o,e,t)}function Cn(o,e,t){return st(o,e,t)}function st(o,e,t){const r=new URL(e),[n,i,d]=r.host.split(".");o.verify(i,n,`signature verification failed for url: ${e}, subfolder: ${i}`);const s=decodeURIComponent(r.pathname.slice(1));let c=Fn(U.join(t,`${i}`),s);return(r.pathname==""||r.pathname=="/")&&(c=U.join(c,"index.html")),U.resolve(c)}function Fn(o,e){const t=U.resolve(o,e),r=U.resolve(o);if(!t.startsWith(r))throw new Error("Path validation failed.");return t}class Bn{constructor(e,t,r,n,i,d,s){h(this,"counter",new g.CallersCounter);h(this,"change",new f.ChangeSource);h(this,"signalCtl",new AbortController);h(this,"error");h(this,"done",!1);h(this,"size",0);h(this,"url");h(this,"state");this.logger=e,this.signer=t,this.saveDir=r,this.path=n,this.rInfo=i,this.format=d,this.clientDownload=s}info(){return{rInfo:this.rInfo,format:this.format,path:this.path,done:this.done,size:this.size,error:this.error,taskHistory:this.state}}attach(e,t){this.counter.inc(t),this.done||this.change.attachWatcher(e)}async download(){try{const e=await this.downloadAndDecompress(this.signalCtl.signal);this.setDone(e),this.change.markChanged(`download and decompress for ${p.resourceIdToString(this.rInfo.id)} finished`),this.logger.info(`blob to URL task is done: ${p.stringifyWithResourceId(this.info())}`)}catch(e){if(this.logger.warn(`a error was produced: ${e} for blob to URL task: ${p.stringifyWithResourceId(this.info())}`),En(e)){this.setError(e),this.change.markChanged(`download and decompress for ${p.resourceIdToString(this.rInfo.id)} failed`),await ce(this.path);return}throw e}}async downloadAndDecompress(e){return this.state={},this.state.parentDir=U.dirname(this.path),await g.ensureDirExists(this.state.parentDir),this.state.fileExisted=await g.fileExists(this.path),this.state.fileExisted?await at(this.path):await this.clientDownload.withBlobContent(this.rInfo,{},{signal:e},async(r,n)=>(this.state.downloaded=!0,await g.createPathAtomically(this.logger,this.path,async 
i=>{switch(this.state.tempPath=i,this.state.archiveFormat=this.format,this.format){case"tar":await w.mkdir(i);const d=I.Writable.toWeb(le.extract(i));await r.pipeTo(d,{signal:e});return;case"tgz":await w.mkdir(i);const s=I.Transform.toWeb(Ee.createGunzip()),c=I.Writable.toWeb(le.extract(i));await r.pipeThrough(s,{signal:e}).pipeTo(c,{signal:e});return;case"zip":this.state.zipPath=this.path+".zip";const l=I.Writable.toWeb(S.createWriteStream(this.state.zipPath));await r.pipeTo(l,{signal:e}),this.state.zipPathCreated=!0,await St(this.state.zipPath,i,{filter:u=>!u.path.endsWith("/")}),this.state.zipDecompressed=!0,await S.promises.rm(this.state.zipPath),this.state.zipPathDeleted=!0;return;default:a.assertNever(this.format)}}),this.state.pathCreated=!0,n))}getURL(){if(this.done)return{url:g.notEmpty(this.url)};if(this.error)return{error:this.error}}setDone(e){this.done=!0,this.size=e,this.url=Nn(this.signer,this.saveDir,this.path)}setError(e){this.error=String(e)}abort(e){this.signalCtl.abort(new lt(e))}}async function at(o){const e=await w.readdir(o,{withFileTypes:!0});return(await Promise.all(e.map(async r=>{const n=U.join(o,r.name);return r.isDirectory()?await at(n):(await w.stat(n)).size}))).reduce((r,n)=>r+n,0)}async function ce(o){await w.rm(o,{recursive:!0,force:!0})}let lt=class extends Error{constructor(){super(...arguments);h(this,"name","URLAborted")}};function En(o){return o instanceof lt||o instanceof j||o instanceof V||o instanceof G||(o==null?void 0:o.code)=="ENOENT"||o.name=="RpcError"&&(o.code=="NOT_FOUND"||o.code=="ABORTED")||String(o).includes("incorrect header check")}function _(o,e){return`id:${String(BigInt(o))}-${e}`}class Wn{constructor(e,t,r,n,i={cacheSoftSizeBytes:50*1024*1024,nConcurrentDownloads:50}){h(this,"idToDownload",new Map);h(this,"downloadQueue");h(this,"cache");this.logger=e,this.signer=t,this.clientDownload=r,this.saveDir=n,this.opts=i,this.downloadQueue=new g.TaskProcessor(this.logger,this.opts.nConcurrentDownloads,{type:"exponentialWithMaxDelayBackoff",initialDelay:1e4,maxDelay:3e4,backoffMultiplier:1.5,jitter:.5}),this.cache=new ke(this.opts.cacheSoftSizeBytes)}info(){return{saveDir:this.saveDir,opts:this.opts,idToDownloadSize:this.idToDownload.size,idToDownloadKeys:this.idToDownload.keys(),idToDownload:Array.from(this.idToDownload.entries()).map(([e,t])=>[e,t.info()])}}getPathForCustomProtocol(e){if(v.isFolderURL(e))return vn(this.signer,e,this.saveDir);throw new Error(`getPathForCustomProtocol: ${e} is invalid`)}extractArchiveAndGetURL(e,t,r){if(r===void 0)return f.Computable.make(s=>this.extractArchiveAndGetURL(e,t,s));const n=m.isPlTreeEntry(e)?ot(e,r):e,i=R.randomUUID();r.addOnDestroy(()=>this.releasePath(n.id,t,i));const d=this.extractArchiveAndGetURLNoCtx(n,t,r.watcher,i);if((d==null?void 0:d.url)===void 0&&r.markUnstable(`a path to the downloaded archive might be undefined. 
The current result: ${d}`),(d==null?void 0:d.error)!==void 0)throw d==null?void 0:d.error;return d==null?void 0:d.url}extractArchiveAndGetURLNoCtx(e,t,r,n){const i=this.idToDownload.get(_(e.id,t));if(i!=null)return i.attach(r,n),i.getURL();const d=this.setNewTask(r,e,t,n);return this.downloadQueue.push({fn:async()=>this.downloadUrl(d,n),recoverableErrorPredicate:s=>!be(s)}),d.getURL()}async downloadUrl(e,t){var r;await e.download(),((r=e.getURL())==null?void 0:r.url)!=null&&this.cache.addCache(e,t)}async releasePath(e,t,r){const n=this.idToDownload.get(_(e,t));if(n!=null)if(this.cache.existsFile(n.path)){const i=this.cache.removeFile(n.path,r);await Promise.all(i.map(async d=>{await ce(d.path),this.cache.removeCache(d),this.removeTask(d,`the task ${p.stringifyWithResourceId(d.info())} was removedfrom cache along with ${p.stringifyWithResourceId(i.map(s=>s.info()))}`)}))}else n.counter.dec(r)&&this.removeTask(n,`the task ${p.stringifyWithResourceId(n.info())} was removed from cache`)}async releaseAll(){this.downloadQueue.stop(),await Promise.all(Array.from(this.idToDownload.entries()).map(async([e,t])=>{await ce(t.path),this.cache.removeCache(t),this.removeTask(t,`the task ${p.stringifyWithResourceId(t.info())} was released when the driver was closed`)}))}setNewTask(e,t,r,n){const i=new Bn(this.logger,this.signer,this.saveDir,this.getFilePath(t.id,r),t,r,this.clientDownload);return i.attach(e,n),this.idToDownload.set(_(t.id,r),i),i}removeTask(e,t){e.abort(t),e.change.markChanged(`task for ${p.resourceIdToString(e.rInfo.id)} removed: ${t}`),this.idToDownload.delete(_(e.rInfo.id,e.format))}getFilePath(e,t){return k.join(this.saveDir,`${String(BigInt(e))}_${t}`)}}class ct{constructor(e,t,r,n,i,d){h(this,"change",new f.ChangeSource);h(this,"counter",new g.CallersCounter);h(this,"nMaxUploads");h(this,"nPartsWithThisUploadSpeed",0);h(this,"nPartsToIncreaseUpload",10);h(this,"uploadData");h(this,"progress");h(this,"failed");h(this,"alreadyExisted",!1);this.logger=e,this.clientBlob=t,this.clientProgress=r,this.maxNConcurrentPartsUpload=n,this.res=d,this.nMaxUploads=this.maxNConcurrentPartsUpload;const{uploadData:s,progress:c}=_n(d,i);this.uploadData=s,this.progress=c}getProgress(e,t){if(this.incCounter(e,t),this.failed)throw this.logger.error(`Uploading terminally failed: ${this.progress.lastError}`),new Error(this.progress.lastError);return Mn(this.progress)}shouldScheduleUpload(){return ht(this.progress)}async uploadBlobTask(){try{await dt(this.logger,this.clientBlob,this.res,this.uploadData,this.isComputableDone.bind(this),{nPartsWithThisUploadSpeed:this.nPartsWithThisUploadSpeed,nPartsToIncreaseUpload:this.nPartsToIncreaseUpload,currentSpeed:this.nMaxUploads,maxSpeed:this.maxNConcurrentPartsUpload}),this.change.markChanged(`blob upload for ${p.resourceIdToString(this.res.id)} finished`)}catch(e){if(this.setRetriableError(e),de(e)){this.logger.warn(`resource was deleted while uploading a blob: ${e}`),this.change.markChanged(`blob upload for ${p.resourceIdToString(this.res.id)} aborted, resource was deleted`),this.setDone(!0);return}if(this.logger.error(`error while uploading a blob: ${e}`),this.change.markChanged(`blob upload for ${p.resourceIdToString(this.res.id)} failed`),Le(e)){this.setTerminalError(e);return}throw xn(e)&&(this.nMaxUploads=jn(this.logger,this.nMaxUploads,1)),e}}async updateStatus(){var e;try{const t=await 
this.clientProgress.getStatus(this.res,{timeout:1e4}),r=this.progress.status,n=zn(this.alreadyExisted,Hn(t));this.progress.status=n,this.setDone(t.done),(t.done||this.progress.status.progress!=(r==null?void 0:r.progress))&&this.change.markChanged(`upload status for ${p.resourceIdToString(this.res.id)} changed`)}catch(t){if(this.setRetriableError(t),t.name=="RpcError"&&t.code=="DEADLINE_EXCEEDED"||(e=t==null?void 0:t.message)!=null&&e.includes("DEADLINE_EXCEEDED")){this.logger.warn("deadline exceeded while getting a status of BlobImport");return}if(de(t)){this.logger.warn(`resource was not found while updating a status of BlobImport: ${t}, ${p.stringifyWithResourceId(this.res)}`),this.change.markChanged(`upload status for ${p.resourceIdToString(this.res.id)} changed, resource not found`),this.setDone(!0);return}this.logger.error(`retryable error while updating a status of BlobImport: ${t}`)}}setRetriableError(e){this.progress.lastError=String(e)}setTerminalError(e){this.progress.lastError=String(e),this.progress.done=!1,this.failed=!0}setDoneIfOutputSet(e){On(e)&&(this.setDone(!0),this.alreadyExisted=!0)}setDone(e){this.progress.done=e,e&&(this.progress.lastError=void 0)}incCounter(e,t){this.change.attachWatcher(e),this.counter.inc(t)}decCounter(e){return this.counter.dec(e)}isComputableDone(){return this.counter.isZero()}}async function dt(o,e,t,r,n,i){Dt(A(t),"the upload operation can be done only for BlobUploads");const d=1e4;if(n())return;const s=await e.initUpload(t,{timeout:d});o.info(`started to upload blob ${t.id}, parts overall: ${s.overall}, parts remained: ${s.toUpload.length}, number of concurrent uploads: ${i.currentSpeed}`);const c=l=>async u=>{n()||(await e.partUpload(t,r.localPath,BigInt(r.modificationTime),l,{timeout:d}),o.info(`uploaded chunk ${l}/${s.overall} of resource: ${t.id}`),i.nPartsWithThisUploadSpeed++,i.nPartsWithThisUploadSpeed>=i.nPartsToIncreaseUpload&&(i.nPartsWithThisUploadSpeed=0,i.currentSpeed=An(o,i.currentSpeed,i.maxSpeed),u.setConcurrency(i.currentSpeed)))};await g.asyncPool(i.currentSpeed,s.toUpload.map(c)),!n()&&(await e.finalize(t,{timeout:d}),o.info(`uploading of resource ${t.id} finished.`))}function _n(o,e){let t,r;return A(o)&&(r=E.parse(o.data),t=ut(e,r.localPath,r.pathSignature)),{uploadData:r,progress:{done:!1,status:void 0,isUpload:A(o),isUploadSignMatch:t,lastError:void 0}}}function ht(o){return o.isUpload&&(o.isUploadSignMatch??!1)}function Mn(o){return o.done,o.isUpload,o.isUploadSignMatch,o.lastError,o.status&&(o.status.progress,o.status.bytesProcessed,o.status.bytesTotal),o}function On(o){return"blob"in o.fields?o.fields.blob!==void 0:o.fields.incarnation!==void 0}function A(o){return o.type.name.startsWith("BlobUpload")}function ut(o,e,t){try{return o.verify(e,t),!0}catch{return!1}}function Hn(o){return{progress:o.progress??0,bytesProcessed:Number(o.bytesProcessed),bytesTotal:Number(o.bytesTotal)}}function zn(o,e){return o&&e.bytesTotal!=0&&e.bytesProcessed==0?{progress:1,bytesProcessed:Number(e.bytesTotal),bytesTotal:Number(e.bytesTotal)}:e}function de(o){return o.name=="RpcError"&&(o.code=="NOT_FOUND"||o.code=="ABORTED"||o.code=="ALREADY_EXISTS")}function Le(o){return o instanceof ge||o instanceof fe||o instanceof me}function xn(o){return o==null?void 0:o.message.includes("UND_ERR_HEADERS_TIMEOUT")}function An(o,e,t){const r=Math.min(e+2,t);return r!=e&&o.info(`uploadTask.increaseConcurrency: increased from ${e} to ${r}`),r}function jn(o,e,t){const r=Math.max(Math.round(e/2),t);return r!=e&&o.info(`uploadTask.decreaseConcurrency: 
decreased from ${e} to ${r}`),r}function pt(o,e){const t=m.isPlTreeEntry(o)?e.accessor(o).node():m.isPlTreeEntryAccessor(o)?o.node():o;return t.resourceType.name.startsWith("BlobUpload")?m.makeResourceSnapshot(t,Je):m.makeResourceSnapshot(t,Ze)}class Gn{constructor(e,t,r,n,i={nConcurrentPartUploads:10,nConcurrentGetProgresses:10,pollingInterval:1e3,stopPollingDelay:1e3}){h(this,"idToProgress",new Map);h(this,"uploadQueue");h(this,"hooks");h(this,"scheduledOnNextState",[]);h(this,"keepRunning",!1);h(this,"currentLoop");this.logger=e,this.signer=t,this.clientBlob=r,this.clientProgress=n,this.opts=i,this.uploadQueue=new g.TaskProcessor(this.logger,1,{type:"exponentialWithMaxDelayBackoff",initialDelay:20,maxDelay:15e3,backoffMultiplier:1.5,jitter:.5}),this.hooks=new f.PollingComputableHooks(()=>this.startUpdating(),()=>this.stopUpdating(),{stopDebounce:i.stopPollingDelay},(d,s)=>this.scheduleOnNextState(d,s))}getProgressId(e,t){if(t==null)return f.Computable.make(d=>this.getProgressId(e,d));const r=m.isPlTreeEntry(e)?pt(e,t):e,n=R.randomUUID();return t.attacheHooks(this.hooks),t.addOnDestroy(()=>this.release(r.id,n)),this.getProgressIdNoCtx(t.watcher,r,n)}getProgressIdNoCtx(e,t,r){qn("getProgressId",t.type);const n=this.idToProgress.get(t.id);if(n!=null)return n.setDoneIfOutputSet(t),n.getProgress(e,r);const i=new ct(this.logger,this.clientBlob,this.clientProgress,this.opts.nConcurrentPartUploads,this.signer,t);return this.idToProgress.set(t.id,i),i.shouldScheduleUpload()&&this.uploadQueue.push({fn:()=>i.uploadBlobTask(),recoverableErrorPredicate:d=>!Le(d)}),i.setDoneIfOutputSet(t),i.getProgress(e,r)}async release(e,t){const r=this.idToProgress.get(e);if(r===void 0)return;r.decCounter(t)&&this.idToProgress.delete(e)}async releaseAll(){this.uploadQueue.stop()}scheduleOnNextState(e,t){this.scheduledOnNextState.push({resolve:e,reject:t})}startUpdating(){this.keepRunning=!0,this.currentLoop===void 0&&(this.currentLoop=this.mainLoop())}stopUpdating(){this.keepRunning=!1}async mainLoop(){for(;this.keepRunning;){const e=this.scheduledOnNextState;this.scheduledOnNextState=[];try{await g.asyncPool(this.opts.nConcurrentGetProgresses,this.getAllNotDoneProgresses().map(t=>async()=>await t.updateStatus())),e.forEach(t=>t.resolve())}catch(t){console.error(t),e.forEach(r=>r.reject(t))}if(!this.keepRunning)break;await Be.scheduler.wait(this.opts.pollingInterval)}this.currentLoop=void 0}getAllNotDoneProgresses(){return Array.from(this.idToProgress.entries()).filter(([e,t])=>!Vn(t.progress)).map(([e,t])=>t)}}function Vn(o){var e;return o.done&&(((e=o.status)==null?void 0:e.progress)??0)>=1}function qn(o,e){if(!e.name.startsWith("BlobUpload")&&!e.name.startsWith("BlobIndex"))throw new q(`${o}: wrong resource type: ${e.name}, expected: a resource of either type 'BlobUpload' or 'BlobIndex'.`)}class Jn{constructor(e,t,r={nConcurrentGetLogs:10,pollingInterval:1e3,stopPollingDelay:1e3}){h(this,"idToLastLines",new Map);h(this,"idToProgressLog",new Map);h(this,"hooks");h(this,"scheduledOnNextState",[]);h(this,"keepRunning",!1);h(this,"currentLoop");this.logger=e,this.clientLogs=t,this.opts=r,this.hooks=new f.PollingComputableHooks(()=>this.startUpdating(),()=>this.stopUpdating(),{stopDebounce:r.stopPollingDelay},(n,i)=>this.scheduleOnNextState(n,i))}getLastLogs(e,t,r){if(r==null)return f.Computable.make(s=>this.getLastLogs(e,t,s));const n=m.treeEntryToResourceInfo(e,r),i=R.randomUUID();r.attacheHooks(this.hooks),r.addOnDestroy(()=>this.releaseLastLogs(n.id,i));const d=this.getLastLogsNoCtx(r.watcher,n,t,i);return 
r.markUnstable("The logs are from stream, so we consider them unstable. Final values will be got from blobs."),d}getLastLogsNoCtx(e,t,r,n){se("getLastLogs",t.type);let i=this.idToLastLines.get(t.id);if(i==null){const s=new Ne(this.logger,this.clientLogs,t,r);this.idToLastLines.set(t.id,s),i=s}i.attach(e,n);const d=i.getLog();if(d.error!=null)throw d.error;return d.log}getProgressLog(e,t,r){if(r==null)return f.Computable.make(s=>this.getProgressLog(e,t,s));const n=m.treeEntryToResourceInfo(e,r),i=R.randomUUID();r.attacheHooks(this.hooks),r.addOnDestroy(()=>this.releaseProgressLog(n.id,i));const d=this.getProgressLogNoCtx(r.watcher,n,t,i);return r.markUnstable("The progress log is from the stream, so we consider it unstable. Final value will be got from blobs."),d}getProgressLogNoCtx(e,t,r,n){se("getProgressLog",t.type);let i=this.idToProgressLog.get(t.id);if(i==null){const s=new Ne(this.logger,this.clientLogs,t,1,r);this.idToProgressLog.set(t.id,s),i=s}i.attach(e,n);const d=i.getLog();if(d.error)throw d.error;return d.log}getLogHandle(e,t){if(t==null)return f.Computable.make(i=>this.getLogHandle(e,i));const r=m.treeEntryToResourceInfo(e,t),n=this.getLogHandleNoCtx(r);return t.markUnstable(`live_log:${p.resourceIdToString(r.id)}`),n}getLogHandleNoCtx(e){return se("getLogHandle",e.type),Xe(!0,e)}async lastLines(e,t,r,n){return await this.tryWithNotFound(e,()=>this.clientLogs.lastLines(x(e),t,BigInt(r??0),n))}async readText(e,t,r,n){return await this.tryWithNotFound(e,()=>this.clientLogs.readText(x(e),t,BigInt(r??0),n))}async tryWithNotFound(e,t){if(!z(e))throw new Error(`Not live log handle was passed to live log driver, handle: ${e}`);try{const r=await t();return{live:!0,shouldUpdateHandle:!1,data:r.data,size:Number(r.size),newOffset:Number(r.newOffset)}}catch(r){if(r.name=="RpcError"&&r.code=="NOT_FOUND")return{shouldUpdateHandle:!0};throw r}}async releaseLastLogs(e,t){var n;((n=this.idToLastLines.get(e))==null?void 0:n.release(t))&&this.idToLastLines.delete(e)}async releaseProgressLog(e,t){var n;((n=this.idToProgressLog.get(e))==null?void 0:n.release(t))&&this.idToProgressLog.delete(e)}async releaseAll(){}scheduleOnNextState(e,t){this.scheduledOnNextState.push({resolve:e,reject:t})}startUpdating(){this.keepRunning=!0,this.currentLoop===void 0&&(this.currentLoop=this.mainLoop())}stopUpdating(){this.keepRunning=!1}async mainLoop(){for(;this.keepRunning;){const e=this.scheduledOnNextState;this.scheduledOnNextState=[];try{const t=this.getAllLogs();await g.asyncPool(this.opts.nConcurrentGetLogs,t.map(r=>async()=>await r.update())),e.forEach(r=>r.resolve())}catch(t){console.error(t),e.forEach(r=>r.reject(t))}if(!this.keepRunning)break;await Be.scheduler.wait(this.opts.pollingInterval)}this.currentLoop=void 0}getAllLogs(){return Array.from(this.idToLastLines.entries()).concat(Array.from(this.idToProgressLog.entries())).map(([e,t])=>t)}}class Ne{constructor(e,t,r,n,i){h(this,"logs");h(this,"error");h(this,"change",new f.ChangeSource);h(this,"counter",new g.CallersCounter);this.logger=e,this.clientLogs=t,this.rInfo=r,this.lines=n,this.patternToSearch=i}getLog(){return{log:this.logs,error:this.error}}attach(e,t){this.change.attachWatcher(e),this.counter.inc(t)}release(e){return this.counter.dec(e)}async update(){try{const e=await this.clientLogs.lastLines(this.rInfo,this.lines,0n,this.patternToSearch),t=new TextDecoder().decode(e.data);this.logs!=t&&this.change.markChanged(`logs for ${p.resourceIdToString(this.rInfo.id)} updated`),this.logs=t,this.error=void 
0;return}catch(e){if(e.name=="RpcError"&&e.code=="NOT_FOUND"){this.logs="",this.error=e,this.change.markChanged();return}throw this.logger.error(`Stream log lines for ${p.stringifyWithResourceId(this.rInfo.id)} failed, reason: ${e}`),e}}}function se(o,e){if(!e.name.startsWith("StreamWorkdir"))throw new q(`${o}: wrong resource type: ${e.name}, expected: a resource of type 'StreamWorkdir'.`)}class Zn{constructor(e,t,r){this.logger=e,this.logsStreamDriver=t,this.downloadDriver=r}getLastLogs(e,t,r){if(r===void 0)return f.Computable.make(i=>this.getLastLogs(e,t,i));const n=O(r,e);if(n===void 0){r.markUnstable("no stream in stream manager");return}if(M(n))return this.downloadDriver.getLastLogs(n,t,r);try{return this.logsStreamDriver.getLastLogs(n,t,r)}catch(i){if(i.name=="RpcError"&&i.code=="NOT_FOUND"){r.markUnstable(`NOT_FOUND in logs stream driver while getting last logs: ${i}`);return}throw i}}getProgressLog(e,t,r){if(r===void 0)return f.Computable.make(i=>this.getProgressLog(e,t,i));const n=O(r,e);if(n===void 0){r.markUnstable("no stream in stream manager");return}if(M(n))return this.downloadDriver.getProgressLog(n,t,r);try{return this.logsStreamDriver.getProgressLog(n,t,r)}catch(i){if(i.name=="RpcError"&&i.code=="NOT_FOUND"){r.markUnstable(`NOT_FOUND in logs stream driver while getting a progress log: ${i}`);return}throw i}}getProgressLogWithInfo(e,t,r){if(r===void 0)return f.Computable.make(i=>this.getProgressLogWithInfo(e,t,i));const n=O(r,e);if(n===void 0){r.markUnstable("no stream in stream manager");return}if(M(n))return{progressLine:this.downloadDriver.getProgressLog(n,t,r),live:!1};try{return{progressLine:this.logsStreamDriver.getProgressLog(n,t,r),live:!0}}catch(i){if(i.name=="RpcError"&&i.code=="NOT_FOUND"){r.markUnstable(`NOT_FOUND in logs stream driver while getting a progress log with info: ${i}`);return}throw i}}getLogHandle(e,t){if(t===void 0)return f.Computable.make(n=>this.getLogHandle(e,n));const r=O(t,e);if(r===void 0){t.markUnstable("no stream in stream manager");return}return M(r)?this.downloadDriver.getLogHandle(r,t):this.logsStreamDriver.getLogHandle(r,t)}async lastLines(e,t,r,n){return z(e)?await this.logsStreamDriver.lastLines(e,t,r,n):await this.downloadDriver.lastLines(e,t,r,n)}async readText(e,t,r,n){return z(e)?await this.logsStreamDriver.readText(e,t,r,n):await this.downloadDriver.readText(e,t,r,n)}}function M(o){return!o.type.name.startsWith("StreamWorkdir")}function O(o,e){var t;return(t=o.accessor(e).node().traverse("stream"))==null?void 0:t.resourceInfo}class Qn{constructor(e,t,r,n,i){h(this,"counter",new g.CallersCounter);h(this,"change",new f.ChangeSource);h(this,"signalCtl",new AbortController);h(this,"error");h(this,"done",!1);h(this,"size",0);this.logger=e,this.path=t,this.url=r,this.signer=n,this.saveDir=i}info(){return{url:this.url.toString(),path:this.path,done:this.done,size:this.size,error:this.error}}attach(e,t){this.counter.inc(t),this.done||this.change.attachWatcher(e)}async download(e,t){try{const r=await this.downloadAndUntar(e,t,this.signalCtl.signal);this.setDone(r),this.change.markChanged(`download of ${this.url} finished`)}catch(r){if(r instanceof $e||r instanceof j){this.setError(r),this.change.markChanged(`download of ${this.url} failed`),await he(this.path);return}throw r}}async downloadAndUntar(e,t,r){return await g.ensureDirExists(k.dirname(this.path)),await g.fileExists(this.path)?await gt(this.path):await e.withContent(this.url.toString(),{},{signal:r},async(i,d)=>{let s=i;if(t){const 
c=I.Transform.toWeb(Ee.createGunzip());s=i.pipeThrough(c,{signal:r})}return await g.createPathAtomically(this.logger,this.path,async c=>{await w.mkdir(c);const l=I.Writable.toWeb(le.extract(c));await s.pipeTo(l,{signal:r})}),d})}getUrl(){if(this.done)return{url:$n(this.signer,this.saveDir,g.notEmpty(this.path))};if(this.error)return{error:this.error}}setDone(e){this.done=!0,this.size=e}setError(e){this.error=String(e)}abort(e){this.signalCtl.abort(new $e(e))}}class $e extends Error{constructor(){super(...arguments);h(this,"name","URLAborted")}}async function gt(o){const e=await w.readdir(o,{withFileTypes:!0});return(await Promise.all(e.map(async r=>{const n=k.join(o,r.name);return r.isDirectory()?await gt(n):(await w.stat(n)).size}))).reduce((r,n)=>r+n,0)}async function he(o){await w.rm(o,{recursive:!0,force:!0})}class Kn{constructor(e,t,r,n,i={cacheSoftSizeBytes:1*1024*1024*1024,withGunzip:!0,nConcurrentDownloads:50}){h(this,"downloadHelper");h(this,"urlToDownload",new Map);h(this,"downloadQueue");h(this,"cache");this.logger=e,this.saveDir=r,this.signer=n,this.opts=i,this.downloadQueue=new g.TaskProcessor(this.logger,this.opts.nConcurrentDownloads),this.cache=new ke(this.opts.cacheSoftSizeBytes),this.downloadHelper=new Me(t)}getUrl(e,t){if(t===void 0)return f.Computable.make(i=>this.getUrl(e,i));const r=R.randomUUID();t.addOnDestroy(()=>this.releasePath(e,r));const n=this.getUrlNoCtx(e,t.watcher,r);return(n==null?void 0:n.url)===void 0&&t.markUnstable(`a path to the downloaded and untared archive might be undefined. The current result: ${n}`),n}getUrlNoCtx(e,t,r){const n=e.toString(),i=this.urlToDownload.get(n);if(i!==void 0)return i.attach(t,r),i.getUrl();const d=this.setNewTask(t,e,r);return this.downloadQueue.push({fn:async()=>this.downloadUrl(d,r),recoverableErrorPredicate:s=>!0}),d.getUrl()}getPathForBlockUI(e){if(!v.isBlockUIURL(e))throw new Error(`getPathForBlockUI: ${e} is invalid`);return Cn(this.signer,e,this.saveDir)}async downloadUrl(e,t){var r;await e.download(this.downloadHelper,this.opts.withGunzip),((r=e.getUrl())==null?void 0:r.url)!==void 0&&this.cache.addCache(e,t)}async releasePath(e,t){const r=e.toString(),n=this.urlToDownload.get(r);if(n!=null)if(this.cache.existsFile(n.path)){const i=this.cache.removeFile(n.path,t);await Promise.all(i.map(async d=>{await he(d.path),this.cache.removeCache(d),this.removeTask(d,`the task ${p.stringifyWithResourceId(d.info())} was removedfrom cache along with ${p.stringifyWithResourceId(i.map(s=>s.info()))}`)}))}else n.counter.dec(t)&&this.removeTask(n,`the task ${p.stringifyWithResourceId(n.info())} was removed from cache`)}async releaseAll(){this.downloadQueue.stop(),await Promise.all(Array.from(this.urlToDownload.entries()).map(async([e,t])=>{await he(t.path),this.cache.removeCache(t),this.removeTask(t,`the task ${p.stringifyWithResourceId(t.info())} was released when the driver was closed`)}))}setNewTask(e,t,r){const n=new Qn(this.logger,this.getFilePath(t),t,this.signer,this.saveDir);return n.attach(e,r),this.urlToDownload.set(t.toString(),n),n}removeTask(e,t){e.abort(t),e.change.markChanged(`task for url ${e.url} removed: ${t}`),this.urlToDownload.delete(e.url.toString())}getFilePath(e){const t=R.createHash("sha256").update(e.toString()).digest("hex");return k.join(this.saveDir,t)}}function ae(o,e){return`index://index/${encodeURIComponent(JSON.stringify({storageId:o,path:e}))}`}function Xn(o,e,t,r){const 
n={localPath:o,pathSignature:e.sign(o),sizeBytes:String(t),modificationTime:String(r)};return`upload://upload/${encodeURIComponent(JSON.stringify(n))}`}function Yn(o){const e=new URL(o);return E.parse(JSON.parse(decodeURIComponent(e.pathname.substring(1))))}function ei(o){const e=new URL(o);return Te.parse(JSON.parse(decodeURIComponent(e.pathname.substring(1))))}function ve(o){if(ii(o))return si(o);if(ti(o))return ni(o);g.assertNever(o)}const ft=/^local:\/\/(?<name>.*)\/(?<path>.*)$/;function ti(o){return ft.test(o)}function ri(o,e){return`local://${o}/${encodeURIComponent(e)}`}function ni(o){const e=o.match(ft);if(e==null)throw new Error(`Local list handle wasn't parsed: ${o}`);const{name:t,path:r}=e.groups;return{rootPath:decodeURIComponent(r),name:t,isRemote:!1}}const wt=/^remote:\/\/(?<name>.*)\/(?<resourceId>.*)$/;function ii(o){return wt.test(o)}function oi(o,e){return`remote://${o}/${BigInt(e)}`}function si(o){const e=o.match(wt);if(e==null)throw new Error(`Remote list handle wasn't parsed: ${o}`);const{name:t,resourceId:r}=e.groups;return{id:p.bigintToResourceId(BigInt(r)),type:ai(t),name:t,isRemote:!0}}function ai(o){return{name:`LS/${o}`,version:"1"}}async function mt(){const o=Fe.homedir();if(U.sep=="/")return[{name:"local",root:"/",initialPath:o}];{const t=U.parse(o).root.replaceAll(":\\","");try{return(await Nt.promisify($t.exec)("wmic logicaldisk get name")).stdout.split(`\r
- `).filter(i=>i.includes(":")).map(i=>i.trim().replaceAll(":","")).map(i=>{const d=i==t;return{name:`local_disk_${i}`,root:`${i}:\\`,initialPath:d?o:`${i}:\\`}})}catch{return[{name:`local_disk_${t}`,root:`${t}:\\`,initialPath:o}]}}}class Pe{constructor(e,t,r,n,i,d,s){this.logger=e,this.lsClient=t,this.storageIdToResourceId=r,this.signer=n,this.virtualStoragesMap=i,this.localProjectionsMap=d,this.openFileDialogCallback=s}async getLocalFileContent(e,t){const r=await this.tryResolveLocalFileHandle(e);if(t)throw new Error("Range request not yet supported.");return await w.readFile(r)}async getLocalFileSize(e){const t=await this.tryResolveLocalFileHandle(e);return(await w.stat(t)).size}async showOpenMultipleFilesDialog(e){const t=await this.openFileDialogCallback(!0,e);return t===void 0?{}:{files:await Promise.all(t.map(r=>this.getLocalFileHandle(r)))}}async showOpenSingleFileDialog(e){const t=await this.openFileDialogCallback(!1,e);return t===void 0?{}:{file:await this.getLocalFileHandle(t[0])}}async tryResolveLocalFileHandle(e){if(v.isImportFileHandleIndex(e)){const t=ei(e),r=this.localProjectionsMap.get(t.storageId);if(!r)throw new Error(`Storage ${t.storageId} is not mounted locally.`);return k.join(r.localPath,t.path)}else{const t=Yn(e);this.signer.verify(t.localPath,t.pathSignature,"Failed to validate local file handle signature.");const r=t.localPath,n=await w.stat(r,{bigint:!0});if(String(n.mtimeMs/1000n)!==t.modificationTime)throw new Error("File has changed since the handle was created.");return r}}async getLocalFileHandle(e){N(e);for(const r of this.localProjectionsMap.values())if(e.startsWith(r.localPath)){const n=r.localPath===""?e:k.relative(r.localPath,e);return ae(r.storageId,n)}const t=await w.stat(e,{bigint:!0});return Xn(e,this.signer,t.size,t.mtimeMs/1000n)}async getStorageList(){const e=[...this.virtualStoragesMap.values()].map(n=>({name:n.name,handle:ri(n.name,n.root),initialFullPath:n.initialPath})),r=Object.entries(this.storageIdToResourceId).map(([n,i])=>({name:n,handle:oi(n,i),initialFullPath:"",isInitialPathHome:!1})).filter(n=>n.name!=="root");return[...e,...r]}async listFiles(e,t){const r=ve(e);if(r.isRemote)return{entries:(await this.lsClient.list(r,t)).items.map(s=>({type:s.isDir?"dir":"file",name:s.name,fullPath:s.fullName,handle:ae(r.name,s.fullName)}))};k.sep==="/"&&t===""&&(t="/"),r.rootPath===""&&N(t);const n=k.isAbsolute(t)?t:k.join(r.rootPath,t),i=[];for await(const d of await w.opendir(n)){if(!d.isFile()&&!d.isDirectory())continue;const s=k.join(n,d.name);i.push({type:d.isFile()?"file":"dir",name:d.name,fullPath:s,handle:await this.getLocalFileHandle(s)})}return{entries:i}}async listRemoteFilesWithAdditionalInfo(e,t){const r=ve(e);if(!r.isRemote)throw new Error(`Storage ${r.name} is not remote`);return{entries:(await this.lsClient.list(r,t)).items.map(i=>({type:i.isDir?"dir":"file",name:i.name,fullPath:i.fullName,handle:ae(r.name,i.fullName),size:Number(i.size)}))}}async fileToImportHandle(e){throw new Error("Not implemented. 
This method must be implemented and intercepted in desktop preload script.")}static async init(e,t,r,n,i,d){const s=Ge(t,e);d||(d=await mt());for(const u of d)N(u.root);for(const u of n)u.localPath!==""&&N(u.localPath);const c=new Map(d.map(u=>[u.name,u])),l=new Map(n.map(u=>[u.storageId,u]));if(new Set([...c.keys(),...l.keys()]).size!==c.size+l.size)throw new Error("Intersection between local projection storage ids and virtual storages names detected.");return new Pe(e,s,await li(t),r,c,l,i)}}async function li(o){return o.withReadTx("GetAvailableStorageIds",async e=>{const t=await e.getResourceByName("LSProvider"),r=await e.getResourceData(t,!0);return ci(r)})}function ci(o){return Object.fromEntries(o.fields.filter(e=>e.type=="Dynamic"&&p.isNotNullResourceId(e.value)).map(e=>[e.name.substring(8),e.value]))}exports.ClientDownload=Oe;exports.ClientLogs=je;exports.ClientLs=Ae;exports.ClientProgress=_e;exports.ClientUpload=We;exports.DefaultVirtualLocalStorages=mt;exports.DownloadBlobToURLDriver=Wn;exports.DownloadDriver=Ue;exports.DownloadUrlDriver=Kn;exports.DownloadableBlobSnapshot=it;exports.ImportFileHandleData=sn;exports.ImportFileHandleIndexData=Te;exports.ImportFileHandleUploadData=E;exports.IndexResourceSnapshot=Ze;exports.LogsDriver=Zn;exports.LogsStreamDriver=Jn;exports.LsDriver=Pe;exports.MTimeError=ge;exports.NetworkError=we;exports.NoFileForUploading=me;exports.OnDemandBlobResourceSnapshot=qe;exports.UnexpectedEOF=fe;exports.UnknownStorageError=V;exports.Updater=Ke;exports.UploadDriver=Gn;exports.UploadResourceSnapshot=Je;exports.UploadTask=ct;exports.WrongLocalFileUrl=G;exports.WrongResourceTypeError=q;exports.createDownloadClient=Xr;exports.createLogsClient=Yr;exports.createLsFilesClient=Ge;exports.createUploadBlobClient=tn;exports.createUploadProgressClient=en;exports.getFullPath=ze;exports.getSize=ye;exports.isMyUpload=ht;exports.isResourceWasDeletedError=de;exports.isSignMatch=ut;exports.isUpload=A;exports.makeBlobImportSnapshot=pt;exports.makeDownloadableBlobSnapshot=ot;exports.newLocalStorageIdsToRoot=xe;exports.nonRecoverableError=Le;exports.parseLocalUrl=He;exports.uploadBlob=dt;exports.validateAbsolute=N;
+ export { ClientUpload, MTimeError, NetworkError, NoFileForUploading, UnexpectedEOF } from './clients/upload.js';
+ export { ClientProgress } from './clients/progress.js';
+ export { ClientDownload, UnknownStorageError, WrongLocalFileUrl, getFullPath, newLocalStorageIdsToRoot, parseLocalUrl } from './clients/download.js';
+ export { ClientLs } from './clients/ls_api.js';
+ export { ClientLogs } from './clients/logs.js';
+ export { createDownloadClient, createLogsClient, createLsFilesClient, createUploadBlobClient, createUploadProgressClient } from './clients/constructors.js';
+ export { DownloadDriver } from './drivers/download_blob/download_blob.js';
+ export { DownloadBlobToURLDriver } from './drivers/download_blob_url/driver.js';
+ export { DownloadableBlobSnapshot, makeDownloadableBlobSnapshot } from './drivers/download_blob_url/snapshot.js';
+ export { UploadDriver, makeBlobImportSnapshot } from './drivers/upload.js';
+ export { UploadTask, isMyUpload, isResourceWasDeletedError, isSignMatch, isUpload, nonRecoverableError, uploadBlob } from './drivers/upload_task.js';
+ export { LogsStreamDriver } from './drivers/logs_stream.js';
+ export { LogsDriver } from './drivers/logs.js';
+ export { DownloadUrlDriver } from './drivers/download_url/driver.js';
+ export { LsDriver } from './drivers/ls.js';
+ export { DefaultVirtualLocalStorages } from './drivers/virtual_storages.js';
+ export { Updater, WrongResourceTypeError } from './drivers/helpers/helpers.js';
+ export { ImportFileHandleData, ImportFileHandleIndexData, ImportFileHandleUploadData, IndexResourceSnapshot, OnDemandBlobResourceSnapshot, UploadResourceSnapshot, getSize } from './drivers/types.js';
+ export { validateAbsolute } from './helpers/validate.js';
  //# sourceMappingURL=index.js.map
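
For orientation, the new dist/index.js shown above replaces the previous single minified bundle with plain re-exports of the split per-module files. A minimal consumer-side sketch, assuming the package entry points resolve to these dist files (package.json is not part of this excerpt) and omitting constructor arguments, which are not visible in the diff:

import {
  DownloadDriver,        // re-exported from './drivers/download_blob/download_blob.js'
  UploadDriver,          // re-exported from './drivers/upload.js'
  LsDriver,              // re-exported from './drivers/ls.js'
  createDownloadClient,  // re-exported from './clients/constructors.js'
  createLogsClient,      // re-exported from './clients/constructors.js'
} from '@milaboratories/pl-drivers';

Deep imports of the individual dist modules may also resolve, but that depends on the package's exports map, which this diff does not show.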