cnvrg 1.6.35 → 1.9.8

@@ -0,0 +1,31 @@
+ module Cnvrg
+   class ConnectJobSsh
+     def initialize(job_id)
+       home_dir = File.expand_path('~')
+       config = YAML.load_file(home_dir+"/.cnvrg/config.yml")
+       @owner = config.to_h[:owner]
+       @job_id = job_id
+     rescue => e
+       @owner = ""
+       Cnvrg::Logger.log_info("cnvrg is not configured")
+     end
+
+     def start(username, password)
+       Cnvrg::API_V2.request("#{@owner}/job_ssh/#{@job_id}/start" , 'POST', {username: username, password: password})
+     end
+
+     def status()
+       Cnvrg::API_V2.request("#{@owner}/job_ssh/#{@job_id}/status" , 'GET', nil)
+     end
+
+     def run_portforward_command(pod_name, port, kubeconfig, namespace)
+       command = "kubectl"
+       if kubeconfig.present?
+         command = "kubectl --kubeconfig=#{kubeconfig}"
+       end
+       bashCommand = "#{command} -n #{namespace} port-forward #{pod_name} #{port}:22"
+       puts("\nrunning command #{bashCommand}")
+       `#{bashCommand}`
+     end
+   end
+ end
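The new ConnectJobSsh helper reads the owner from ~/.cnvrg/config.yml, registers SSH credentials for a job through API_V2, lets the caller poll the job's status, and finally shells out to kubectl to forward a local port to port 22 on the job's pod. A minimal usage sketch, assuming the gem is already loaded and configured; the job id, local port, and the fields read from the status response are placeholders, not part of this diff:

    job_ssh = Cnvrg::ConnectJobSsh.new("job-slug-123")          # hypothetical job id
    job_ssh.start("root", "secret-password")                    # register SSH credentials for the job
    status = job_ssh.status                                     # poll until the backend reports the pod
    pod    = status["result"]                                   # assumed response shape
    # forward localhost:2222 to port 22 on the job's pod
    job_ssh.run_portforward_command(pod["pod_name"], 2222, pod["kubeconfig"], pod["namespace"])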
@@ -128,6 +128,7 @@ module Cnvrg
       message = options["message"]
       cli.sync_data_new(new_branch, force, verbose, commit, all_files, tags, parallel, chunk_size, init, message)
     end
+
     desc 'data download', 'Download files from remote server'
     method_option :new_branch, :type => :boolean, :aliases => ["-nb"], :desc => "create new branch of commits", :default => false
     method_option :verbose, :type => :boolean, :aliases => ["-v"], :default => false
@@ -151,27 +152,52 @@ module Cnvrg
     method_option :read, :type => :boolean, :aliases => ["-r", "--read"], :default => false
     method_option :remote, :type => :boolean, :aliases => ["-h", "--remote"], :default => false
     method_option :relative, :type => :boolean, :aliases => ["-rel", "--relative"], :default => false
-
+     method_option :flatten, :type => :boolean, :aliases => ["-f", "--flatten"], :default => false
+     method_option :soft, :type => :boolean, :aliases => ["-s", "--soft"], :default => false, :hide => true
     def clone(dataset_url)
-       #test
       cli = Cnvrg::CLI.new()
       only_tree =options[:only_tree]
       commit =options[:commit]
       query =options[:query]
       read = options[:read]
       remote = options[:remote]
-       cli.clone_data(dataset_url, only_tree=only_tree,commit=commit, query=query, read=read, remote=remote, relative: options[:relative])
+       soft = options[:soft]
+       flatten = options[:flatten]
+       cli.clone_data(
+         dataset_url,
+         only_tree=only_tree,
+         commit=commit,
+         query=query,
+         read=read,
+         remote=remote,
+         flatten: flatten,
+         relative: options[:relative],
+         soft: soft
+       )
     end

     desc 'data verify DATASETS_TITLES', 'verify datasets', :hide => true
-     method_option :timeout, :type => :numeric, :aliases => ["-t", "--timeout"], :desc => "Time to wait before returning final answer", :default => 15
-
+     method_option :timeout, :type => :numeric, :aliases => ["-t", "--timeout"], :desc => "Time to wait before returning final answer", :default => nil
     def verify(*dataset_titles)
       cli = Cnvrg::CLI.new()
       timeout =options[:timeout]
       cli.verify_datasets(dataset_titles, timeout)
     end

+     desc 'data scan', 'lookup datasets', :hide => true
+     def scan()
+       cli = Cnvrg::CLI.new()
+       cli.scan_datasets()
+     end
+
+     desc "data block DATASET_TITLES", 'verifying that datasets exists', hide: true
+     def block(*dataset_slugs)
+       not_verified = true
+       while not_verified
+         not_verified = dataset_slugs.select{|slug| not Dataset.verify_dataset(slug)}.present?
+       end
+     end
+
     desc 'data set --url=DATASET_URL', 'Set dataset url to other url'
     method_option :url, :type => :string, :aliases => ["--url"], :default => ''
     def set
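clone now forwards two additional options to CLI#clone_data, --flatten and the hidden --soft flag, and data verify's --timeout default changes from 15 to nil. A hedged sketch of the equivalent programmatic call, keeping the same positional argument order the command builds; the URL and values are placeholders:

    cli = Cnvrg::CLI.new
    cli.clone_data(
      "https://app.cnvrg.io/my-org/datasets/my-dataset",   # placeholder dataset URL
      false,                                               # only_tree
      nil,                                                 # commit
      nil,                                                 # query
      false,                                               # read
      false,                                               # remote
      flatten: true,                                       # new: download files without their folder prefix
      relative: false,
      soft: false                                          # new, hidden; forwarded to clone_data as-is
    )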
@@ -186,22 +212,48 @@ module Cnvrg
     desc 'data put DATASET_URL FILES_PREFIX', 'Upload selected files from local dataset directory to remote server'
     method_option :dir, :type => :string, :aliases => ["-d", "--dir"], :default => ''
     method_option :commit, :type => :string, :aliases => ["-c", "--commit"], :default => ''
+     method_option :force, :type => :boolean, :aliases => ["-f","--force"], :default => false
+     method_option :threads, :type => :numeric, :aliases => ["-t","--threads"], :default => 15
+     method_option :chunk_size, :type => :numeric, :aliases => ["-cs","--chunk"], :default => 1000
     method_option :message, :type => :string, :aliases => ["--message"], :desc => "create commit with message", :default => nil
     def put(dataset_url, *files)
       cli = Cnvrg::CLI.new()
       dir = options[:dir]
+       force = options["force"]
       commit = options[:commit]
       message = options[:message]
+       threads = options[:threads]
+       chunk_size = options[:chunk_size]
+       cli.data_put(
+         dataset_url,
+         files: files,
+         dir: dir,
+         commit: commit,
+         force: force,
+         threads: threads,
+         chunk_size: chunk_size,
+         message: message
+       )
+     end

-       cli.data_put(dataset_url, files: files, dir: dir, commit: commit, message: message)
+     desc 'data rm DATASET_URL FILES_PREFIX', 'Delete selected files from remote server'
+     method_option :message, :type => :string, :aliases => ["--message"], :desc => "create commit with message", :default => nil
+     def rm(dataset_url, *regex_list)
+       cli = Cnvrg::CLI.new()
+       message = options[:message]
+       cli.data_rm(dataset_url, regex_list: regex_list, message: message)
     end

     desc 'data clone_query --query=QUERY_SLUG DATASET_URL', 'Clone dataset with specific query'
     method_option :query, :type => :string, :aliases => ["-q", "--query"], :default => nil
+     method_option :soft, :type => :boolean, :aliases => ["-s", "--soft"], :default => false, :hide => true
+     method_option :flatten, :type => :boolean, :aliases => ["-f", "--flatten"], :default => false
     def clone_query(dataset_url)
       cli = Cnvrg::CLI.new()
-       query =options[:query]
-       cli.clone_data_query(dataset_url,query=query)
+       query = options[:query]
+       flatten = options[:flatten]
+       soft =options[:soft]
+       cli.clone_data_query(dataset_url,query=query, flatten, soft: soft)
     end

     desc 'data delete DATASET_SLUG', 'Delete dataset'
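data put gains --force, --threads and --chunk_size and now routes everything through CLI#data_put, while the new data rm passes a list of path patterns to CLI#data_rm. A sketch of the equivalent calls with placeholder values; the numeric values shown match the new CLI defaults:

    cli = Cnvrg::CLI.new
    cli.data_put(
      "https://app.cnvrg.io/my-org/datasets/my-dataset",   # placeholder dataset URL
      files: ["images/", "labels.csv"],
      dir: "",
      commit: "",
      force: false,
      threads: 15,        # --threads default
      chunk_size: 1000,   # --chunk default
      message: "nightly refresh"
    )
    cli.data_rm(
      "https://app.cnvrg.io/my-org/datasets/my-dataset",
      regex_list: ["tmp/*", "old_labels.csv"],
      message: "remove scratch files"
    )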
@@ -217,12 +269,13 @@ module Cnvrg
       cli.list_dataset()

     end
-     desc 'data commits', 'List all commits for a current dataset'

-     def commits()
+     desc 'data commits URL/SLUG', 'List all commits for a given dataset'
+     method_option :commit_sha1, :type => :string, :aliases => ["-c", "--commit"], :default => nil
+     def commits(dataset_url)
       cli = Cnvrg::CLI.new()
-       cli.list_dataset_commits()
-
+       commit_sha1 = options[:commit_sha1]
+       cli.list_dataset_commits(dataset_url, commit_sha1:commit_sha1)
     end

     desc 'data files DATASET_URL', 'Show list of dataset files'
@@ -1,5 +1,5 @@
 require 'mimemagic'
- require 'aws-sdk'
+ require 'aws-sdk-s3'
 require 'URLcrypt'
 require 'parallel'
 require 'fileutils'
@@ -25,7 +25,7 @@ module Cnvrg

     def refresh_storage_token
       current_time = Time.current
-       if current_time - @token_issue_time > 3.hours
+       if current_time - @token_issue_time > 1.hours
         @downloader = @dataset.get_storage_client
         @token_issue_time = Time.current
       end
@@ -55,6 +55,26 @@ module Cnvrg
       paths.map{|p| p.gsub(/^\.\//, '')}
     end

+     def get_files_and_folders(paths)
+       files_and_folders = {}
+       paths.each do |file|
+         if File.exists? file
+           if File.directory? file
+             Dir.glob("#{file}/**/*").select do |f|
+               files_and_folders["#{f}/"] = "folder" if File.directory? f
+               files_and_folders[f] = "file" if File.file? f
+             end
+             files_and_folders["#{file}/"] = "folder"
+           else
+             files_and_folders[file] = "file"
+           end
+           next
+         end
+         raise SignalException.new(1, "Cant find file #{file}") unless File.exists? "#{Dir.pwd}/#{file}"
+       end
+       return files_and_folders
+     end
+
     def check_files_sha1(files, resolver, tag)
       conflicts = 0
       files.each do |file|
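The new get_files_and_folders expands each requested path into a flat hash that tags every entry as a "file" or a "folder" (folders are keyed with a trailing slash) and raises if a path exists neither as given nor relative to the working directory. An illustration of the returned shape, assuming an instance of this class and a local directory data/ that holds one nested file:

    datafiles.get_files_and_folders(["data", "labels.csv"])
    # => {
    #      "data/images/"        => "folder",
    #      "data/images/cat.png" => "file",
    #      "data/"               => "folder",
    #      "labels.csv"          => "file"
    #    }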
@@ -110,18 +130,36 @@ module Cnvrg
       end
     end

-
+     # This is for backwards compatibility only and should be removed in future versions:
     def put_commit(commit_sha1)
-       response = Cnvrg::API.request("#{@base_resource}/commit/latest", 'PUT', {commit_sha1: commit_sha1})
+       response = Cnvrg::API.request(
+         "#{@base_resource}/commit/latest",
+         'PUT',
+         {
+           commit_sha1: commit_sha1,
+           ignore: true # tells the new server to ignore this api call since its coming from the new CLI
+         }
+       )
       if response.present?
         msg = response['result']
       else
-         msg = "Cant save changes in the dataset"
+         msg = "Can't save changes in the dataset"
       end

       Cnvrg::Result.new(Cnvrg::CLI.is_response_success(response, false), msg)
     end

+     def create_progressbar(title, total)
+       return ProgressBar.create(
+         :title => title,
+         :progress_mark => '=',
+         :format => "%b>>%i| %p%% %t",
+         :starting_at => 0,
+         :total => total,
+         :autofinish => true
+       )
+     end
+

     def upload_multiple_files(commit_sha1, tree, threads: ParallelThreads, force: false, new_branch: false, prefix: '', partial_commit: nil, total: nil)
       begin
@@ -158,6 +196,9 @@ module Cnvrg
         end

         files = results['files']
+
+         progressbar.progress += tree.keys.length - files.length if progressbar.present?
+         progress_semaphore = Mutex.new
         upload_error_files = []
         @temp_upload_progressbar.progress += tree.keys.length - files.length if @temp_upload_progressbar.present?
         Parallel.map((files.keys), in_threads: threads) do |k|
@@ -169,8 +210,9 @@ module Cnvrg
             tree.except!(k)
             Cnvrg::Logger.log_error_message("Error while upload single file #{o["path"]}")
           end
-           @temp_upload_progressbar.progress += 1 if @temp_upload_progressbar.present?
+           progress_semaphore.synchronize { progressbar.progress += 1 if progressbar.present? }
         end
+
         blob_ids = files.values.map {|f| f['bv_id']}
         if blob_ids.present?
           dirs = tree.keys.select {|k| tree[k].nil?} || []
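Because Parallel.map runs the per-file uploads on several threads at once, the old bare progressbar increment could lose updates; the new progress_semaphore serializes that read-modify-write. The pattern in isolation, as a sketch outside the gem:

    require 'parallel'

    counter   = 0
    semaphore = Mutex.new

    Parallel.map(1..100, in_threads: 8) do |i|
      # ... per-item work ...
      semaphore.synchronize { counter += 1 }   # only one thread updates the counter at a time
    end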
@@ -190,17 +232,231 @@ module Cnvrg
       end
     end

-     def upload_single_file(file)
+     def delete_multiple_files(commit_sha1, regex_list)
+       begin
+         Cnvrg::Logger.log_info("Sending Delete Files request")
+         resp = Cnvrg::API.request(
+           @base_resource + "delete_files",
+           'POST_JSON',
+           {
+             commit_sha1: commit_sha1,
+             regex_list: regex_list,
+           }
+         )
+         unless Cnvrg::CLI.is_response_success(resp, false)
+           Cnvrg::Logger.log_method(bind: binding)
+           raise Exception.new("Got an error message from server, #{resp.try(:fetch, "message")}")
+         end
+         Cnvrg::Logger.log_info("Delete Files request Successful")
+         return resp["files"], resp["folders"], resp["job_id"]
+       rescue => e
+         Cnvrg::Logger.log_method(bind: binding)
+         Cnvrg::Logger.log_error(e)
+         raise e
+       end
+     end
+
+     def delete_file_chunk(commit_sha1, regex_list, chunk_size, offset)
+       begin
+         resp = Cnvrg::API.request(
+           @base_resource + "delete_files_by_chunk",
+           'POST_JSON',
+           {
+             commit_sha1: commit_sha1,
+             regex_list: regex_list,
+             chunk_size: chunk_size,
+             offset: offset
+           }
+         )
+         unless Cnvrg::CLI.is_response_success(resp, false)
+           Cnvrg::Logger.log_method(bind: binding)
+           raise Exception.new("Got an error message from server, #{resp.try(:fetch, "message")}")
+         end
+         return resp["total_changes"]
+       rescue => e
+         Cnvrg::Logger.log_method(bind: binding)
+         Cnvrg::Logger.log_error(e)
+         raise e
+       end
+     end
+
+     def get_delete_progress(commit_sha1, job_id)
+       begin
+         resp = Cnvrg::API.request(
+           @base_resource + "get_delete_progress",
+           'POST_JSON',
+           {
+             commit_sha1: commit_sha1,
+             job_id: job_id
+           }
+         )
+         unless Cnvrg::CLI.is_response_success(resp, false)
+           Cnvrg::Logger.log_method(bind: binding)
+           raise Exception.new("Got an error message from server, #{resp.try(:fetch, "message")}")
+         end
+         return resp["total_deleted"]
+       rescue => e
+         Cnvrg::Logger.log_method(bind: binding)
+         Cnvrg::Logger.log_error(e)
+         raise e
+       end
+     end
+
+     def request_upload_files(commit_sha1, tree, force, new_branch, partial_commit)
+       retry_count = 0
+       loop do
+         upload_resp = Cnvrg::API.request(@base_resource + "upload_files", 'POST_JSON', {
+           commit_sha1: commit_sha1,
+           tree: tree,
+           force: force,
+           is_branch: new_branch,
+           partial_commit: partial_commit
+         })
+         if not (Cnvrg::CLI.is_response_success(upload_resp, false))
+           #Cnvrg::Logger.log_method(bind: binding)
+           retry_count += 1
+
+           puts "Failed request upload files: #{Time.current}"
+           puts upload_resp
+
+           if retry_count > 5
+             raise Exception.new("Got an error message from server, #{upload_resp.try(:fetch, "message")}")
+           end
+           sleep 5
+           next
+         end
+         return upload_resp['result'].with_indifferent_access
+       end
+
+     end
+
+     def upload_multiple_files_optimized(files, commit_sha1, threads: ParallelThreads, chunk_size: 1000, force: false, new_branch: false, prefix: '', partial_commit: nil)
+       cli = CLI.new
+       cli.log_message("Using #{threads} threads with chunk size of #{chunk_size}.", Thor::Shell::Color::GREEN)
+
+       progressbar = create_progressbar("Upload Progress", files.size)
+       cli = CLI.new
+
+       # Vars to handle the parallelism
+       progress_mutex = Mutex.new
+       file_queue = Queue.new
+       progress_queue = Queue.new
+       worker_threads = []
+
+       # Vars to keep track of uploaded files and directories
+       dirs = []
+       uploaded_files = []
+
+       begin
+         # Init working threads that handle the upload of the files:
+         threads.times do |i|
+           worker_threads[i] = Thread.new do
+             # wait for file_queue.close to break the loop
+             while file = file_queue.deq
+               success = upload_single_file(cli, file)
+               file[:success] = success
+               if not success
+                 cli.log_message("Error while uploading file: #{file[:absolute_path]}", Thor::Shell::Color::RED)
+                 Cnvrg::Logger.log_error_message("Error while upload single file #{file["path"]}")
+               end
+               progress_queue << file
+             end
+           end
+         end
+
+         # init the thread that handles the file upload progress and saving them in the server
+         progress_thread = Thread.new do
+           loop do
+             file = progress_queue.deq(non_block: true) rescue nil # to prevent deadlocks
+             unless file.nil?
+               progress_mutex.synchronize {
+                 progressbar.progress += 1
+                 uploaded_files.append(file) if file[:success]
+               }
+
+               if uploaded_files.size == chunk_size or progressbar.finished?
+                 refresh_storage_token
+                 # puts "progress: #{progress_queue.length}"
+                 # puts "files: #{file_queue.length}"
+                 Cnvrg::Logger.info("Sending Upload files save")
+                 blob_ids = uploaded_files.map {|f| f['bv_id']}
+                 upload_resp = Cnvrg::API.request(@base_resource + "upload_files_save", "POST", {commit: commit_sha1, blob_ids: blob_ids, dirs: dirs})
+                 unless Cnvrg::CLI.is_response_success(upload_resp, false)
+                   Cnvrg::Logger.log_method(bind: binding)
+                   raise Exception.new("Got an error message from server, #{upload_resp.try(:fetch, "message")}")
+                 end
+                 # cli.log_message("Saved file chunk to server", Thor::Shell::Color::GREEN)
+                 uploaded_files = []
+                 dirs = []
+               end
+             else
+               sleep(0.1)
+             end
+
+             if progressbar.finished?
+               # puts "finished"
+               file_queue.close()
+               progress_queue.close()
+               Thread.exit
+             end
+           end
+         end
+
+         file_chunks = files.each_slice(chunk_size).to_a
+         # Fetch the required files from the server:
+         Parallel.map((file_chunks), in_threads: 10) do |files_chunk|
+
+           tree = @dataset.generate_chunked_idx(files_chunk, prefix: prefix)
+           results = request_upload_files(commit_sha1, tree, force, new_branch, partial_commit)
+
+           # puts "Got #{results['files'].size} files to upload from #{files_chunk.size} files"
+
+           if results['files'].blank?
+             progress_mutex.synchronize { progressbar.progress += tree.keys.length }
+             next
+           end
+
+           # Handle directories:
+           new_dirs = tree.keys.select {|k| tree[k].nil?}
+           dirs += new_dirs
+
+           files_to_upload = results['files']
+           progress_mutex.synchronize {
+             progressbar.progress += tree.keys.length - files_to_upload.length
+           }
+
+           files_to_upload.keys.each do |key|
+             while file_queue.size > 5000
+               sleep(0.1)
+             end
+             file_queue.push tree[key].merge(files_to_upload[key])
+           end
+         end
+
+         progress_thread.join()
+         worker_threads.each(&:join)
+
+       rescue => e
+         puts e
+         Cnvrg::Logger.log_method(bind: binding)
+         Cnvrg::Logger.log_error(e)
+         raise e
+       end
+     end
+
+     def upload_single_file(cli, file)
+       success = false
       begin
         file = file.as_json
         Cnvrg::Logger.log_info("Uploading #{file["absolute_path"]}")
         @downloader.safe_upload(file["path"], file["absolute_path"])
+         success = true
         Cnvrg::Logger.log_info("#{file["absolute_path"]} uploaded.")
       rescue => e
         Cnvrg::Logger.log_error_message("Error while upload single file #{file["path"]}")
         Cnvrg::Logger.log_error(e)
-         return false
       end
+       success
     end

     def upload_file(absolute_path, relative_path, commit_sha1)
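upload_multiple_files_optimized splits the upload into three roles: chunker threads that ask the server (via request_upload_files) which files it still needs and push them onto file_queue, worker threads that pop from file_queue, upload, and report into progress_queue, and a single progress thread that advances the bar and flushes upload_files_save batches. A stripped-down sketch of that queue topology, with the API and storage calls replaced by stand-ins:

    # Stand-in pipeline only; the sleeps and puts replace the gem's storage/API calls.
    files      = (1..25).map { |i| { path: "file_#{i}" } }
    file_queue = Queue.new
    done_queue = Queue.new

    workers = 4.times.map do
      Thread.new do
        while (file = file_queue.deq)            # deq returns nil once the queue is closed
          sleep(0.01)                            # stand-in for the real storage upload
          done_queue << file
        end
      end
    end

    progress = Thread.new do
      finished = 0
      batch = []
      while finished < files.size
        batch << done_queue.deq
        finished += 1
        if batch.size == 10 || finished == files.size
          puts "saving batch of #{batch.size} files"   # stand-in for the upload_files_save request
          batch.clear
        end
      end
      file_queue.close                           # lets the workers drain and exit
    end

    files.each { |f| file_queue << f }           # the real code feeds this from server responses
    progress.join
    workers.each(&:join)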
@@ -869,45 +1125,57 @@ module Cnvrg
         return false
       end
     end
+
     def revoke_download(tar_files, extracted_files)
       begin
-
         FileUtils.rm_rf(tar_files) unless (tar_files.nil? or tar_files.empty?)
         FileUtils.rm_rf(extracted_files) unless (extracted_files.nil? or extracted_files.empty?)
-
       rescue => e
         return false
       end
-
       return true
-
     end
+
     def delete_commit(commit_sha1)
       response = Cnvrg::API.request("#{base_resource}/commit/#{commit_sha1}", 'DELETE')
       Cnvrg::CLI.is_response_success(response, true)
       return response
     end
+
     def get_commit(commit_sha1)
       response = Cnvrg::API.request("#{base_resource}/commit/#{commit_sha1}", 'GET')
       Cnvrg::CLI.is_response_success(response, true)
       return response
     end

-     def start_commit(new_branch,force=false,delete_commit=nil, chunks: 0, dataset: @dataset, message:nil)
+     def start_commit(new_branch, force=false, chunks: 0, dataset: @dataset, message:nil)
       begin
         #if we are pushing with force or to branch we dont need to send current/next commit cause we want to
         # create a new commit.
         idx = (force || new_branch) ? {} : dataset.get_idx
         commit = idx[:commit]
         next_commit = idx[:next_commit]
-         response = Cnvrg::API.request("#{base_resource}/commit/start", 'POST', {dataset_slug: @dataset_slug, new_branch: new_branch,force:force,
-           username: @owner,current_commit: commit, next_commit: next_commit, total_chunks: chunks, message: message})
-         Cnvrg::CLI.is_response_success(response, true)
-         return response
+         response = Cnvrg::API.request(
+           "#{base_resource}/commit/start",
+           'POST',
+           {
+             dataset_slug: @dataset_slug,
+             new_branch: new_branch,
+             force:force,
+             username: @owner,
+             current_commit: commit,
+             next_commit: next_commit,
+             total_chunks: chunks,
+             message: message
+           }
+         )
+         Cnvrg::CLI.is_response_success(response, true)
+         return response
       rescue => e
         return false
       end
     end
+
     def last_valid_commit()
       begin
         #if we are pushing with force or to branch we dont need to send current/next commit cause we want to
@@ -920,15 +1188,24 @@ module Cnvrg
       end
     end

-     def end_commit(commit_sha1,force, success: true, uploaded_files: 0 )
+     def end_commit(commit_sha1, force, success: true, uploaded_files: 0, commit_type: nil)
       begin
-         response = Cnvrg::API.request("#{base_resource}/commit/end", 'POST', {commit_sha1: commit_sha1,force:force, success: success, uploaded_files: uploaded_files})
+         response = Cnvrg::API.request(
+           "#{base_resource}/commit/end",
+           'POST',
+           {
+             commit_sha1: commit_sha1,
+             force:force,
+             success: success,
+             uploaded_files: uploaded_files,
+             commit_type: commit_type
+           }
+         )
         Cnvrg::CLI.is_response_success(response, true)
         return response
       rescue => e
         return false
       end
-
     end

     def end_commit_tar(commit_sha1, cur_idx)
@@ -1011,11 +1288,17 @@ module Cnvrg
       end
     end

-     def generate_parallel_idx
-
+     def last_valid_commit()
+       begin
+         response = Cnvrg::API.request("#{base_resource}/last_valid_commit", 'GET')
+         Cnvrg::CLI.is_response_success(response, true)
+         return response
+       rescue => e
+         return false
+       end
     end

-     def download_multiple_files_s3(files, project_home, conflict: false, progressbar: nil, read_only:false)
+     def download_multiple_files_s3(files, project_home, conflict: false, progressbar: nil, read_only:false, flatten: false)
       begin
         refresh_storage_token
         parallel_options = {
@@ -1024,22 +1307,24 @@ module Cnvrg
         }
         Parallel.map(files["keys"], parallel_options) do |f|
           begin
-             local_path = @dataset.local_path + '/' + f['name']
-             Cnvrg::Logger.log_info("Downloading #{local_path}")
-             progressbar.progress += 1 if progressbar.present?
-             if local_path.end_with? "/"
-               @downloader.mkdir(local_path, recursive: true)
-               next
-             end
-             # blob
-             local_path = "#{local_path}.conflict" if conflict
-             storage_path = f["path"]
-             if File.exists? local_path
-               Cnvrg::Logger.log_info("Trying to download #{local_path} but its already exists, skipping..")
-               next
-             end
-             resp = @downloader.download(storage_path, local_path)
-             Cnvrg::Logger.log_info("Download #{local_path} success resp: #{resp}")
+             file_path = f['name']
+             file_path = File.basename(f['name']) if flatten
+             local_path = @dataset.local_path + '/' + file_path
+             Cnvrg::Logger.log_info("Downloading #{local_path}")
+             progressbar.progress += 1 if progressbar.present?
+             if local_path.end_with? "/"
+               @downloader.mkdir(local_path, recursive: true)
+               next
+             end
+             # blob
+             local_path = "#{local_path}.conflict" if conflict
+             storage_path = f["path"]
+             if File.exists? local_path
+               Cnvrg::Logger.log_info("Trying to download #{local_path} but its already exists, skipping..")
+               next
+             end
+             resp = @downloader.download(storage_path, local_path)
+             Cnvrg::Logger.log_info("Download #{local_path} success resp: #{resp}")
           rescue => e
             Cnvrg::Logger.log_error(e)
           end