cnvrg 1.11.17 → 1.11.26

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 7fa5b2801b29b6714aedcba33ae817bbf23e97f567d5dbb26f472c26b0483b95
- data.tar.gz: 3b2993cd78a012f5252f9fd98ef2621d976c94c36b26a197557ca6c4ea6ad923
+ metadata.gz: d5b1b849c4b05132d0dbdd9f23e767ecd0080b7ae01a96a9aad5bd5b0c7327e6
+ data.tar.gz: e4d57db1c3dede3fe7907ade6be0787d950d5b17db380c7032b41a7df12de577
  SHA512:
- metadata.gz: f011cb9515169650552aec66df27f5eac692f303630411ef41ae62b25975190b1411d0d02f1bd54a8afe2f7955b6bdf24a62b18201db4c875d4189dfcf5a5e75
- data.tar.gz: a655213fd5f2cbd52fc772b8f4fb2249f8ba21af51f8afbd69e330f1c1f792f4bea4b133a7cf9d015f1cdff853e4dba253a3c163206ddbd4da8c425a948494d3
+ metadata.gz: 5a8755391132d2f30c2a7c35ab4fb3c12bbb3a6f3e29df282ca884c4a8917c03944e45639b5b03f9b1a576d65c7d57c9f3323d969271cff15ce973a0f5b42b66
+ data.tar.gz: 0e22fb319eee5086097ed6108c21e333048ac443b9a1b4813475eb1932aa0857aba97722cdef42a9c4fe0d26784c8a95efd628e5ac4def95f785458750851b95
data/cnvrg.gemspec CHANGED
@@ -23,8 +23,8 @@ Gem::Specification.new do |spec|
  spec.add_development_dependency 'rspec', '~> 3.0'
  spec.add_development_dependency 'vcr', '~> 3.0'
  spec.add_development_dependency 'aruba'
- spec.add_development_dependency 'pry'
-
+ spec.add_development_dependency 'pry'
+
  spec.add_runtime_dependency 'mimemagic', '~> 0.3.1','>=0.3.2'
  spec.add_runtime_dependency 'faraday', '~> 0.15.2'
  spec.add_runtime_dependency 'netrc', '~> 0.11.0'
data/lib/cnvrg/api.rb CHANGED
@@ -31,6 +31,10 @@ module Cnvrg
  end
  def self.request(resource, method = 'GET', data = {}, parse_request = true)
  resource = URI::encode resource
+
+ # We need to remove all double slashes from the URL to work with the proxy
+ resource = resource.gsub(/[\/]{2,}/, "/").gsub("https:/", "https://").gsub("http:/", "http://")
+
  begin
  n = Netrc.read
  rescue => e
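
For reference, the added normalization collapses any run of slashes and then restores the scheme separator. A minimal sketch of the same gsub chain, wrapped in a hypothetical helper that is not part of the gem (the URL is illustrative):

    # Hypothetical helper mirroring the gsub chain added above.
    def normalize_resource(resource)
      resource.gsub(/[\/]{2,}/, "/").gsub("https:/", "https://").gsub("http:/", "http://")
    end

    normalize_resource("https://app.cnvrg.io//api//v1//users")
    # => "https://app.cnvrg.io/api/v1/users"
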
data/lib/cnvrg/cli.rb CHANGED
@@ -858,7 +858,7 @@ module Cnvrg
  method_option :read, :type => :boolean, :aliases => ["-r", "--read"], :default => false
  method_option :remote, :type => :boolean, :aliases => ["-h", "--remote"], :default => false
  method_option :soft, :type => :boolean, :aliases => ["-s", "--soft"], :default => false, :hide => true
- def clone_data(dataset_url, only_tree=false, commit=nil, query=nil, read=false, remote=false, flatten: false, relative: false, soft: false, threads: 15)
+ def clone_data(dataset_url, only_tree=false, commit=nil, query=nil, read=false, remote=false, flatten: false, relative: false, soft: false, threads: 15, cache_link: false)
  begin
  verify_logged_in(false)
  log_start(__method__, args, options)
@@ -904,7 +904,7 @@ module Cnvrg

  commit = response["result"]["commit"]
  files_count = response["result"]["file_count"]
- files = @files.get_clone_chunk(commit: commit)
+ files = @files.get_clone_chunk(commit: commit, cache_link: cache_link)
  downloaded_files = 0
  progressbar = ProgressBar.create(:title => "Download Progress",
  :progress_mark => '=',
@@ -917,7 +917,7 @@ module Cnvrg

  while files['keys'].length > 0
  Cnvrg::Logger.log_info("download multiple files, #{downloaded_files.size} files downloaded")
- @files.download_multiple_files_s3(files, @dataset.local_path, progressbar: progressbar, read_only: read, flatten: flatten, threads: threads)
+ @files.download_multiple_files_s3(files, @dataset.local_path, progressbar: progressbar, read_only: read, flatten: flatten, threads: threads, cache_link: cache_link)

  downloaded_files += files['keys'].length
  files = @files.get_clone_chunk(commit: commit, latest_id: files['latest'])
@@ -1201,15 +1201,18 @@ module Cnvrg
  end

  desc '', '', :hide => true
- def data_put(dataset_url, files: [], dir: '', commit: '', chunk_size: 1000, force: false, override: false, threads: 15, message: nil)
+ def data_put(dataset_url, files: [], dir: '', commit: '', chunk_size: 1000, force: false, override: false, threads: 15, message: nil, auto_cache: false, external_disk: nil)
  begin
  verify_logged_in(false)
  log_start(__method__, args, options)
-
+ if auto_cache && external_disk.blank?
+ raise SignalException.new(1, "for auto caching external disk is required")
+ end
  owner, slug = get_owner_slug(dataset_url)
  @dataset = Dataset.new(dataset_info: {:owner => owner, :slug => slug})
  @datafiles = Cnvrg::Datafiles.new(owner, slug, dataset: @dataset)
  @files = @datafiles.verify_files_exists(files)
+ @files = @files.uniq { |t| t.gsub('./', '')}

  if @files.blank?
  raise SignalException.new(1, "Cant find files to upload, exiting.")
@@ -1227,7 +1230,7 @@ module Cnvrg
  Cnvrg::Logger.info("Put files in latest commit")
  response = @datafiles.last_valid_commit()
  unless response #means we failed in the start commit.
- raise SignalException.new(1, "Cant put files into commit:#{commit}, check the dataset id and commitc")
+ raise SignalException.new(1, "Cant put files into commit:#{commit}, check the dataset id and commit")
  end
  @commit = response['result']['sha1']
  else
@@ -1253,7 +1256,7 @@ module Cnvrg
  raise SignalException.new(1, res.msg)
  end
  Cnvrg::Logger.info("Saving commit on server")
- res = @datafiles.end_commit(@commit,force, success: true, commit_type: "put")
+ res = @datafiles.end_commit(@commit,force, success: true, commit_type: "put", auto_cache: auto_cache, external_disk: external_disk)
  msg = res['result']
  response = Cnvrg::Result.new(Cnvrg::CLI.is_response_success(res, true), msg)
  unless response.is_success?
@@ -1261,19 +1264,25 @@ module Cnvrg
  end

  log_message("Uploading files finished Successfully", Thor::Shell::Color::GREEN)
+ if msg['cache_error'].present?
+ log_message("Couldn't cache commit: #{msg['cache_error']}", Thor::Shell::Color::YELLOW)
+ end
  rescue SignalException => e
  log_message(e.message, Thor::Shell::Color::RED)
  return false
  end
  end

-
  desc '', '', :hide => true
- def data_rm(dataset_url, regex_list: [], commit: '', message: nil)
+ def data_rm(dataset_url, regex_list: [], commit: '', message: nil, auto_cache: false, external_disk: nil)
  begin
  verify_logged_in(false)
  log_start(__method__, args, options)

+ if auto_cache && external_disk.blank?
+ raise SignalException.new(1, "for auto caching external disk is required")
+ end
+
  owner, slug = get_owner_slug(dataset_url)
  @dataset = Dataset.new(dataset_info: {:owner => owner, :slug => slug})
  @datafiles = Cnvrg::Datafiles.new(owner, slug, dataset: @dataset)
@@ -1309,7 +1318,7 @@ module Cnvrg
  offset += chunk_size
  end

- res = @datafiles.end_commit(@commit,false, success: true)
+ res = @datafiles.end_commit(@commit,false, success: true, auto_cache: auto_cache, external_disk: external_disk)
  msg = res['result']
  response = Cnvrg::Result.new(Cnvrg::CLI.is_response_success(res, true), msg)
  unless response.is_success?
@@ -1317,6 +1326,9 @@ module Cnvrg
  end

  log_message("Deleting files finished Successfully", Thor::Shell::Color::GREEN)
+ if msg['cache_error'].present?
+ log_message("Couldn't cache commit: #{msg['cache_error']}", Thor::Shell::Color::YELLOW)
+ end
  rescue SignalException => e
  log_message(e.message, Thor::Shell::Color::RED)
  return false
@@ -2308,7 +2320,6 @@ module Cnvrg
  @project = Project.new(get_project_home)
  chunk_size = chunk_size ? chunk_size : options["chunk_size"]

-
  # Enable local/experiment exception logging
  suppress_exceptions = suppress_exceptions ? suppress_exceptions : options[:suppress_exceptions]
  if in_exp
@@ -2346,7 +2357,6 @@ module Cnvrg
  log_message("#{check} Project is up to date", Thor::Shell::Color::GREEN, (((options["sync"] or sync) and !direct) ? false : true))
  return true
  end
- force = true
  end

  if ignore.nil? or ignore.empty?
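
A minimal usage sketch of the new caching keywords on data_put, as wired up in data.rb below (the dataset URL and disk title are made up); when auto_cache is set without external_disk, the method now raises a SignalException before any upload starts:

    cli = Cnvrg::CLI.new
    cli.data_put(
      "https://app.cnvrg.io/my-org/datasets/my-dataset",  # made-up dataset URL
      files: ["data/train.csv"],
      auto_cache: true,
      external_disk: "nfs-disk-1"                         # required whenever auto_cache is true
    )
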
data/lib/cnvrg/data.rb CHANGED
@@ -81,7 +81,6 @@ module Cnvrg
  end
  end

-
  desc "data upload", "Upload files from local dataset directory to remote server"
  method_option :verbose, :type => :boolean, :aliases => ["-v"], :default => false
  method_option :new_branch, :type => :boolean, :aliases => ["-nb"], :desc => "create new branch of commits"
@@ -155,6 +154,7 @@ module Cnvrg
  method_option :flatten, :type => :boolean, :aliases => ["-f", "--flatten"], :default => false
  method_option :soft, :type => :boolean, :aliases => ["-s", "--soft"], :default => false, :hide => true
  method_option :threads, :type => :numeric, :aliases => ["--threads"], :default => 15
+ method_option :cache_link, :type => :boolean, :aliases => ["--cache_link"], :default => false, :hide => true
  def clone(dataset_url)
  cli = Cnvrg::CLI.new()
  only_tree =options[:only_tree]
@@ -165,6 +165,7 @@ module Cnvrg
  soft = options[:soft]
  flatten = options[:flatten]
  threads = options[:threads]
+ cache_link = options[:cache_link]
  cli.clone_data(
  dataset_url,
  only_tree=only_tree,
@@ -175,7 +176,8 @@ module Cnvrg
  flatten: flatten,
  relative: options[:relative],
  soft: soft,
- threads: threads
+ threads: threads,
+ cache_link: cache_link
  )
  end

@@ -220,6 +222,8 @@ module Cnvrg
  method_option :threads, :type => :numeric, :aliases => ["-t","--threads"], :default => 15
  method_option :chunk_size, :type => :numeric, :aliases => ["-cs","--chunk"], :default => 1000
  method_option :message, :type => :string, :aliases => ["--message"], :desc => "create commit with message", :default => nil
+ method_option :auto_cache, :type => :boolean, :aliases => ["--auto_cache"], :desc => "auto_cache", :default => false
+ method_option :external_disk, :type => :string, :aliases => ["--external_disk"], :desc => "external_disk_title", :default => nil

  def put(dataset_url, *files)
  cli = Cnvrg::CLI.new()
@@ -231,6 +235,8 @@ module Cnvrg
  message = options[:message]
  threads = options[:threads]
  chunk_size = options[:chunk_size]
+ auto_cache = options[:auto_cache]
+ external_disk = options[:external_disk]
  cli.data_put(
  dataset_url,
  files: files,
@@ -240,16 +246,28 @@ module Cnvrg
  override: override,
  threads: threads,
  chunk_size: chunk_size,
- message: message
+ message: message,
+ auto_cache: auto_cache,
+ external_disk: external_disk
  )
  end

  desc 'data rm DATASET_URL FILES_PREFIX', 'Delete selected files from remote server'
  method_option :message, :type => :string, :aliases => ["--message"], :desc => "create commit with message", :default => nil
+ method_option :auto_cache, :type => :boolean, :aliases => ["--auto_cache"], :desc => "auto_cache", :default => false
+ method_option :external_disk, :type => :string, :aliases => ["--external_disk"], :desc => "external_disk_title", :default => nil
  def rm(dataset_url, *regex_list)
  cli = Cnvrg::CLI.new()
  message = options[:message]
- cli.data_rm(dataset_url, regex_list: regex_list, message: message)
+ auto_cache = options[:auto_cache]
+ external_disk = options[:external_disk]
+ cli.data_rm(
+ dataset_url,
+ regex_list: regex_list,
+ message: message,
+ auto_cache: auto_cache,
+ external_disk: external_disk
+ )
  end

  desc 'data clone_query --query=QUERY_SLUG DATASET_URL', 'Clone dataset with specific query'
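
The new flags are plain Thor method_options, so they surface through options[...] and are forwarded as keyword arguments. A minimal, self-contained Thor sketch of that flow (a demo class only, not the gem's actual command class):

    require 'thor'

    # Demo class; in the gem the parsed values are forwarded to Cnvrg::CLI#data_put.
    class DataDemo < Thor
      desc 'put URL *FILES', 'upload files to a dataset'
      method_option :auto_cache, :type => :boolean, :default => false
      method_option :external_disk, :type => :string, :default => nil
      def put(url, *files)
        puts({ url: url, files: files,
               auto_cache: options[:auto_cache],
               external_disk: options[:external_disk] }.inspect)
      end
    end

    DataDemo.start(%w[put my-dataset a.csv --auto_cache --external_disk nfs-disk-1])
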
@@ -43,8 +43,11 @@ module Cnvrg
  def verify_files_exists(files)
  paths = []
  files.each do |file|
+ # dir shouldn't have an ending slash.
+ file = file[0..-2] if file.end_with? '/'
  if File.exists? file
  if File.directory? file
+ paths << file unless file == '.'
  paths += Dir.glob("#{file}/**/*")
  else
  paths << file
@@ -349,11 +352,11 @@ module Cnvrg
  progress_mutex = Mutex.new
  file_queue = Queue.new
  progress_queue = Queue.new
+ dirs_queue = Queue.new
  worker_threads = []
  progress_threads = []

  # Vars to keep track of uploaded files and directories
- dirs = []
  uploaded_files = []

  begin
@@ -376,6 +379,29 @@ module Cnvrg
  end
  end

+ dir_thread = Thread.new do
+ dirs_to_create = []
+ loop do
+ progress_mutex.synchronize {
+ dir = dirs_queue.deq(non_block: true) rescue nil
+ dirs_to_create << dir unless dir.nil?
+ }
+ if dirs_to_create.size >= 1000 || progressbar.finished?
+ resp = Cnvrg::API.request(@base_resource + "create_dirs", "POST", {dirs: dirs_to_create, commit_sha1: commit_sha1})
+
+ break if resp == false # if resp is false it means 404 which is old server
+ unless Cnvrg::CLI.is_response_success(resp, false)
+ time = Time.current
+ Cnvrg::Logger.log_error_message("Failed to create dirs: #{time}, #{resp.try(:fetch, "message")}")
+ dirs_to_create = []
+ next
+ end
+ dirs_to_create = []
+ end
+ break if progressbar.finished? && dirs_queue.empty? && dirs_to_create.empty?
+ end
+ end
+
  # init the thread that handles the file upload progress and saving them in the server
  threads.times do |i|
  progress_threads[i] = Thread.new do
@@ -383,7 +409,6 @@ module Cnvrg
  file = progress_queue.deq(non_block: true) rescue nil # to prevent deadlocks
  unless file.nil?
  blob_ids = []
- dirs_to_upload = []

  progress_mutex.synchronize {
  progressbar.progress += 1
@@ -391,9 +416,7 @@ module Cnvrg

  if uploaded_files.size >= chunk_size or progressbar.finished?
  blob_ids = uploaded_files.map {|f| f['bv_id']}
- dirs_to_upload = dirs.clone
  uploaded_files = []
- dirs = []
  end
  }

@@ -401,10 +424,9 @@ module Cnvrg
  refresh_storage_token
  Cnvrg::Logger.info("Finished upload chunk of #{chunk_size} files, Sending Upload files save")

-
  retry_count = 0
  loop do
- upload_resp = Cnvrg::API.request(@base_resource + "upload_files_save", "POST", {commit: commit_sha1, blob_ids: blob_ids, dirs: dirs_to_upload})
+ upload_resp = Cnvrg::API.request(@base_resource + "upload_files_save", "POST", {commit: commit_sha1, blob_ids: blob_ids})

  if not (Cnvrg::CLI.is_response_success(upload_resp, false))
  retry_count += 1
@@ -427,8 +449,8 @@ module Cnvrg

  if progressbar.finished?
  Cnvrg::Logger.info("Progress bar finished closing queues")
- file_queue.close()
- progress_queue.close()
+ file_queue.close
+ progress_queue.close
  Thread.exit
  end
  end
@@ -441,7 +463,21 @@ module Cnvrg
  files_chunk = chunk.map{|p| p.gsub(/^\.\//, '')}
  Cnvrg::Logger.info("Generating chunk idx")
  tree = @dataset.generate_chunked_idx(files_chunk, prefix: prefix, threads: threads, cli: cli)
+
+ progress_mutex.synchronize {
+ # Handle directories:
+ new_dirs = tree.keys.select { |k| tree[k].nil? }
+
+ if new_dirs.blank?
+ ## we need to send at least 1 file so dirs can be inflated from it when the tree has no folders
+ file = tree.keys.find { |k| tree[k] != nil }
+ dirs_queue.push file
+ end
+
+ new_dirs.each { |dir| dirs_queue.push dir }
+ }
  Cnvrg::Logger.info("Getting files info from server")
+
  results = request_upload_files(commit_sha1, tree, override, new_branch, partial_commit)
  next unless results

@@ -450,11 +486,8 @@ module Cnvrg
  next
  end

- # Handle directories:
- new_dirs = tree.keys.select {|k| tree[k].nil?}
- dirs += new_dirs
-
  files_to_upload = results['files']
+
  progress_mutex.synchronize {
  progressbar.progress += tree.keys.length - files_to_upload.length
  }
@@ -466,7 +499,10 @@ module Cnvrg
  file_queue.push tree[key].merge(files_to_upload[key])
  end
  end
+
  Cnvrg::Logger.info("Waiting to progress and workers to finish")
+ dir_thread.join
+ dirs_queue.close
  progress_threads.each(&:join)
  worker_threads.each(&:join)
  Thread.report_on_exception = true
@@ -1207,7 +1243,7 @@ module Cnvrg
  false
  end

- def end_commit(commit_sha1, force, success: true, uploaded_files: 0, commit_type: nil)
+ def end_commit(commit_sha1, force, success: true, uploaded_files: 0, commit_type: nil, auto_cache: false, external_disk: nil)
  counter = 0
  begin
  counter += 1
@@ -1219,7 +1255,9 @@ module Cnvrg
  force:force,
  success: success,
  uploaded_files: uploaded_files,
- commit_type: commit_type
+ commit_type: commit_type,
+ auto_cache: auto_cache,
+ external_disk: external_disk
  }
  )
  is_success = Cnvrg::CLI.is_response_success(response, false)
@@ -1253,8 +1291,8 @@ module Cnvrg
  response['result']['files']
  end

- def get_clone_chunk(latest_id: nil, chunk_size: 1000, commit: 'latest')
- response = Cnvrg::API.request("#{@base_resource}/clone_chunk", 'POST',{commit: commit, chunk_size: chunk_size, latest_id: latest_id})
+ def get_clone_chunk(latest_id: nil, chunk_size: 1000, commit: 'latest', cache_link: false)
+ response = Cnvrg::API.request("#{@base_resource}/clone_chunk", 'POST',{commit: commit, chunk_size: chunk_size, latest_id: latest_id, cache_link: cache_link})
  unless Cnvrg::CLI.is_response_success(response, false)
  Cnvrg::Logger.log_info("#{{commit: commit, chunk_size: chunk_size, latest_id: latest_id}}")
  return nil
@@ -1321,7 +1359,7 @@ module Cnvrg
  end
  end

- def download_multiple_files_s3(files, project_home, conflict: false, progressbar: nil, read_only:false, flatten: false, threads: 15)
+ def download_multiple_files_s3(files, project_home, conflict: false, progressbar: nil, read_only:false, flatten: false, threads: 15, cache_link: false)
  begin
  refresh_storage_token
  parallel_options = {
@@ -1342,10 +1380,18 @@ module Cnvrg
  # blob
  local_path = "#{local_path}.conflict" if conflict
  storage_path = f["path"]
- # if File.exists? local_path
- # Cnvrg::Logger.log_info("Trying to download #{local_path} but its already exists, skipping..")
- # next
- # end
+ # if File.exists? local_path
+ # Cnvrg::Logger.log_info("Trying to download #{local_path} but its already exists, skipping..")
+ # next
+ # end
+ if cache_link
+ cached_commits = f['cached_commits']
+
+ if cached_commits.present?
+ next if @downloader.link_file(cached_commits, local_path, @dataset.title, f['name'])
+ end
+ end
+
  resp = @downloader.safe_download(storage_path, local_path)
  Cnvrg::Logger.log_info("Download #{local_path} success resp: #{resp}")
  rescue => e
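
The dir_thread added above is a drain-and-flush worker: it pulls directory paths off dirs_queue and posts them to create_dirs in batches of up to 1000. A self-contained sketch of that pattern, with the batch size shrunk and the API call replaced by an array append (both purely illustrative):

    BATCH_SIZE = 3   # the real code flushes at 1000
    queue   = Queue.new
    batches = []     # stand-in for the create_dirs requests

    worker = Thread.new do
      buffer = []
      loop do
        dir = queue.pop(true) rescue nil   # non-blocking pop, nil when empty
        buffer << dir unless dir.nil?
        if buffer.size >= BATCH_SIZE || (queue.closed? && queue.empty? && !buffer.empty?)
          batches << buffer.dup            # here the gem would POST the batch
          buffer.clear
        end
        break if queue.closed? && queue.empty? && buffer.empty?
      end
    end

    %w[a/ a/b/ a/b/c/ d/ e/].each { |dir| queue.push(dir) }
    queue.close
    worker.join
    p batches   # => [["a/", "a/b/", "a/b/c/"], ["d/", "e/"]]
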
data/lib/cnvrg/dataset.rb CHANGED
@@ -564,7 +564,8 @@ module Cnvrg
  safe_path = file
  safe_path = file[1..-1] if file.start_with? "/"

- label = safe_path.gsub(self.local_path + "/", "")
+ dataset_local_path = self.local_path + "/"
+ label = safe_path.start_with?(dataset_local_path) ? safe_path.sub(dataset_local_path, "") : safe_path
  label = "#{prefix}/#{label}" if prefix.present?
  if not Cnvrg::Files.valid_file_name?(label)
  if cli
@@ -598,6 +599,7 @@ module Cnvrg
  }
  end
  end
+
  if prefix.present? #add the prefix as dirs to the files
  #lets say the prefix is a/b/c so we want that a/, a/b/, a/b/c/ will be in our files_list
  dirs = prefix.split('/')
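
The label change above matters when a file path happens to contain the dataset's local path more than once: the old gsub stripped every occurrence, while the new code strips only a leading one. A small illustration with made-up paths:

    local_path = "/home/user/ds"
    path       = "/home/user/ds/backup/home/user/ds/file.txt"

    path.gsub(local_path + "/", "")
    # old behaviour => "backupfile.txt"
    path.start_with?(local_path + "/") ? path.sub(local_path + "/", "") : path
    # new behaviour => "backup/home/user/ds/file.txt"
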
@@ -17,7 +17,9 @@ module Cnvrg
  count = 0
  begin
  count += 1
- sts = open(sts_path, {ssl_verify_mode: 0}).read rescue nil
+ sts_file = open(sts_path, {ssl_verify_mode: 0})
+ sts = sts_file.read
+ sts.split("\n")
  rescue => e
  backoff_time_seconds = backoff_time(count)
  sleep backoff_time_seconds
@@ -25,7 +27,6 @@ module Cnvrg
  retry if count <= 20
  raise StandardError.new("Cant access storage: #{e.message}")
  end
- sts.split("\n")
  end

  def cut_prefix(prefix, file)
@@ -36,6 +37,21 @@ module Cnvrg
  ### need to be implemented..
  end

+ def link_file(cached_commits, local_path, dataset_title, file_name)
+ prepare_download(local_path)
+ cached_commits.each do |cached_commit|
+ nfs_path = "/nfs-disk/#{cached_commit}/#{dataset_title}/#{file_name}"
+ if File.exist? nfs_path
+ FileUtils.ln(nfs_path, local_path)
+ return true
+ end
+ end
+ false
+ rescue => e
+ Cnvrg::Logger.log_error(e)
+ false
+ end
+
  def safe_download(storage_path, local_path, decrypt: true)
  safe_operation(local_path) { self.download(storage_path, local_path, decrypt: decrypt) }
  end
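
link_file above avoids a network download by hard-linking the file from an NFS cache directory when any of the cached commits already holds it. A stripped-down sketch of the same lookup-then-link idea (paths are illustrative; prepare_download and the gem's logger are replaced with stand-ins):

    require 'fileutils'

    # Returns true if a cached copy was hard-linked into place, false otherwise.
    def link_from_cache(cached_commits, local_path, dataset_title, file_name)
      cached_commits.each do |commit|
        candidate = "/nfs-disk/#{commit}/#{dataset_title}/#{file_name}"
        next unless File.exist?(candidate)
        FileUtils.mkdir_p(File.dirname(local_path))   # stand-in for prepare_download
        FileUtils.ln(candidate, local_path)           # hard link instead of downloading
        return true
      end
      false
    rescue SystemCallError
      false   # fall back to a normal download
    end
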
data/lib/cnvrg/files.rb CHANGED
@@ -804,6 +804,8 @@ module Cnvrg
  # progressbar can throw an exception so we no longer trust it!
  begin
  progress.progress += 1 if progress.present?
+ rescue
+ nil
  ensure
  download_succ_count += 1
  end
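
The bare rescue added above swallows any progressbar error while the ensure branch still runs, so download_succ_count is always incremented. A tiny illustration of that control flow:

    count = 0
    begin
      raise "progressbar blew up"   # stands in for progress.progress += 1 failing
    rescue
      nil                           # swallow the error, as the diff does
    ensure
      count += 1                    # always executed
    end
    count   # => 1
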
data/lib/cnvrg/flow.rb CHANGED
@@ -62,7 +62,7 @@ module Cnvrg
  resp = Cnvrg::API.request(url, 'POST', {flow_version: recipe.to_json}) || {}
  if resp["status"] == 200
  return [Flows.new(resp["flow_version"]["flow_id"], project: project), resp["flow_version"]["id"]]
- elsif resp["status"] == 400
+ elsif resp["status"].between?(400,499)
  raise StandardError.new(resp["message"])
  end
  raise StandardError.new("Can't create new flow")
data/lib/cnvrg/project.rb CHANGED
@@ -448,8 +448,8 @@ module Cnvrg
  next
  end
  if File.directory? e
-
- tree_idx[label + "/"] = nil
+ dir_name = (label.ends_with? "/") ? label : (label + "/")
+ tree_idx[dir_name] = nil
  else
  file_in_idx = old_idx[:tree][label] rescue nil
  last_modified = File.mtime(e).to_f
@@ -513,6 +513,7 @@ module Cnvrg
  #upload
  local_idx = self.generate_idx(deploy: deploy, files: specific_files)
  end
+
  commit = local_idx[:commit]
  tree = local_idx[:tree]
  ignore_list = self.send_ignore_list()
@@ -521,12 +522,12 @@ module Cnvrg
  if tree.present?
  added += local_idx[:tree].keys
  end
- response = {"result" => {"commit" => nil, "tree" => {"added" => added,
- "updated_on_server" => [],
- "updated_on_local" => [],
- "update_local" => [],
- "deleted" => [],
- "conflicts" => []}}}
+ response = { "result" => { "commit" => nil, "tree" => { "added" => added,
+ "updated_on_server" => [],
+ "updated_on_local" => [],
+ "update_local" => [],
+ "deleted" => [],
+ "conflicts" => [] } } }
  return response
  end
  #we dont want to send it on download - we only compare between commits sha1 in download.
@@ -534,6 +535,7 @@ module Cnvrg
  #the new server doesnt need the tree, but the old probably needs :X
  local_idx[:tree] = {} if Cnvrg::Helpers.server_version > 0
  end
+
  response = Cnvrg::API.request(@base_resource + "status", 'POST', {idx: local_idx, new_branch: new_branch,
  current_commit: commit, ignore: ignore_list, force: force, in_exp: in_exp, download: download})

data/lib/cnvrg/version.rb CHANGED
@@ -1,3 +1,3 @@
  module Cnvrg
- VERSION = '1.11.17'
- end
+ VERSION = '1.11.26'
+ end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: cnvrg
  version: !ruby/object:Gem::Version
- version: 1.11.17
+ version: 1.11.26
  platform: ruby
  authors:
  - Yochay Ettun
@@ -10,7 +10,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-01-26 00:00:00.000000000 Z
+ date: 2021-03-08 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler