cnvrg 1.10.5 → 1.10.15
- checksums.yaml +4 -4
- data/lib/cnvrg/cli.rb +47 -17
- data/lib/cnvrg/data.rb +12 -11
- data/lib/cnvrg/datafiles.rb +7 -8
- data/lib/cnvrg/dataset.rb +15 -10
- data/lib/cnvrg/helpers.rb +8 -3
- data/lib/cnvrg/project.rb +1 -1
- data/lib/cnvrg/version.rb +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 4d377618af4c931ec6553e946bb1f32c25bbb7bccd538f040309f4318c20e3bd
+  data.tar.gz: 744a798d06c8c562991be9b68cf6b8258870e657a93e2125ff3bc3c76bd7b2b6
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e52939b069a7ef45d69a0ae8a086291245db90602a13039c7981ac9a3c85913cf3c3b105198a149cc2913668a5fcf59288266ada60d71b5c327b3b90bc3c594a
+  data.tar.gz: 03d8880d00c707ab6afcb3509da1e02c4f01aeaf5e378b49a3ec7c651b2b00176b01be5de9320da652173480279bf1d820cf9e2a255ec1e21217e361c65fd375
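The checksums above can be used to verify the gem's payload before installing it. A minimal Ruby sketch, assuming the .gem archive (a plain tar) has already been unpacked into the current directory so that checksums.yaml and data.tar.gz are local files; the paths are illustrative:

    require 'digest'
    require 'yaml'

    # Compare the SHA256 recorded in checksums.yaml against the digest of the
    # extracted data.tar.gz member. Both file paths are assumptions of this sketch.
    expected = YAML.load_file('checksums.yaml')['SHA256']['data.tar.gz']
    actual   = Digest::SHA256.file('data.tar.gz').hexdigest
    puts(actual == expected ? 'data.tar.gz checksum OK' : 'checksum mismatch')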
data/lib/cnvrg/cli.rb
CHANGED
@@ -1208,12 +1208,14 @@ module Cnvrg
       log_message("Uploading #{@files.size} files", Thor::Shell::Color::GREEN)
       number_of_chunks = (@files.size.to_f / chunk_size).ceil
       if commit.blank?
+        Cnvrg::Logger.info("Creating commit")
         response = @datafiles.start_commit(false, force, chunks: number_of_chunks, message: message )
         unless response #means we failed in the start commit.
           raise SignalException.new(1, "Cant put files into dataset, check the dataset id")
         end
         @commit = response['result']['commit_sha1']
       elsif commit.eql? "latest"
+        Cnvrg::Logger.info("Put files in latest commit")
         response = @datafiles.last_valid_commit()
         unless response #means we failed in the start commit.
           raise SignalException.new(1, "Cant put files into commit:#{commit}, check the dataset id and commitc")
@@ -1233,15 +1235,15 @@ module Cnvrg
         override: override,
         chunk_size: chunk_size,
         prefix: dir,
-        threads: threads
+        threads: threads,
       )
-
+      Cnvrg::Logger.info("Finished upload files")
       # This is for backwards compatibility only and should be removed in future versions:
       res = @datafiles.put_commit(@commit)
       unless res.is_success?
         raise SignalException.new(1, res.msg)
       end
-
+      Cnvrg::Logger.info("Saving commit on server")
       res = @datafiles.end_commit(@commit,force, success: true, commit_type: "put")
       msg = res['result']
       response = Cnvrg::Result.new(Cnvrg::CLI.is_response_success(res, true), msg)
@@ -1268,11 +1270,22 @@ module Cnvrg
       @datafiles = Cnvrg::Datafiles.new(owner, slug, dataset: @dataset)

       # Init a new commit
-      response = @datafiles.start_commit(false,
+      response = @datafiles.start_commit(false, false, chunks: 1, message: message )
       unless response #means we failed in the start commit.
         raise SignalException.new(1, "Cant put files into dataset, check the dataset id")
       end
       @commit = response['result']['commit_sha1']
+
+      # Server expects certain regex format with * so fix those that dont comply
+      regex_list = regex_list.map do |regex|
+        if regex.end_with? "/"
+          # if user wants to delete entire folder add regex to delete contents as well
+          [regex, "#{regex}*"]
+        else
+          regex
+        end
+      end.flatten
+
       files_to_delete, folders_to_delete, job_id = @datafiles.delete_multiple_files(@commit, regex_list)
       log_message("Deleting #{files_to_delete} files and #{folders_to_delete} folders", Thor::Shell::Color::GREEN)

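The regex normalization added above can be exercised on its own: an entry ending in "/" is expanded so that both the folder and everything under it are matched. A standalone sketch with an illustrative input list:

    # Entries ending with "/" get a second pattern with "*" appended, so deleting a
    # folder also deletes its contents; other entries pass through unchanged.
    regex_list = ["images/", "*.tmp"]
    regex_list = regex_list.map do |regex|
      if regex.end_with? "/"
        [regex, "#{regex}*"]
      else
        regex
      end
    end.flatten
    p regex_list  # => ["images/", "images/*", "*.tmp"]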
@@ -3598,20 +3611,35 @@ module Cnvrg
       exit_status = 0

       if options['wait']
+        end_pos = 0
         while true
           tries = 0
           begin
             result = Cnvrg::API_V2.request(
-              "#{project.owner}/projects/#{project.slug}/experiments/#{res["result"]["exp_url"]}/
+              "#{project.owner}/projects/#{project.slug}/experiments/#{res["result"]["exp_url"]}/info",
+              'GET',
+              { exit_status: true, grid: res["result"]["grid"], pos: end_pos }
             )
-
-
-
-
-
-
-
+
+            exit_statuses = result.values.pluck('exit_status')
+            if exit_statuses.include? nil
+              if res["result"]["grid"]
+                system("clear") || system("cls")
+                msg = "#{Time.current}: waiting for all experiments to finish"
+                puts msg
+              else
+                end_pos = result[res['result']['exp_url']]['end_pos']
+                logs = result[res['result']['exp_url']]['logs']
+                logs.each do |log|
+                  puts log['message']
+                end
+              end
              sleep 3
+            else
+              result.each do |slug, value|
+                puts "Experiment #{slug} was exited with status #{value['exit_status']}"
+              end
+              break
            end
          rescue => e
            log_error(e)
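With --wait, the loop above polls the experiment info endpoint until every experiment reports an exit status, printing new log lines (or a grid-wide waiting message) between polls. A simplified, self-contained sketch of that polling pattern; fetch_info stands in for the Cnvrg::API_V2.request call, and the response shape shown in the comment is an assumption of this sketch, not the documented API:

    # Poll until no experiment has a nil exit_status, printing any new logs.
    # Assumed response shape: { "slug" => { "exit_status" => Integer or nil,
    #                                       "end_pos" => Integer, "logs" => [{ "message" => String }] } }
    def wait_for_experiments(fetch_info)
      end_pos = 0
      loop do
        result = fetch_info.call(end_pos)
        if result.values.any? { |v| v["exit_status"].nil? }
          running = result.values.find { |v| v["exit_status"].nil? }
          end_pos = running["end_pos"]
          running["logs"].each { |log| puts log["message"] }
          sleep 3
        else
          result.each { |slug, v| puts "Experiment #{slug} exited with status #{v['exit_status']}" }
          break
        end
      end
    end

    # Usage with a fake poller that finishes on the second round:
    calls = 0
    fake = ->(_pos) do
      calls += 1
      status = calls > 1 ? 0 : nil
      { "exp-1" => { "exit_status" => status, "end_pos" => calls, "logs" => [{ "message" => "step #{calls}" }] } }
    end
    wait_for_experiments(fake)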
@@ -4849,17 +4877,19 @@ module Cnvrg
         exp_name = exp["title"]
         if exp["end_commit"].present? and exp["status"] != "Ongoing"
           log_message("#{exp_name} has ended, getting files from end commit", Thor::Shell::Color::BLUE)
-          Cnvrg::Helpers.get_experiment_events_log_from_server(exp, @project)
+          num_of_new_files = Cnvrg::Helpers.get_experiment_events_log_from_server(exp, @project)
           exps_map[exp_slug] = exp
         else
           log_message("#{exp_name} is running should get logs", Thor::Shell::Color::BLUE)
-          success = Cnvrg::Helpers.get_experiment_events_log_via_kubectl(exp, namespace)
+          success, num_of_new_files = Cnvrg::Helpers.get_experiment_events_log_via_kubectl(exp, namespace)
           if !success and exp["last_successful_commit"].present? and !copied_commits.include?(exp["last_successful_commit"])
             log_message("Failed to get kube files, using last commit", Thor::Shell::Color::BLUE)
-            Cnvrg::Helpers.get_experiment_events_log_from_server(exp, @project, commit: exp["last_successful_commit"])
+            num_of_new_files = Cnvrg::Helpers.get_experiment_events_log_from_server(exp, @project, commit: exp["last_successful_commit"])
            copied_commits << exp["last_successful_commit"]
          end
        end
+
+        log_message("New tf files copied", Thor::Shell::Color::BLUE) if num_of_new_files > 0
      rescue => e
        Cnvrg::Logger.log_error(e)
      end
@@ -5273,8 +5303,8 @@ module Cnvrg
         count += 1
       end
       if File.exist? logfile_old
-
-
+        #@files = Cnvrg::Files.new(Cnvrg::CLI.get_owner, "")
+        #@files.upload_log_file(logfile_old, "log_#{date}.log", yesterday)
         FileUtils.remove logfile_old
       end

data/lib/cnvrg/data.rb
CHANGED
@@ -214,7 +214,7 @@ module Cnvrg

     desc 'data put DATASET_URL FILES_PREFIX', 'Upload selected files from local dataset directory to remote server'
     method_option :dir, :type => :string, :aliases => ["-d", "--dir"], :default => ''
-    method_option :commit, :type => :string, :aliases => ["-c", "--commit"], :default => ''
+    # method_option :commit, :type => :string, :aliases => ["-c", "--commit"], :default => ''
     method_option :force, :type => :boolean, :aliases => ["-f","--force"], :default => false
     method_option :override, :type => :boolean, :aliases => ["--override"], :default => false
     method_option :threads, :type => :numeric, :aliases => ["-t","--threads"], :default => 15
@@ -226,20 +226,21 @@ module Cnvrg
       dir = options[:dir]
       force = options[:force]
       override = options[:override]
-      commit = options[:commit]
+      # commit = options[:commit]
+      commit = ''
       message = options[:message]
       threads = options[:threads]
       chunk_size = options[:chunk_size]
       cli.data_put(
-
-
-
-
-
-
-
-
-
+        dataset_url,
+        files: files,
+        dir: dir,
+        commit: commit,
+        force: force,
+        override: override,
+        threads: threads,
+        chunk_size: chunk_size,
+        message: message
       )
     end

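The body of data put now forwards everything to cli.data_put as keyword arguments instead of a long positional list. A small illustrative sketch of why that forwarding style is safer; the method and values here are hypothetical, not the real CLI:

    # With keywords, argument order cannot silently shift when a flag is added or removed.
    def data_put(dataset_url, files: [], dir: '', commit: '', force: false,
                 override: false, threads: 15, chunk_size: 1000, message: nil)
      puts "uploading #{files.size} files to #{dataset_url} with #{threads} threads"
    end

    data_put("https://app.example.com/org/datasets/my-dataset",
             files: ["a.csv", "b.csv"],
             threads: 15,
             message: "initial upload")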
data/lib/cnvrg/datafiles.rb
CHANGED
@@ -376,16 +376,16 @@ module Cnvrg

         if uploaded_files.size == chunk_size or progressbar.finished?
           refresh_storage_token
-
-          # puts "files: #{file_queue.length}"
+          Cnvrg::Logger.info("Finished upload chunk of #{chunk_size} files")
           Cnvrg::Logger.info("Sending Upload files save")
           blob_ids = uploaded_files.map {|f| f['bv_id']}
+          Cnvrg::Logger.info("Sending chunk to server")
           upload_resp = Cnvrg::API.request(@base_resource + "upload_files_save", "POST", {commit: commit_sha1, blob_ids: blob_ids, dirs: dirs})
           unless Cnvrg::CLI.is_response_success(upload_resp, false)
             Cnvrg::Logger.log_method(bind: binding)
             raise Exception.new("Got an error message from server, #{upload_resp.try(:fetch, "message")}")
           end
-
+          Cnvrg::Logger.info("Chunk saved on server")
           uploaded_files = []
           dirs = []
         end
@@ -394,7 +394,7 @@ module Cnvrg
         end

         if progressbar.finished?
-
+          Cnvrg::Logger.info("Progress bar finished closing queues")
           file_queue.close()
           progress_queue.close()
           Thread.exit
@@ -405,12 +405,11 @@ module Cnvrg
       file_chunks = files.each_slice(chunk_size).to_a
       # Fetch the required files from the server:
       Parallel.map((file_chunks), in_threads: 10) do |files_chunk|
-
+        Cnvrg::Logger.info("Generating chunk idx")
         tree = @dataset.generate_chunked_idx(files_chunk, prefix: prefix, threads: threads)
+        Cnvrg::Logger.info("Getting files info from server")
         results = request_upload_files(commit_sha1, tree, override, new_branch, partial_commit)

-        # puts "Got #{results['files'].size} files to upload from #{files_chunk.size} files"
-
         if results['files'].blank?
           progress_mutex.synchronize { progressbar.progress += tree.keys.length }
           next
@@ -432,7 +431,7 @@ module Cnvrg
           file_queue.push tree[key].merge(files_to_upload[key])
         end
       end
-
+      Cnvrg::Logger.info("Finishing sub processes of datasets' upload")
      progress_thread.join()
      worker_threads.each(&:join)

data/lib/cnvrg/dataset.rb
CHANGED
@@ -544,8 +544,12 @@ module Cnvrg
     def generate_chunked_idx(list_files = [], threads: 15, prefix: '')
       tree = {}
       Parallel.map(list_files, in_threads: threads) do |file|
-
-
+
+        # Fix for root path issue
+        safe_path = file
+        safe_path = file[1..-1] if file.start_with? "/"
+
+        label = safe_path.gsub(self.local_path + "/", "")
         label = "#{prefix}/#{label}" if prefix.present?
         if not Cnvrg::Files.valid_file_name?(label)
           raise StandardError.new("#{label} is not a valid file name.")
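In isolation, the new safe_path lines only strip a single leading "/" so that absolute paths are handled consistently before the label is computed. A tiny sketch with illustrative paths:

    # The first character is dropped only when the path is absolute;
    # relative paths are left untouched.
    ["/data/images/cat.png", "data/images/cat.png"].each do |file|
      safe_path = file
      safe_path = file[1..-1] if file.start_with? "/"
      puts "#{file.inspect} -> #{safe_path.inspect}"
    end
    # "/data/images/cat.png" -> "data/images/cat.png"
    # "data/images/cat.png"  -> "data/images/cat.png"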
@@ -558,9 +562,16 @@ module Cnvrg
         file_size = File.size(file).to_f
         mime_type = MimeMagic.by_path(file)
         content_type = !(mime_type.nil? or mime_type.text?) ? mime_type.type : "text/plain"
-        relative_path =
+        relative_path = safe_path.gsub(/^#{@local_path + "/"}/, "")
         relative_path = "#{prefix}/#{relative_path}" if prefix.present?
-        tree[label] = {
+        tree[label] = {
+          sha1: sha1,
+          file_name: file_name,
+          file_size: file_size,
+          content_type: content_type,
+          absolute_path: file,
+          relative_path: relative_path
+        }
       end
     end
     if prefix.present? #add the prefix as dirs to the files
@@ -873,13 +884,8 @@ module Cnvrg
     end

     def self.stop_if_dataset_present(dataset_home, dataset_name, commit: nil)
-
       cli = Cnvrg::CLI.new()
       config = YAML.load_file(dataset_home + "/.cnvrg/config.yml")
-      if commit.present?
-        local_commit = YAML.load_file(dataset_home + "/.cnvrg/idx.yml")[:commit] rescue nil
-        return if commit != local_commit or local_commit.blank?
-      end
       if config[:dataset_name] == dataset_name
         cli.log_message("Dataset already present, clone aborted")
         exit(0)
@@ -887,6 +893,5 @@ module Cnvrg
     rescue => e
       nil
     end
-
   end
 end
data/lib/cnvrg/helpers.rb
CHANGED
@@ -364,11 +364,14 @@ parameters:
       @files.download_files(files, commit_sha1, progress: nil)
       FileUtils.rm_rf("#{dest_dir}")
       FileUtils.mkdir_p(dest_dir)
+      num_of_new_files = 0
       files.each do |f|
         file_dir = "#{dest_dir}/#{File.dirname(f)}"
         FileUtils.mkdir_p(file_dir)
+        num_of_new_files += 1 unless File.exist?("#{dest_dir}/#{f}")
         FileUtils.mv(f, "#{dest_dir}/#{f}")
       end
+      return num_of_new_files
     end

     def get_experiment_events_log_via_kubectl(exp, namespace)
@@ -376,7 +379,7 @@ parameters:
       result = `kubectl -n #{namespace} get pods | grep #{exp["slug"]}`

       pod_name = result.split(" ")[0]
-      return false if pod_name.blank?
+      return false, 0 if pod_name.blank?
      FileUtils.mkdir_p(dest_dir)
      working_dir = `kubectl -n #{namespace} exec #{pod_name} -c agent -- pwd`
      working_dir.strip!
@@ -399,16 +402,18 @@ parameters:
         end
       end

+      num_of_new_files = 0
       all_files.each do |file|
         file_dir = "#{dest_dir}/#{File.dirname(file)}"
         FileUtils.mkdir_p(file_dir)
+        num_of_new_files += 1 unless File.exist?("#{dest_dir}/#{file}")
         res = `kubectl -n #{namespace} cp #{pod_name}:#{file} -c agent #{dest_dir}/#{file}`
       end

-      return true
+      return true, num_of_new_files
     rescue => e
       Cnvrg::Logger.log_error(e)
-      return false
+      return false, 0
     end
   end

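Both kubectl helpers now return a pair, a success flag plus the number of newly copied files, which the caller in cli.rb destructures. A tiny sketch of that multiple-return pattern; the method body here is purely illustrative:

    # Returning two values lets the caller pick them up with parallel assignment,
    # as cli.rb now does with get_experiment_events_log_via_kubectl.
    def copy_logs
      copied = 3  # illustrative count
      return true, copied
    end

    success, num_of_new_files = copy_logs
    puts "success=#{success}, new files=#{num_of_new_files}"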
data/lib/cnvrg/project.rb
CHANGED
@@ -421,7 +421,7 @@ module Cnvrg

     def generate_idx(deploy: false, files: [])
       if File.exists? "#{self.local_path}/.cnvrg/idx.yml"
-        old_idx = YAML.load_file("#{self.local_path}/.cnvrg/idx.yml")
+        old_idx = YAML.load_file("#{self.local_path}/.cnvrg/idx.yml") rescue {:tree => {}, :commit => nil}
       else
         old_idx = {:tree => {}, :commit => nil}
       end
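The rescue modifier added above makes an unreadable or corrupt idx.yml behave like a missing one instead of aborting generate_idx. A minimal standalone sketch of the same fallback pattern; the path is illustrative:

    require 'yaml'

    # Fall back to an empty index when the file is absent or YAML parsing fails,
    # mirroring the guarded load added in Project#generate_idx.
    path = ".cnvrg/idx.yml"
    old_idx =
      if File.exist?(path)
        YAML.load_file(path) rescue { :tree => {}, :commit => nil }
      else
        { :tree => {}, :commit => nil }
      end
    puts old_idx.inspect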
data/lib/cnvrg/version.rb
CHANGED
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: cnvrg
 version: !ruby/object:Gem::Version
-  version: 1.10.5
+  version: 1.10.15
 platform: ruby
 authors:
 - Yochay Ettun
@@ -10,7 +10,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-08
+date: 2020-09-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler