cnvrg 0.0.149 → 0.0.1410

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 89c404e15ae2c15390c06632e16fdb46725a7dab
- data.tar.gz: 28435401db7d5d2a78ba88de8946513ffc86dca5
+ metadata.gz: 7fc2d5ed72c306c741afc72e0427f6a9a713a71e
+ data.tar.gz: 12c7dbc1a35c33aa1a090a84056b700be07f52b4
  SHA512:
- metadata.gz: 5442ff1a286b258d5624ba1bc00d7944423a99b0b74128cd1b90dffd24511e52e9609a7119ba2d24c4bec348927f7a637fb669b7f961d780f6070103e8cb8e20
- data.tar.gz: 678610c32a2104cab60cec8bcab410cc5d76372079b99044776e444cc7713f206cb10b87521425459e3a8c53937032665a795fd36adfa48a58c9227a1963b998
+ metadata.gz: 9cdb426120acfb6b29ca96895a98adb57035cf8ae32bd077ffd71a69f1154bf67703f615b1aee86e0053eb4c3e080db8c49653d9bf9bf6cdada8800d5fe4d2b8
+ data.tar.gz: 07b4ddfc04e5b66890aec5a95fb7a18d100e62d6f14c7e9dbaebe1d8f6c3a2f34e24df4a5c99973f7546cf2aa9c4a3d17822813f68b84939baf90a39562c1647
@@ -584,8 +584,6 @@ module Cnvrg
  path = Dir.pwd
  @dataset = Dataset.new(path)

- @dataset.generate_idx()
-
  url = @dataset.url
  check = Helpers.checkmark
  say "#{check} Link finished successfully", Thor::Shell::Color::GREEN
@@ -663,25 +661,11 @@ module Cnvrg
  end

  desc 'init_data_container', 'Init dataset directory', :hide => true
- method_option :container, :type => :string, :aliases => ["--c"], :default => ""
  method_option :login_content, :type => :string, :aliases => ["--l"], :default => ""
- method_option :owner, :type => :string, :aliases => ["--o"], :default => ""
- method_option :dataset_slug, :type => :string, :aliases => ["--ds"], :default => ""
- method_option :dataset_name, :type => :string, :aliases => ["--dn"], :default => ""
- method_option :sha1, :type => :string, :aliases => ["--s"], :default => ""
- method_option :username, :type => :string, :aliases => ["--u"], :default => ""
- method_option :api_url, :type => :string, :aliases => ["--a"], :default => ""
-
- def init_data_container()
+
+ def init_data_container(container)
  begin
- container = options["container"]
  login_content = options["login_content"]
- dataset_slug = options["dataset_slug"]
- owner = options["owner"]
- dataset_name = options["dataset_name"]
- sha1 = options["sha1"]
- username = options["username"]
- api_url = options["api_url"]

  container = Docker::Container.get(container)
  command = ["/bin/bash", "-lc", "sudo echo -e \"#{login_content}\" >/home/ds/.netrc"]
@@ -694,18 +678,6 @@ module Cnvrg
  container.exec(command, tty: true)
  command = ["/bin/bash", "-lc", "sudo chmod 0600 /home/ds/.netrc"]
  container.exec(command, tty: true)
- command = ["/bin/bash", "-lc", "cnvrg set_remote_api_url #{owner} #{username} #{api_url}"]
- container.exec(command, tty: true)
- command = ["/bin/bash", "-lc", "mkdir -p /home/ds/notebooks/data/.cnvrg"]
- container.exec(command, tty: true)
- command = ["/bin/bash", "-lc", "touch /home/ds/notebooks/data/.cnvrg/config.yml"]
- container.exec(command, tty: true)
- command = ["/bin/bash", "-lc", "touch /home/ds/notebooks/data/.cnvrgignore"]
- container.exec(command, tty: true)
-
- command = ["/bin/bash", "-lc", "cnvrg data_init_container #{owner} #{dataset_slug} #{dataset_name}"]
- container.exec(command, tty: true)
-

  rescue SignalException
  log_end(-1)
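
Note: init_data_container now takes the container id as a positional Thor argument instead of the removed --c option, and it no longer drives the dataset scaffolding (mkdir/touch/cnvrg data_init_container) through container.exec; that setup moved into Dataset.init_container, shown in a later hunk. A hedged sketch of the new invocation through Thor's standard entry point; the container id, the .netrc content, and the require path are placeholders, not values from the diff:

    require 'cnvrg'

    Cnvrg::CLI.start([
      "init_data_container",
      "3f2a9c0d1b",                        # container id, now passed positionally
      "--l", "machine cnvrg.io login ..."  # login_content, still an option
    ])
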
@@ -736,12 +708,10 @@ module Cnvrg
  exit(1)
  end
  end
+
  desc 'data_snap', 'Init dataset directory', :hide => true

- def data_init_container(owner,dataset_slug,dataset_name)
- puts owner
- puts dataset_name
- puts dataset_slug
+ def data_init_container(owner, dataset_slug, dataset_name)

  if Dataset.init_container(owner, dataset_slug, dataset_name)

@@ -1050,16 +1020,19 @@ module Cnvrg

  @files = Cnvrg::Datafiles.new(@dataset.owner, @dataset.slug)
  if !@dataset.update_ignore_list(ignore)
- say "Couldn't append new ignore files to .cnvrgignore", Thor::Shell::Color::YELLOW
+ say "Couldn't append new ignore files to .cnvrgignore", Thor::Shell::Color::RED
+ exit(1)
  end
- result = @dataset.compare_idx(false)
+ say "Checking dataset", Thor::Shell::Color::BLUE
+ local_idx = @dataset.generate_idx
+ result = @dataset.compare_idx(false, commit=@dataset.last_local_commit, local_idx= local_idx)


  commit = result["result"]["commit"]
  if commit != @dataset.last_local_commit and !@dataset.last_local_commit.nil? and !result["result"]["tree"]["updated_on_server"].empty?
  log_end(0)

- say "Remote server has an updated version, please run `cnvrg download` first, or alternatively: `cnvrg sync`", Thor::Shell::Color::YELLOW
+ say "Remote server has an updated version, please run `cnvrg data download` first", Thor::Shell::Color::YELLOW
  exit(1)
  end

@@ -1076,16 +1049,6 @@ module Cnvrg
  update_total = result["added"].size + result["updated_on_local"].size + result["deleted"].size
  successful_updates = []
  successful_deletions = []
- if verbose
- if update_total == 1
- say "Updating #{update_total} file", Thor::Shell::Color::BLUE
- else
- say "Updating #{update_total} files", Thor::Shell::Color::BLUE
- end
- else
- say "Syncing files", Thor::Shell::Color::BLUE unless sync
-
- end

  # Start commit
  res = @files.start_commit(false)["result"]
@@ -1108,19 +1071,51 @@ module Cnvrg
  @dataset.update_idx_with_files_commits!((successful_deletions+successful_updates), commit_time)

  @dataset.update_idx_with_commit!(commit_sha1)
+ say "Compressing data", Thor::Shell::Color::BLUE

  home_dir = File.expand_path('~')
  tar_path = "#{home_dir}/.cnvrg/tmp/#{@dataset.slug}_#{commit_sha1}.tar.gz"
- tar_files_path = "#{home_dir}/.cnvrg/tmp/#{@dataset.slug}_#{commit_sha1}.txt"
+ tar_files_path = "#{home_dir}/.cnvrg/tmp/#{@dataset.slug}_#{commit_sha1}.txt"
  tar_files = (result["added"] + result["updated_on_local"] +["\n"]).join("\n")
- File.open(tar_files_path, 'w') {|f| f.write tar_files}
- create_tar(dataset_dir, tar_path, tar_files_path)
+ File.open(tar_files_path, 'w') { |f| f.write tar_files }
+ is_tar = create_tar(dataset_dir, tar_path, tar_files_path)
+ if !is_tar
+ say "ERROR: Couldn't compress data", Thor::Shell::Color::RED
+ FileUtils.rm_rf([tar_path]) if File.exist? tar_path
+ FileUtils.rm_rf([tar_files_path]) if File.exist? tar_files_path
+ exit(1)
+ end
+ say "Uploading data", Thor::Shell::Color::BLUE
+
  res = @files.upload_tar_file(tar_path, tar_path, commit_sha1)
- cur_idx = @dataset.get_idx.to_h
- res = @files.end_commit_tar(commit_sha1, cur_idx)
+
+ if res
+ cur_idx = @dataset.get_idx.to_h
+
+ res = @files.end_commit_tar(commit_sha1, cur_idx)
+ if !Cnvrg::CLI.is_response_success(res, false)
+ FileUtils.rm_rf([tar_files_path]) if File.exist? tar_files_path
+ FileUtils.rm_rf([tar_path]) if File.exist? tar_path
+
+
+ @files.rollback_commit(commit_sha1)
+ say "Rolling Back all changes.", Thor::Shell::Color::RED
+ exit(1)
+ end
+
+ else
+ FileUtils.rm_rf([tar_files_path]) if File.exist? tar_files_path
+ FileUtils.rm_rf([tar_path]) if File.exist? tar_path
+
+
+ @files.rollback_commit(commit_sha1)
+ say "Rolling Back all changes.", Thor::Shell::Color::RED
+ exit(1)
+ end
+

  # delete
- FileUtils.rm_rf([tar_path,tar_files_path])
+ FileUtils.rm_rf([tar_path, tar_files_path])

  rescue SignalException
  log_end(-1)
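
Note: the commit/upload path now checks every step. If create_tar fails, or upload_tar_file / end_commit_tar does not come back successful, the temporary tar and file list are deleted and the started commit is rolled back before exiting. A condensed, hedged sketch of that control flow; only the step names come from the diff, and the lambdas stand in for the gem's helpers:

    require 'fileutils'

    # compress/upload/finalize/rollback stand in for create_tar, upload_tar_file,
    # end_commit_tar and rollback_commit; only the flow mirrors the hunk above.
    def upload_dataset_commit(tar_path, tar_files_path, commit_sha1,
                              compress:, upload:, finalize:, rollback:)
      cleanup = -> { FileUtils.rm_rf([tar_path, tar_files_path].select { |p| File.exist?(p) }) }

      unless compress.call
        cleanup.call
        abort "ERROR: Couldn't compress data"
      end

      if upload.call && finalize.call
        cleanup.call                    # success: temp artifacts are no longer needed
      else
        cleanup.call
        rollback.call(commit_sha1)      # undo the started commit on the server
        abort "Rolling Back all changes."
      end
    end
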
@@ -1162,7 +1157,8 @@ module Cnvrg


  rescue => e
- log_end(-1)
+ log_end(-1, e.message)
+


  say "Error occurd, \nAborting", Thor::Shell::Color::RED
  @files.rollback_commit(commit_sha1)
@@ -1195,11 +1191,15 @@ module Cnvrg
  desc 'list data', 'List all dataset you currently have'

  def list_dataset
- verify_logged_in(true)
+ verify_logged_in(false)
  log_start(__method__, args, options)
  dataset_dir = is_cnvrg_dir(Dir.pwd)
  @dataset = Dataset.new(dataset_dir)
- result = @dataset.list()
+ owner = @dataset.owner
+ if owner.nil? or owner.empty?
+ owner = CLI.get_owner()
+ end
+ result = @dataset.list(owner)
  list = result["result"]["list"]

  print_table(list)
@@ -1757,7 +1757,7 @@ module Cnvrg

  log_end(0)
  end
- rescue =>e
+ rescue => e
  log_end(-1)

  say "Error occurd, \nAborting", Thor::Shell::Color::BLUE
@@ -2444,17 +2444,6 @@ module Cnvrg
  data_commit = options["data_commit"] || nil
  ignore = options[:ignore] || ""

- if !data.nil? and !data.empty?
- if ignore.nil? or ignore.empty?
- ignore = "data"
- else
- ignore +=",data"
- end
- end
- if ignore.nil? or ignore.empty?
- ignore= ""
- end
-
  instance_type = options["machine_type"] || nil
  schedule = options["schedule"] || ""
  if schedule.start_with? 'in'
@@ -2483,9 +2472,9 @@ module Cnvrg
  upload_output_option = "--upload_output=#{upload_output}"
  end
  options_hash = Hash[options]
- options_hash.except!("schedule", "machine_type", "image", "upload_output", "grid", "ignore", "data", "data_commit")
+ options_hash.except!("schedule", "machine_type", "image", "upload_output", "grid", "data", "data_commit")
  exec_options = options_hash.map { |x| "--#{x[0]}=#{x[1]}" }.flatten.join(" ")
- command = "#{exec_options} --ignore=#{ignore} #{upload_output_option} #{cmd.flatten.join(" ")}"
+ command = "#{exec_options} #{upload_output_option} #{cmd.flatten.join(" ")}"
  commit_to_run = options["commit"] || nil
  if !schedule.nil? and !schedule.empty?

@@ -4355,6 +4344,7 @@ module Cnvrg
  return false
  end
  end
+
  def data_dir_include()
  all_dirs = Dir.glob("**/*/", File::FNM_DOTMATCH)
  all_dirs.flatten!
@@ -52,8 +52,6 @@ module Cnvrg
  desc 'data list', 'list of datasets'
  def list()
  cli = Cnvrg::CLI.new()
- verbose = options["verbose"]
- sync = options["sync"]

  cli.list_dataset()

@@ -59,6 +59,8 @@ module Cnvrg
  blob_id: upload_resp["result"]["id"]})
  return true
  end
+ else
+ return false
  end
  return false
  end
@@ -290,7 +292,7 @@ module Cnvrg
  is_success = false
  count = 0
  while !is_success and count <3
- resp = `python #{tmp.path} --num-threads=128 --max-singlepart-upload-size=#{MULTIPART_SPLIT} put -f #{file_path} s3://#{URLcrypt.decrypt(upload_resp["result"]["bucket"])}/#{upload_resp["result"]["path"]+"/"+File.basename(file_path)} > /dev/null 2>&1`
+ resp = `python #{tmp.path} --max-singlepart-upload-size=#{MULTIPART_SPLIT} put -f #{file_path} s3://#{URLcrypt.decrypt(upload_resp["result"]["bucket"])}/#{upload_resp["result"]["path"]+"/"+File.basename(file_path)} > /dev/null 2>&1`
  is_success =$?.success?
  count +=1

@@ -31,8 +31,8 @@ module Cnvrg
  return response

  end
- def list
- response = Cnvrg::API.request("users/#{self.owner}/datasets/list", 'GET')
+ def list(owner)
+ response = Cnvrg::API.request("users/#{owner}/datasets/list", 'GET')
  CLI.is_response_success(response)
  return response

@@ -150,17 +150,23 @@ module Cnvrg

  def self.init_container(owner, dataset_slug,dataset_name)

-
  cnvrgignore = Helpers.cnvrgignore_content
  begin
-
+ list_dirs = [ ".cnvrg"
+ ]
+ list_files = [
+ ".cnvrgignore",
+ ".cnvrg/config.yml"
+ ]
+ FileUtils.mkdir_p list_dirs
+ FileUtils.touch list_files

  config = {dataset_name: dataset_name,
  dataset_slug: dataset_slug,
  owner: owner}
- File.open("/home/ds/notebooks/data/.cnvrg/config.yml", "w+") { |f| f.write config.to_yaml }
+ File.open(".cnvrg/config.yml", "w+") { |f| f.write config.to_yaml }

- File.open("/home/ds/notebooks/data/.cnvrgignore", "w+") { |f| f.write cnvrgignore } unless File.exist? ".cnvrgignore"
+ File.open(".cnvrgignore", "w+") { |f| f.write cnvrgignore } unless File.exist? ".cnvrgignore"
  rescue => e
  puts e
  puts e.backtrace
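
Note: Dataset.init_container now creates the scaffolding (.cnvrg/, .cnvrg/config.yml, .cnvrgignore) relative to the current working directory instead of the hard-coded /home/ds/notebooks/data path, so the caller decides where the dataset config lands. A minimal sketch of the resulting layout, using an assumed working directory:

    require 'fileutils'
    require 'yaml'

    workdir = "/tmp/example-dataset"   # placeholder; in the container flow this is the dataset mount
    FileUtils.mkdir_p workdir

    Dir.chdir(workdir) do
      FileUtils.mkdir_p [".cnvrg"]
      FileUtils.touch   [".cnvrgignore", ".cnvrg/config.yml"]
      config = { dataset_name: "example", dataset_slug: "example", owner: "acme" }
      File.open(".cnvrg/config.yml", "w+") { |f| f.write config.to_yaml }
    end
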
@@ -227,9 +233,11 @@ module Cnvrg
  CLI.is_response_success(response)
  return response
  end
- def compare_idx(new_branch, commit=last_local_commit)
+ def compare_idx(new_branch, commit=last_local_commit,local_idx=nil)
+ if local_idx.nil?
+ local_idx = self.generate_idx
+ end

- local_idx = self.generate_idx
  response = Cnvrg::API.request("users/#{self.owner}/datasets/#{self.slug}/status", 'POST', {idx: local_idx, new_branch: new_branch, current_commit: commit})
  CLI.is_response_success(response)
  return response
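
Note: compare_idx gains an optional local_idx parameter, which is how the upload path earlier in this diff reuses the index it has just generated instead of rescanning the dataset; when the argument is omitted (nil) the previous behavior is preserved. A hedged usage sketch, assuming @dataset is an initialized Cnvrg::Dataset as in the CLI code above:

    # Old-style call: compare_idx generates the index itself (local_idx defaults to nil).
    result = @dataset.compare_idx(false)

    # New upload path: scan once, then pass the precomputed index along.
    local_idx = @dataset.generate_idx
    result    = @dataset.compare_idx(false, @dataset.last_local_commit, local_idx)
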
@@ -269,7 +269,7 @@ module Cnvrg
  is_success = false
  count = 0
  while !is_success and count <3
- resp = `python #{tmp.path} --num-threads=128 --max-singlepart-upload-size=#{MULTIPART_SPLIT} put -f #{file_path} s3://#{URLcrypt.decrypt(upload_resp["result"]["bucket"])}/#{upload_resp["result"]["path"]+"/"+File.basename(file_path)} > /dev/null 2>&1`
+ resp = `python #{tmp.path} --max-singlepart-upload-size=#{MULTIPART_SPLIT} put -f #{file_path} s3://#{URLcrypt.decrypt(upload_resp["result"]["bucket"])}/#{upload_resp["result"]["path"]+"/"+File.basename(file_path)} > /dev/null 2>&1`
  is_success =$?.success?
  count +=1

@@ -1,4 +1,4 @@
  module Cnvrg
- VERSION = '0.0.149'
+ VERSION = '0.0.1410'
  end

metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: cnvrg
  version: !ruby/object:Gem::Version
- version: 0.0.149
+ version: 0.0.1410
  platform: ruby
  authors:
  - Yochay Ettun
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-05-16 00:00:00.000000000 Z
+ date: 2017-05-21 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler