cnvrg 1.6.3.1 → 1.6.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 031b5d643980f087c06ded03bb010a85b54127fdf4a95bcc90cd8d75211bb7a0
- data.tar.gz: 8c41307f0535e9ce926ee272f2806e9fb2444a09ddc3aeb55cf2eb5265016216
+ metadata.gz: cd6065b7fe98eebf485fe2813928c509d9f23700c0e6e34f6cbf880f6717e39b
+ data.tar.gz: 9bc650433cd77fba6de2edd9b15ad380c9302dc9eb250c35ffa7b2e3c9668e3f
  SHA512:
- metadata.gz: bf9afdf0369891dadd4c6dc1bdcd22883d688fafe68be8ca70927fe521576980eb3563a4d57eab9c5aa7f6d8d16d619a7631f8e8d8da513901b47eb72d95ae8c
- data.tar.gz: 61ef3ef8107e2c7589ccc17b7e434e4cdde997490b17adb6d2bf32e9876d9f8bdb039c6f269cc391e6cce2d16ce9adce052deded77880b564a4bd7098031f48f
+ metadata.gz: a175f7aaeda31208f381beeefd924bad22e42421d3da023e14e6468460cbd9b3a0fc36602fd4113f7e023ee81877c9cc9f0e659d18e539a774fcc5dc292038aa
+ data.tar.gz: 3dfcbf2f22735ad612cea0974ba75709742371737fc3bfd8a6c6b84c79acdcca30e736b21e39cd397b7394f59891cb649f30ac58ab3ae1be426e1c9a376aa1e8
@@ -31,7 +31,7 @@ Gem::Specification.new do |spec|
  spec.add_runtime_dependency 'open4', '~> 1.3', '>= 1.3.4'
  spec.add_runtime_dependency 'highline', '~> 1.7', '>= 1.7.8'
  spec.add_runtime_dependency 'thor', '~> 0.19.0','>=0.19.1'
- spec.add_runtime_dependency 'aws-sdk', '~> 2.11.417'
+ spec.add_runtime_dependency 'aws-sdk', '~> 2'
  spec.add_runtime_dependency 'signet', '~> 0.11.0'
  spec.add_runtime_dependency 'google-cloud-env', '~> 1.2.1'
  spec.add_runtime_dependency 'google-cloud-core', '~> 1.3.2'
@@ -753,14 +753,11 @@ module Cnvrg
  desc 'data init', 'Init dataset directory', :hide => true
  method_option :public, :type => :boolean, :aliases => ["-p", "--public"], :default => false

- def init_data(public, bucket: nil, title: nil)
+ def init_data(public, bucket: nil)
  begin
  verify_logged_in(false)
  log_start(__method__, args, options)
  dataset_name = File.basename(Dir.getwd)
- if title.present?
- dataset_name = title
- end
  if File.directory?(Dir.getwd + "/.cnvrg")
  config = YAML.load_file("#{Dir.getwd}/.cnvrg/config.yml")
  log_message("Directory is already linked to #{config[:dataset_slug]}", Thor::Shell::Color::RED)
@@ -848,6 +845,7 @@ module Cnvrg
  begin
  verify_logged_in(false)
  log_start(__method__, args, options)
+ return if check_pod_restart[1] ## It means that all datasets downloaded successfully
  commit = options["commit"] || commit
  only_tree = options["only_tree"] || only_tree
  read = options["read"] || read || false
@@ -1184,7 +1182,7 @@ module Cnvrg
  end

  desc '', '', :hide => true
- def data_put(dataset_url, files: [], dir: '', commit: '', chunk_size: 1000)
+ def data_put(dataset_url, files: [], dir: '', chunk_size: 1000)
  begin
  verify_logged_in(false)
  log_start(__method__, args, options)
@@ -1203,44 +1201,31 @@ module Cnvrg
  end
  log_message("Uploading #{@files.size} files", Thor::Shell::Color::GREEN)
  number_of_chunks = (@files.size.to_f / chunk_size).ceil
- if commit.blank?
- response = @datafiles.start_commit(false, true, chunks: number_of_chunks)
- unless response #means we failed in the start commit.
- raise SignalException.new(1, "Cant put files into dataset, check the dataset id")
- end
- @commit = response['result']['commit_sha1']
- elsif commit.eql? "latest"
- response = @datafiles.last_valid_commit()
- unless response #means we failed in the start commit.
- raise SignalException.new(1, "Cant put files into commit:#{commit}, check the dataset id and commitc")
- end
- @commit = response['result']['sha1']
- else
- @commit = commit
+ response = @datafiles.start_commit(false, true, chunks: number_of_chunks)
+ unless response #means we failed in the start commit.
+ raise SignalException.new(1, "Cant put files into server, check the dataset slug")
  end
+ @commit = response['result']['commit_sha1']
  #dir shouldnt have starting or ending slash.
  dir = dir[0..-2] if dir.end_with? '/'
  dir = dir[1..-1] if dir.start_with? '/'

+ progressbar = ProgressBar.create(:title => "Upload Progress",
+ :progress_mark => '=',
+ :format => "%b>>%i| %p%% %t",
+ :starting_at => 0,
+ :total => @files.size,
+ :autofinish => true)
  @files.each_slice(chunk_size).each do |list_files|
  temp_tree = @dataset.generate_chunked_idx(list_files, prefix: dir)
  #will throw a signal exception if something goes wrong.
- @datafiles.upload_multiple_files(@commit, temp_tree, force: true, prefix: dir, total: @files.size)
+ @datafiles.upload_multiple_files(@commit, temp_tree, force: true, progressbar: progressbar, prefix: dir)
  end
- if commit.blank?
- res = @datafiles.put_commit(@commit)
- unless res.is_success?
- raise SignalException.new(1, res.msg)
- end
- else
- res = @datafiles.end_commit(@commit,false, success: true )
- msg = res['result']
- response = Cnvrg::Result.new(Cnvrg::CLI.is_response_success(res, true), msg)
- unless response.is_success?
- raise SignalException.new(1, res.msg)
- end
+ res = @datafiles.put_commit(@commit)
+ unless res.is_success?
+ raise SignalException.new(1, res.msg)
  end
- log_message("Uploading files finished Successfully", Thor::Shell::Color::GREEN)
+ log_message("Upload finished Successfully", Thor::Shell::Color::GREEN)
  rescue SignalException => e
  log_message(e.message, Thor::Shell::Color::RED)
  return false
@@ -1796,6 +1781,7 @@ module Cnvrg
  begin
  verify_logged_in(false)
  log_start(__method__, args, options)
+ return if check_pod_restart[0] ## It means that project downloaded successfully
  url_parts = project_url.split("/")
  project_index = Cnvrg::Helpers.look_for_in_path(project_url, "projects")
  slug = url_parts[project_index + 1]
@@ -1964,11 +1950,12 @@ module Cnvrg
  log_message('Syncing dataset', Thor::Shell::Color::BLUE, !options["verbose"])
  if !force and !init
  # w(verbose=false, new_branch=false,sync=false, commit=nil,all_files=true)
- total_deleted, total_downloaded = invoke :download_data_new,[verbose, new_branch, true, commit, all_files], :new_branch=>new_branch, :direct=>false, :force =>force
+ invoke :download_data_new,[verbose, new_branch, true, commit, all_files], :new_branch=>new_branch, :direct=>false, :force =>force
  end
+
  # w(new_branch, verbose,sync,force, tags, chunk_size)
- invoke :upload_data_new,[new_branch, verbose, true, force, tags, chunk_size, message:message, total_deleted: total_deleted, total_downloaded: total_downloaded],
- :new_branch=>new_branch, :direct=>false, :force =>force, :sync =>true, :tags =>tags, :parallel => parallel, :message => message
+ invoke :upload_data_new,[new_branch, verbose, true, force, tags, chunk_size, message:message], :new_branch=>new_branch,
+ :direct=>false, :force =>force, :sync =>true, :tags =>tags, :parallel => parallel, :message => message

  end
  desc 'upload_data_new', 'upload_data_new', :hide => true
@@ -1980,47 +1967,31 @@ module Cnvrg
  method_option :parallel, :type => :numeric, :aliases => ["-p", "--parallel"], :desc => "uparallel upload at the same time", :default => 15
  method_option :message, :type => :string, :aliases => ["--message"], :desc => "create commit with message", :default => nil

- def upload_data_new(new_branch, verbose, sync, force, tags, chunk_size, message:nil, total_deleted: 0, total_downloaded: 0)
+ def upload_data_new(new_branch, verbose, sync, force, tags, chunk_size, message:nil)
  begin
- commit, files_list = invoke :start_commit_data,[], :new_branch=> new_branch, :direct=>false, :force =>force, :chunk_size => chunk_size, :message => message
- files_to_upload, upload_errors = invoke :upload_data_files,[commit, files_list: files_list],:new_branch=>new_branch, :verbose =>verbose, :force =>force, :sync =>sync, :chunk_size => chunk_size
-
- upload_size = files_to_upload + upload_errors.try(:size) rescue 0
- invoke :end_commit_data,[commit, success: true, uploaded_files: files_to_upload, sync: sync], :new_branch=>new_branch, :force =>force
- if tags
- log_message('Uploading Tags', Thor::Shell::Color::BLUE)
- dataset_dir = is_cnvrg_dir(Dir.pwd)
- @dataset = Dataset.new(dataset_dir)
- begin
- tag_file = File.open(options[:tags], "r+")
- status = @dataset.upload_tags_via_yml(tag_file)
- rescue
- log_message('Tags file not found', Thor::Shell::Color::RED)
- return
- end
- if status
- log_message('Tags are successfully uploaded', Thor::Shell::Color::GREEN)
- else
- log_message('There was some error in uploading Tags', Thor::Shell::Color::RED)
- end
- end
- if total_deleted > 0
- log_message("#{total_deleted} files deleted successfully.", Thor::Shell::Color::GREEN)
- end
-
- if total_downloaded > 0
- log_message("#{total_downloaded} files downloaded successfully.", Thor::Shell::Color::GREEN)
- end
- if upload_size > 0
- log_message("#{files_to_upload}/#{upload_size} files uploaded successfully.", Thor::Shell::Color::GREEN)
+ commit = invoke :start_commit_data,[], :new_branch=> new_branch, :direct=>false, :force =>force, :chunk_size => chunk_size, :message => message
+ upload_res = invoke :upload_data_files,[commit],:new_branch=>new_branch, :verbose =>verbose, :force =>force, :sync =>sync, :chunk_size => chunk_size
+ if upload_res < 0
+ return
+ end
+ invoke :end_commit_data,[commit, success: true, uploaded_files: upload_res], :new_branch=>new_branch, :force =>force
+ if tags
+ log_message('Uploading Tags', Thor::Shell::Color::BLUE)
+ dataset_dir = is_cnvrg_dir(Dir.pwd)
+ @dataset = Dataset.new(dataset_dir)
+ begin
+ tag_file = File.open(options[:tags], "r+")
+ status = @dataset.upload_tags_via_yml(tag_file)
+ rescue
+ log_message('Tags file not found', Thor::Shell::Color::RED)
+ return
  end
-
- if upload_errors.try(:size) > 0
- log_message("#{upload_errors.try(:size)}/#{upload_size} files didn't upload:", Thor::Shell::Color::RED)
- upload_errors.each do |file_hash|
- log_message("#{file_hash[:absolute_path]}", Thor::Shell::Color::RED)
- end
+ if status
+ log_message('Tags are successfully uploaded', Thor::Shell::Color::GREEN)
+ else
+ log_message('There was some error in uploading Tags', Thor::Shell::Color::RED)
  end
+ end
  rescue => e
  Cnvrg::CLI.log_message(e.message, 'red')
  Cnvrg::Logger.log_error(e)
@@ -2050,16 +2021,17 @@ module Cnvrg
  verify_logged_in(true)
  log_start(__method__, args, options)
  dataset_dir = is_cnvrg_dir(Dir.pwd)
+ direct = options[:direct]
  new_branch = options["new_branch"] || false
  force = options["force"] || false
  chunk_size = options["chunk_size"] || false
  message = options["message"]
+ commit_sha1 = nil
  @dataset = Dataset.new(dataset_dir)
  @dataset.backup_idx
  @files = Cnvrg::Datafiles.new(@dataset.owner, @dataset.slug, dataset: @dataset)
  next_commit = @dataset.get_next_commit #if there was a partial commit..
- files_list = @dataset.list_all_files
- chunks = (files_list.length.to_f / chunk_size).ceil
+ chunks = (@dataset.list_all_files.length.to_f / chunk_size).ceil
  resp = @files.start_commit(new_branch, force, chunks: chunks, dataset: @dataset, message: message)
  if !resp['result']['can_commit']
  log_message("Cant upload files because a new version of this dataset exists, please download it or upload with --force", Thor::Shell::Color::RED)
@@ -2070,7 +2042,7 @@ module Cnvrg
  @dataset.set_partial_commit(next_commit)
  end
  @dataset.set_next_commit(commit_sha1)
- return commit_sha1, files_list
+ return commit_sha1
  end


@@ -2078,7 +2050,7 @@ module Cnvrg
  method_option :new_branch, :type => :boolean, :aliases => ["-nb"], :desc => "create new branch of commits"
  method_option :force, :type => :boolean, :aliases => ["-f","--force"], :default => false

- def end_commit_data(commit, success: true, uploaded_files: 0, sync: false)
+ def end_commit_data(commit, success: true, uploaded_files: 0)
  begin
  verify_logged_in(true)
  log_start(__method__, args, options)
@@ -2093,12 +2065,7 @@ module Cnvrg
  @dataset.revert_next_commit #removes the next commit
  log_message("#{check} Dataset is up to date", Thor::Shell::Color::GREEN)
  else
- if sync
- message = "#{check} Data sync finished"
- else
- message = "#{check} Data upload finished"
- end
- log_message(message, Thor::Shell::Color::GREEN)
+ log_message("#{check} Data files were updated successfully", Thor::Shell::Color::GREEN)
  @dataset.remove_next_commit #takes the next commit and put it as current commit
  @dataset.set_partial_commit(nil)
  @dataset.backup_idx
@@ -2151,53 +2118,54 @@ module Cnvrg
  # method_option :tags_yml, :type => :boolean, :aliases => ["--file_tag_yml"], :default => false
  method_option :parallel, :type => :numeric, :aliases => ["-p", "--parallel"], :desc => "uparallel upload at the same time", :default => 15

- def upload_data_files(new_commit, files_list: [])
+ def upload_data_files(new_commit, *files)
  begin
- verify_logged_in(true)
- log_start(__method__, args, options)
- dataset_dir = is_cnvrg_dir(Dir.pwd)
- @dataset = Dataset.new(dataset_dir)
- @files = Cnvrg::Datafiles.new(@dataset.owner, @dataset.slug, dataset: @dataset)
- new_commit ||= @dataset.get_next_commit
- partial_commit = @dataset.get_partial_commit
- if new_commit.blank?
- log_message("You must specify commit, run start_commit to create new commit", Thor::Shell::Color::RED)
- return false
- end
- chunk_size = options[:chunk_size]
- chunk_size = [chunk_size, 1].max
- new_branch = options["new_branch"] || false
- new_tree = {}
- force = options["force"] || false
- parallel_threads = options["parallel"] || ParallelThreads
- all_files = files_list
- all_files = @dataset.list_all_files if files_list.blank?
- files_uploaded = 0
- upload_errors = []
-
- all_files.each_slice(chunk_size).each do |list_files|
- Cnvrg::Logger.log_info("Uploading files into #{@dataset.slug}, #{files_uploaded} files uploaded")
- temp_tree = @dataset.generate_chunked_idx(list_files, threads: parallel_threads)
- upload_resp, upload_error_files = @files.upload_multiple_files(new_commit, temp_tree,
- threads: parallel_threads,
- force: force,
- new_branch: new_branch,
- partial_commit: partial_commit,
- total: all_files.length)
-
- files_uploaded += upload_resp
- upload_errors += upload_error_files if upload_error_files.present?
- temp_tree.each do |k, v|
- new_tree[k] = (v.present?) ? {sha1: v.try(:fetch, :sha1, nil), commit_time: nil} : nil
- end
- end
-
- @dataset.write_tree(new_tree) #we dont want to re-run it every time so just on finish.
+ verify_logged_in(true)
+ log_start(__method__, args, options)
+ dataset_dir = is_cnvrg_dir(Dir.pwd)
+ @dataset = Dataset.new(dataset_dir)
+ @files = Cnvrg::Datafiles.new(@dataset.owner, @dataset.slug, dataset: @dataset)
+ new_commit ||= @dataset.get_next_commit
+ partial_commit = @dataset.get_partial_commit
+ if new_commit.blank?
+ log_message("You must specify commit, run start_commit to create new commit", Thor::Shell::Color::RED)
+ return false
+ end
+ force = options[:force] || false
+ chunk_size = options[:chunk_size]
+ chunk_size = [chunk_size, 1].max
+ new_branch = options["new_branch"] || false
+ new_tree = {}
+ force = options["force"] || false
+ parallel_threads = options["parallel"] || ParallelThreads
+ all_files = @dataset.list_all_files
+ progressbar = ProgressBar.create(:title => "Upload Progress",
+ :progress_mark => '=',
+ :format => "%b>>%i| %p%% %t",
+ :starting_at => 0,
+ :total => all_files.length,
+ :autofinish => true)
+ files_uploaded = 0
+ all_files.each_slice(chunk_size).each do |list_files|
+ Cnvrg::Logger.log_info("Uploading files into #{@dataset.slug}, #{files_uploaded} files uploaded")
+ temp_tree = @dataset.generate_chunked_idx(list_files, threads: parallel_threads)
+ upload_resp = @files.upload_multiple_files(new_commit, temp_tree,
+ threads: parallel_threads,
+ force: force,
+ new_branch: new_branch,
+ progressbar: progressbar,
+ partial_commit: partial_commit)
+ files_uploaded += upload_resp
+ temp_tree.each do |k, v|
+ new_tree[k] = (v.present?) ? {sha1: v.try(:fetch, :sha1, nil), commit_time: nil} : nil
+ end
+ end
+ @dataset.write_tree(new_tree) #we dont want to re-run it every time so just on finish.
  rescue => e
  Cnvrg::Logger.log_error(e)
  raise e
  end
- return files_uploaded, upload_errors.try(:flatten).try(:compact)
+ return files_uploaded
  end


@@ -2325,9 +2293,7 @@ module Cnvrg
  :total => (to_upload.size + deleted.size),
  :autofinish => true)
  @files.upload_multiple_files(to_upload, commit_sha1, progress: progressbar)
-
  @files.delete_files_from_server(deleted, commit_sha1)
-
  progressbar.finish
  res = @files.end_commit(commit_sha1, force: force, message: commit_msg)
  unless Cnvrg::CLI.is_response_success(res, false)
@@ -2465,51 +2431,52 @@ module Cnvrg
  dataset_dir = is_cnvrg_dir(Dir.pwd)
  @dataset = Dataset.new(dataset_dir)
  @files = Cnvrg::Datafiles.new(@dataset.owner, @dataset.slug, dataset: @dataset)
+ all_files = all_files
+ @dataset.generate_idx
  res = @dataset.compare_idx_download(all_files: all_files, desired_commit: commit)
  unless CLI.is_response_success(res, false)
  log_message("Cant find the desired commit, please check it or try to download without it.", Thor::Shell::Color::RED)
  exit(1)
  end
- result = res["result"]
- tree = result["tree"]
- commit = result["commit"]
- update_total = [tree['added'], tree["updated_on_server"], tree["conflicts"], tree["deleted"]].compact.flatten.size
+ result = res["result"]["tree"]
+ commit = res["result"]["commit"]
+ can_commit = res["result"]["can_commit"] || false #can commit means that our next commit is newer than the latest commit
+ # so if can_commit is true it means that we have to be up-to-date with the dataset.
+ update_total = [result['added'], result["updated_on_server"], result["conflicts"], result["deleted"]].compact.flatten.size
  successful_changes = 0
- if update_total == 0
- log_message("Dataset is up to date", Thor::Shell::Color::GREEN, !sync)
- return 0, 0
+ if update_total == 0 or can_commit
+ log_message("Dataset is up to date", Thor::Shell::Color::GREEN) if !sync
+ return true
+ elsif options["verbose"]
+ log_message("Downloading #{update_total} files", Thor::Shell::Color::BLUE)
  else
- log_message("Downloading #{update_total} files", Thor::Shell::Color::BLUE, options["verbose"])
  log_message("Syncing Dataset", Thor::Shell::Color::BLUE, !sync)
  end
- Cnvrg::Logger.log_info("Current commit: #{@dataset.last_local_commit}, destination commit: #{commit}")
- Cnvrg::Logger.log_info("Compare idx res: #{tree}")
+ Cnvrg::Logger.log_info("Current commit: #{@dataset.get_current_commit}, destination commit: #{commit}")
+ Cnvrg::Logger.log_info("Compare idx res: #{result}")
  progressbar = ProgressBar.create(:title => "Download Progress",
- :progress_mark => '=',
- :format => "%b>>%i| %p%% %t",
- :starting_at => 0,
- :total => update_total,
- :autofinish => true)
+ :progress_mark => '=',
+ :format => "%b>>%i| %p%% %t",
+ :starting_at => 0,
+ :total => update_total,
+ :autofinish => true)
+ conflicts = @files.mark_conflicts(result)

- conflicts = @files.mark_conflicts(tree)
  log_message("Found some conflicts, check .conflict files.", Thor::Shell::Color::BLUE) if conflicts > 0
- update_res = @files.download_files_in_chunks(tree["updated_on_server"], progress: progressbar) if tree["updated_on_server"].present?
- added_res = @files.download_files_in_chunks(tree["added"], progress: progressbar) if tree["added"].present?
- deleted = tree["deleted"].to_a
+ update_res = @files.download_files_in_chunks(result["updated_on_server"], progress: progressbar) if result["updated_on_server"].present?
+ added_res = @files.download_files_in_chunks(result["added"], progress: progressbar) if result["added"].present?
+ # conflict_res = @files.download_files_in_chunks(result["conflicts"], conflict: true) if result["conflicts"].present?
+ deleted = result["deleted"].to_a
  delete_res = @files.delete_commit_files_local(deleted)
-
+ progressbar.progress += deleted.size if progressbar.present?
  if !delete_res
  log_message("Couldn't delete #{deleted.join(" ")}", Thor::Shell::Color::RED)
  log_message("Couldn't download, Rolling Back all changes.", Thor::Shell::Color::RED)
  exit(1)
  end
-
- progressbar.progress += deleted.size if progressbar.present? and deleted.size > 0
-
  success = (update_res.blank? or update_res.is_success?)
  success &= (delete_res.blank? or delete_res.is_success?)
  success &= (added_res.blank? or added_res.is_success?)
-
  if success
  # update idx with latest commit
  @dataset.update_idx_with_commit!(commit)
@@ -2519,24 +2486,17 @@ module Cnvrg
  log_message(successful_changes.join("\n"), Thor::Shell::Color::GREEN)
  log_message("Total of #{successful_changes.size} / #{update_total} files.", Thor::Shell::Color::GREEN)
  else
- log_message("#{check} Downloaded changes successfully", Thor::Shell::Color::GREEN, !sync)
+ log_message("#{check} Downloaded changes successfully", Thor::Shell::Color::GREEN, ((sync or options["sync"]) ? false : true))
  end
-
- total_deleted = deleted.try(:size)
- total_downloaded = tree["added"].try(:size) || 0
- total_downloaded += tree["updated_on_server"].try(:size) if tree["updated_on_server"].present?
-
- return total_deleted, total_downloaded
- else
- return []
+ return true
  end
  rescue SignalException => e
  Cnvrg::Logger.log_error(e)
  say "\nAborting", Thor::Shell::Color::BLUE
  exit(1)
  rescue => e
+ log_message("Error occurred, \nAborting", Thor::Shell::Color::BLUE)
  Cnvrg::Logger.log_error(e)
- log_message("Error occurred, \nAborting", Thor::Shell::Color::RED)
  exit(1)
  end
  end
@@ -2894,15 +2854,14 @@ module Cnvrg
  method_option :job_type, :type => :string, :aliases => ["-jt", "--job_type"], :default => nil
  method_option :files, :type => :string, :aliases => ["--files"], :default => nil
  method_option :output_dir, :type => :string, :aliases => ["--output_dir"], :default => nil
- def sync(direct = true)
+ def sync(direct = true)
  verify_logged_in(true) if direct
  @project = Project.new(get_project_home)
  log_start(__method__, args, options)
  log_message('Checking for new updates from remote version', Thor::Shell::Color::BLUE, options["verbose"])
  log_message('Syncing project', Thor::Shell::Color::BLUE, !options["verbose"])
- job_slug = options['job_slug'] || ENV['CNVRG_JOB_ID']
- job_type = options['job_type'] || ENV['CNVRG_JOB_TYPE']
- is_git = ENV['CNVRG_GIT_PROJECT'] == "true" || @project.is_git
+ job_slug = options['job_slug']
+ job_type = options['job_type']
  in_exp = options["in_exp"] || (job_slug.present? and job_type.present?)
  in_exp = false if job_type.present? and job_type == "NotebookSession"
  run_download = true
@@ -2938,7 +2897,7 @@ module Cnvrg
  method_option :image, :type => :string, :aliases => ["--image"], :default => nil
  method_option :grid, :type => :string, :aliases => ["-g", "--grid"], :default => ""
  method_option :data, :type => :string, :aliases => ["-d", "--data"], :default => ""
- method_option :datasets, :type => :string, :aliases => ["--datasets"], :desc => "'[{\"id\": \"dataset id\", \"commit\": \"commit id\", \"query\": \"query name\", \"tree_only\": true, \"use_cached\": true]'", :default => ""
+ method_option :datasets, :type => :string, :aliases => ["--datasets"], :desc => "'[{\"id\": \"dataset id\", \"commit\": \"commit id\", \"query\": \"query name\", \"tree_only\": true]'", :default => ""
  method_option :data_commit, :type => :string, :aliases => ["--data_commit"], :default => ""
  method_option :ignore, :type => :string, :aliases => ["-i", "--ignore"], :desc => "ignore following files", :default => ""
  method_option :force, :type => :boolean, :aliases => ["-f", "--force"], :default => false
@@ -3422,6 +3381,10 @@ module Cnvrg
  output_dir = "output"
  end
  image = options["image"] || nil
+ if image.blank?
+ image = "cnvrg"
+ end
+
  forced_commit = nil
  if sync_before and !project.is_git
  if force
@@ -5523,7 +5486,7 @@ module Cnvrg
  end
  end

- def self.log_message(message, type = Thor::Shell::Color::BLUE)
+ def self.log_message(message, type = Thor::Shell::Color::BLUE, to_print: true)
  if $LOG.blank?
  ### handle case when $LOG is not initialized
  CLI.new.log_handler
@@ -5545,12 +5508,11 @@ module Cnvrg
  color = nil
  $LOG.info message: message, type: "unknown"
  end
- say "#{color}#{message}#{Thor::Shell::Color::CLEAR}"
+ say "#{color}#{message}#{Thor::Shell::Color::CLEAR}" if to_print
  end

  def log_message(message, type=Thor::Shell::Color::GREEN, to_print = true)
- return if not to_print
- CLI.log_message(message, type)
+ CLI.log_message(message, type, to_print: to_print)
  end

  def log_error(e)
@@ -22,14 +22,11 @@ module Cnvrg
  desc "data init", "Set current directory as dataset directory"
  method_option :public, :type => :boolean, :aliases => ["-p", "--public"], :default => false
  method_option :bucket, :type => :string, :aliases => ["-b", "--bucket"], :default => ""
- method_option :title, :type => :string, :aliases => ["-t", "--title"], :default => ""
-
  def init
  cli = Cnvrg::CLI.new()
  public = options["public"]
  bucket = options["bucket"]
- title = options["title"]
- cli.init_data(public, bucket: bucket, title: title)
+ cli.init_data(public, bucket: bucket)
  end

  desc "data link DATASET_SLUG", "Set current directory as dataset directory"
@@ -185,12 +182,10 @@ module Cnvrg

  desc 'data put DATASET_URL FILES_PREFIX', 'Upload selected files from local dataset directory to remote server'
  method_option :dir, :type => :string, :aliases => ["-d", "--dir"], :default => ''
- method_option :commit, :type => :string, :aliases => ["-c", "--commit"], :default => ''
  def put(dataset_url, *files)
  cli = Cnvrg::CLI.new()
  dir = options[:dir]
- commit = options[:commit]
- cli.data_put(dataset_url, files: files, dir: dir, commit: commit)
+ cli.data_put(dataset_url, files: files, dir: dir)
  end

  desc 'data clone_query --query=QUERY_SLUG DATASET_URL', 'Clone dataset with specific query'
@@ -114,82 +114,54 @@ module Cnvrg
  end


- def upload_multiple_files(commit_sha1, tree, threads: ParallelThreads, force: false, new_branch: false, prefix: '', partial_commit: nil, total: nil)
+ def upload_multiple_files(commit_sha1, tree, threads: ParallelThreads, force: false, new_branch: false, progressbar: nil, prefix: '', partial_commit: nil)
  begin
  Cnvrg::Logger.log_info("Sending Upload Files request")
- error = nil
- upload_resp = nil
- 10.times do
- upload_resp = Cnvrg::API.request(@base_resource + "upload_files", 'POST_JSON', {commit_sha1: commit_sha1, tree: tree, force: force, is_branch: new_branch, partial_commit: partial_commit})
- if Cnvrg::CLI.is_response_success(upload_resp, false)
- error = nil
- break
- end
- error = upload_resp
+ upload_resp = Cnvrg::API.request(@base_resource + "upload_files", 'POST_JSON', {commit_sha1: commit_sha1, tree: tree, force: force, is_branch: new_branch, partial_commit: partial_commit})
+ unless Cnvrg::CLI.is_response_success(upload_resp, false)
  Cnvrg::Logger.log_method(bind: binding)
- Cnvrg::Logger.log_info("Got an error message from server, #{upload_resp.try(:fetch, "message")}, trying again")
+ raise Exception.new("Got an error message from server, #{upload_resp.try(:fetch, "message")}")
  end
- raise Exception.new("Can't upload data files: #{error["message"]}") if error.present?
-
  Cnvrg::Logger.log_info("Uploading files")
  results = upload_resp['result'].with_indifferent_access
-
  if results['files'].blank?
- return 0, []
- end
-
- if @temp_upload_progressbar.blank?
- @temp_upload_progressbar = ProgressBar.create(:title => "Upload Progress",
- :progress_mark => '=',
- :format => "%b>>%i| %p%% %t",
- :starting_at => 0,
- :total => total,
- :autofinish => true)
+ progressbar.progress += tree.keys.length if progressbar.present?
+ return 0
  end
-
  files = results['files']
- upload_error_files = []
- @temp_upload_progressbar.progress += tree.keys.length - files.length if @temp_upload_progressbar.present?
- Parallel.map((files.keys), in_threads: threads) do |k|
+ progressbar.progress += tree.keys.length - files.length if progressbar.present?
+ Parallel.map((files.keys), {in_threads: threads}) do |k|
  o = tree[k].merge(files[k])
- success = upload_single_file(o)
- if not success
- upload_error_files << {absolute_path: o[:absolute_path]}
- files.except!(k)
- tree.except!(k)
- Cnvrg::Logger.log_error_message("Error while upload single file #{o["path"]}")
- end
- @temp_upload_progressbar.progress += 1 if @temp_upload_progressbar.present?
+ upload_single_file(o)
+ progressbar.progress += 1 if progressbar.present?
  end
  blob_ids = files.values.map {|f| f['bv_id']}
- if blob_ids.present?
- dirs = tree.keys.select {|k| tree[k].nil?} || []
- Cnvrg::Logger.info("Sending Upload files save")
- upload_resp = Cnvrg::API.request(@base_resource + "upload_files_save", "POST", {commit: commit_sha1, blob_ids: blob_ids, dirs: dirs})
- unless Cnvrg::CLI.is_response_success(upload_resp, false)
- Cnvrg::Logger.log_method(bind: binding)
- raise Exception.new("Got an error message from server, #{upload_resp.try(:fetch, "message")}")
- end
+ dirs = tree.keys.select {|k| tree[k].nil?} || []
+ Cnvrg::Logger.info("Sending Upload files save")
+ upload_resp = Cnvrg::API.request(@base_resource + "upload_files_save", "POST", {commit: commit_sha1, blob_ids: blob_ids, dirs: dirs})
+ unless Cnvrg::CLI.is_response_success(upload_resp, false)
+ Cnvrg::Logger.log_method(bind: binding)
+ raise Exception.new("Got an error message from server, #{upload_resp.try(:fetch, "message")}")
  end
  Cnvrg::Logger.log_info("Upload Success")
- return files.try(:keys).try(:length), upload_error_files
+ return files.keys.length
  rescue => e
  Cnvrg::Logger.log_method(bind: binding)
  Cnvrg::Logger.log_error(e)
  raise e
  end
+
  end

  def upload_single_file(file)
  begin
- file = file.as_json
- Cnvrg::Logger.log_info("Uploading #{file["absolute_path"]}")
- @downloader.safe_upload(file["path"], file["absolute_path"])
- Cnvrg::Logger.log_info("#{file["absolute_path"]} uploaded.")
+ file = file.as_json
+ Cnvrg::Logger.log_info("Uploading #{file["absolute_path"]}")
+ @downloader.upload(file["path"], file["absolute_path"])
+ Cnvrg::Logger.log_info("#{file["absolute_path"]} uploaded.")
  rescue => e
  Cnvrg::Logger.log_error_message("Error while upload single file #{file["path"]}")
  Cnvrg::Logger.log_error(e)
- return false
  end
  end

@@ -830,7 +802,6 @@ module Cnvrg
  end
  return true
  end
-
  def delete_commit_files_local(deleted)
  begin
  FileUtils.rm_rf(deleted) unless (deleted.nil? or deleted.empty?)
@@ -838,6 +809,9 @@ module Cnvrg
  rescue => e
  return Cnvrg::Result.new(false, '')
  end
+
+ return Cnvrg::Result.new(true, '')
+
  end

  def download_dir(dataset_home, absolute_path)
@@ -882,7 +856,6 @@ module Cnvrg
  Cnvrg::CLI.is_response_success(response, true)
  return response
  end
-
  def start_commit(new_branch,force=false,delete_commit=nil, chunks: 0, dataset: @dataset, message:nil)
  begin
  #if we are pushing with force or to branch we dont need to send current/next commit cause we want to
@@ -890,24 +863,14 @@ module Cnvrg
  idx = (force || new_branch) ? {} : dataset.get_idx
  commit = idx[:commit]
  next_commit = idx[:next_commit]
- response = Cnvrg::API.request("#{base_resource}/commit/start", 'POST', {dataset_slug: @dataset_slug, new_branch: new_branch,force:force,
- username: @owner,current_commit: commit, next_commit: next_commit, total_chunks: chunks, message: message})
- Cnvrg::CLI.is_response_success(response, true)
- return response
- rescue => e
- return false
- end
- end
- def last_valid_commit()
- begin
- #if we are pushing with force or to branch we dont need to send current/next commit cause we want to
- # create a new commit.
- response = Cnvrg::API.request("#{base_resource}/last_valid_commit", 'GET')
- Cnvrg::CLI.is_response_success(response, true)
- return response
+ response = Cnvrg::API.request("#{base_resource}/commit/start", 'POST', {dataset_slug: @dataset_slug, new_branch: new_branch,force:force,
+ username: @owner,current_commit: commit, next_commit: next_commit, total_chunks: chunks, message: message})
+ Cnvrg::CLI.is_response_success(response, true)
+ return response
  rescue => e
  return false
  end
+
  end

  def end_commit(commit_sha1,force, success: true, uploaded_files: 0 )
@@ -103,12 +103,8 @@ module Cnvrg

  def backup_idx
  Cnvrg::Logger.log_info("Backup idx")
- if File.exists? "#{self.local_path}/.cnvrg/idx.yml"
- FileUtils.cp "#{self.local_path}/.cnvrg/idx.yml", "#{self.local_path}/.cnvrg/idx.yml.backup"
- else
- idx = {commit: nil, tree: {}}
- File.open("#{self.local_path}/.cnvrg/idx.yml.backup", 'w') {|f| f.write idx.to_yaml}
- end
+ idx = self.get_idx
+ File.open("#{self.local_path}/.cnvrg/idx.yml.backup", 'w') {|f| f.write idx.to_yaml}
  end

  def restore_idx
@@ -784,6 +780,14 @@ module Cnvrg
  File.open("#{self.local_path}/.cnvrg/idx.yml", 'w') {|f| f.write idx_hash.to_yaml}
  end

+ def get_current_commit()
+ if !File.exist? "#{self.local_path}/.cnvrg/idx.yml"
+ return nil
+ end
+ idx_hash = YAML.load_file("#{self.local_path}/.cnvrg/idx.yml")
+ return idx_hash[:commit]
+ end
+
  def compare_commit(commit)
  if commit.nil? or commit.empty?
  commit = last_local_commit
@@ -2,8 +2,6 @@
  module Cnvrg
  module Downloader
  OLD_SERVER_VERSION_MESSAGE = "Your server version is not relevant for this cli version please contact support for further help."
- MAXIMUM_BACKOFF = 64
- RETRIES = ENV['UPLOAD_FILE_RETRIES'].try(:to_i) || 20
  attr_accessor :bucket, :client
  class Client
  def initialize(params)
@@ -43,35 +41,6 @@ module Cnvrg
  Cnvrg::Helpers.decrypt(@key, @iv, str)
  end

- def safe_upload(storage_path, local_path)
- n = 1
- error = nil
- while n <= RETRIES
- begin
- self.upload(storage_path, local_path)
- error = nil
- break
- rescue => e
- backoff_time_seconds = backoff_time(n)
-
- message = "Got error: #{e.class.name} with message: #{e.message} while uploading a single file: #{local_path}, retry: #{n} of: #{RETRIES}"
- if n < RETRIES
- message += ", next retry in: #{backoff_time_seconds} seconds"
- else
- message += ", done retry, continuing to the next file"
- end
- Cnvrg::Logger.log_error_message(message)
-
- sleep backoff_time_seconds
-
- n += 1
- error = e
- end
- end
- raise error if error.present?
- true
- end
-
  def self.factory(params)
  params = params.as_json
  case params["storage"]
@@ -84,18 +53,6 @@ module Cnvrg
  return Cnvrg::Downloader::Clients::GcpClient.new(project_id: params["project_id"], credentials: params["credentials"], bucket_name: params["bucket_name"], sts: params["sts"])
  end
  end
-
- private
-
- def random_number_milliseconds
- rand(1000) / 1000.0
- end
-
-
- def backoff_time(n)
- return [((2**n)+random_number_milliseconds), MAXIMUM_BACKOFF].min
- end
-
  end
  end
  end
@@ -20,11 +20,7 @@ module Cnvrg
  end

  def upload(storage_path, local_path)
- begin
- client.create_block_blob(@container, storage_path, File.open(local_path, "rb"))
- rescue => e
- raise e
- end
+ client.create_block_blob(@container, storage_path, File.open(local_path, "rb"))
  end

  def fetch_files(prefix: nil, marker: nil, limit: 10000)
@@ -11,7 +11,7 @@ module Cnvrg
  @tempfile = nil
  @bucket_name = Cnvrg::Helpers.decrypt(@key, @iv, bucket_name)
  init_gcp_credentials
- @storage = Google::Cloud::Storage.new(project_id: @project_id, credentials: @credentials, retries: 20)
+ @storage = Google::Cloud::Storage.new(project_id: @project_id, credentials: @credentials)
  @bucket = @storage.bucket(@bucket_name)
  @bucket.name
  rescue => e
@@ -38,11 +38,7 @@ module Cnvrg
  end

  def upload(storage_path, local_path)
- begin
- @bucket.create_file(local_path, storage_path)
- rescue => e
- raise e
- end
+ @bucket.create_file(local_path, storage_path)
  end
  end
  end
@@ -53,10 +53,7 @@ module Cnvrg
  ### storage path is the path inside s3 (after the bucket)
  # local path is fullpath for the file /home/ubuntu/user.../hazilim.py
  o = aws_bucket.object(storage_path)
- success = o.upload_file(local_path, @upload_options)
- return success
- rescue => e
- raise e
+ o.upload_file(local_path, @upload_options)
  end

  def fetch_files(prefix: nil, marker: nil, limit: 1000)
@@ -6,7 +6,6 @@ require 'net/http'
  require 'cnvrg/result'
  module Cnvrg
  class Files
- ParallelThreads = Cnvrg::Helpers.parallel_threads
  VALID_FILE_NAME = /[\x00\\:\*\?\"<>\|]/
  LARGE_FILE=1024*1024*5
  MULTIPART_SPLIT=10000000
@@ -86,7 +85,6 @@ module Cnvrg
  Cnvrg::Logger.log_info("Upload files to older server..")
  return self.upload_files_old(files_list, commit_sha1, progress: progress)
  end
-
  files_list = files_list.map{|x| [x,self.parse_file(x)]}.to_h
  resp = Cnvrg::API.request(@base_resource + "upload_files", 'POST', {files: files_list, commit: commit_sha1})
  unless Cnvrg::CLI.is_response_success(resp, false)
@@ -97,11 +95,10 @@ module Cnvrg
  files = res['files']

  #upload files
- blob_ids = Parallel.map(files.keys, in_threads: ParallelThreads) do |file|
+ blob_ids = Parallel.map(files.keys, self.get_upload_options) do |file|
  begin
  Cnvrg::Helpers.try_until_success{self.upload_single_file(files[file].merge(files_list[file]))}
  rescue => e
-
  Cnvrg::CLI.log_message("Failed to upload #{file}: #{e.message}", 'red')
  Cnvrg::Logger.log_error(e)
  Cnvrg::Logger.log_method(bind: binding)
@@ -880,7 +877,6 @@ module Cnvrg
  end

  end
-
  def delete_commit_files_local(deleted)
  begin
  FileUtils.rm_rf(deleted) unless (deleted.nil? or deleted.empty?)
@@ -888,14 +884,16 @@ module Cnvrg
  rescue => e
  return false
  end
- end

- def start_commit(new_branch,force:false, exp_start_commit:nil, job_slug: nil, job_type: nil, start_commit: nil, message: nil)
- response = Cnvrg::API.request("#{base_resource}/commit/start", 'POST', {project_slug: @project_slug, new_branch: new_branch,force:force,
- username: @owner, exp_start_commit:exp_start_commit, job_slug: job_slug, job_type: job_type, start_commit: start_commit, message: message})
- Cnvrg::CLI.is_response_success(response,false)
- return response
+ return true
+
  end
+ def start_commit(new_branch,force:false, exp_start_commit:nil, job_slug: nil, job_type: nil, start_commit: nil, message: nil)
+ response = Cnvrg::API.request("#{base_resource}/commit/start", 'POST', {project_slug: @project_slug, new_branch: new_branch,force:force,
+ username: @owner, exp_start_commit:exp_start_commit, job_slug: job_slug, job_type: job_type, start_commit: start_commit, message: message})
+ Cnvrg::CLI.is_response_success(response,false)
+ return response
+ end

  def end_commit(commit_sha1,force:false,message:"")
  response = Cnvrg::API.request("#{base_resource}/commit/end", 'POST', {commit_sha1: commit_sha1,force:force,message:message})
@@ -1,7 +1,4 @@
  require 'fileutils'
- require 'pathname'
-
-
  module Cnvrg
  class Project
  attr_reader :slug, :owner, :title, :local_path, :working_dir, :is_git, :is_branch, :machines
@@ -329,13 +326,10 @@ module Cnvrg
  def get_storage_client
  response = Cnvrg::API.request("users/#{@owner}/projects/#{@slug}/client", 'GET')
  if Cnvrg::CLI.is_response_success(response, false)
-
  client_params = response['client']
  else
-
  client_params = get_storage_client_fallback
  end
-
  Cnvrg::Downloader::Client.factory(client_params)
  end

@@ -433,8 +427,10 @@ module Cnvrg
  list_ignore_new = list_ignore.map{|x| x.gsub("//","/")} rescue []
  # list.each do |e|
  Parallel.map(list, in_threads: IDXParallelThreads) do |e|
- label = e.sub(self.local_path + "/", "")
-
+ label = e.gsub(self.local_path + "/", "")
+ if not Cnvrg::Files.valid_file_name?(label)
+ raise StandardError.new("#{label} is not a valid file name")
+ end
  if list_ignore_new.include? label
  next
  end
@@ -1,3 +1,4 @@
  module Cnvrg
- VERSION = '1.6.3.1'
+ VERSION = '1.6.10'
  end
+
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: cnvrg
  version: !ruby/object:Gem::Version
- version: 1.6.3.1
+ version: 1.6.10
  platform: ruby
  authors:
  - Yochay Ettun
@@ -10,7 +10,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-04-01 00:00:00.000000000 Z
+ date: 2020-01-12 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
@@ -210,14 +210,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 2.11.417
+ version: '2'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 2.11.417
+ version: '2'
  - !ruby/object:Gem::Dependency
  name: signet
  requirement: !ruby/object:Gem::Requirement