cnvrg 1.6.0.13 → 1.6.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/cnvrg.gemspec +1 -1
- data/lib/cnvrg/cli.rb +168 -131
- data/lib/cnvrg/data.rb +7 -2
- data/lib/cnvrg/datafiles.rb +67 -30
- data/lib/cnvrg/dataset.rb +6 -10
- data/lib/cnvrg/downloader/client.rb +43 -0
- data/lib/cnvrg/downloader/clients/azure_client.rb +5 -1
- data/lib/cnvrg/downloader/clients/gcp_client.rb +6 -2
- data/lib/cnvrg/downloader/clients/s3_client.rb +4 -1
- data/lib/cnvrg/files.rb +11 -9
- data/lib/cnvrg/project.rb +5 -6
- data/lib/cnvrg/version.rb +1 -1
- metadata +4 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz:
- data.tar.gz:
+ metadata.gz: 4292d9949e1478ec370c60a31590f1463e8c38729429d457fe4b4c69a400cee7
+ data.tar.gz: f1581daae4274c0ba34615575481f6da1acaf72ba25c86171e1a38bb99105fec
  SHA512:
- metadata.gz:
- data.tar.gz:
+ metadata.gz: 8652f82423f102949a61725653b4e04380b40c112cee9b152d72be5ce59b3869f796ae04be786f21e682e90c8342b89cd6530139c63ee5e92d8ce675a53ff0a9
+ data.tar.gz: 4858a5d92c12c550d62221bc046c08601ac7fc4b09bd2a2bf28076036655cd8e931fbaf5efd9538d40ffdbffdcb4282a7bcb1c776cb6545cb0408012bafa6260
data/cnvrg.gemspec
CHANGED
@@ -31,7 +31,7 @@ Gem::Specification.new do |spec|
  spec.add_runtime_dependency 'open4', '~> 1.3', '>= 1.3.4'
  spec.add_runtime_dependency 'highline', '~> 1.7', '>= 1.7.8'
  spec.add_runtime_dependency 'thor', '~> 0.19.0','>=0.19.1'
- spec.add_runtime_dependency 'aws-sdk', '~> 2'
+ spec.add_runtime_dependency 'aws-sdk', '~> 2.11.417'
  spec.add_runtime_dependency 'signet', '~> 0.11.0'
  spec.add_runtime_dependency 'google-cloud-env', '~> 1.2.1'
  spec.add_runtime_dependency 'google-cloud-core', '~> 1.3.2'
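For context, the pessimistic constraint now pins aws-sdk to the 2.11 series instead of any 2.x release. A quick check with RubyGems' own version classes (the version numbers below are illustrative) shows the range it accepts:

  require 'rubygems'

  req = Gem::Requirement.new('~> 2.11.417')
  req.satisfied_by?(Gem::Version.new('2.11.417'))  # => true, lower bound is inclusive
  req.satisfied_by?(Gem::Version.new('2.11.500'))  # => true, later patch releases still match
  req.satisfied_by?(Gem::Version.new('2.12.0'))    # => false, the next minor release is excluded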
data/lib/cnvrg/cli.rb
CHANGED
@@ -753,11 +753,14 @@ module Cnvrg
  desc 'data init', 'Init dataset directory', :hide => true
  method_option :public, :type => :boolean, :aliases => ["-p", "--public"], :default => false

- def init_data(public, bucket: nil)
+ def init_data(public, bucket: nil, title: nil)
  begin
  verify_logged_in(false)
  log_start(__method__, args, options)
  dataset_name = File.basename(Dir.getwd)
+ if title.present?
+ dataset_name = title
+ end
  if File.directory?(Dir.getwd + "/.cnvrg")
  config = YAML.load_file("#{Dir.getwd}/.cnvrg/config.yml")
  log_message("Directory is already linked to #{config[:dataset_slug]}", Thor::Shell::Color::RED)
@@ -845,7 +848,6 @@ module Cnvrg
  begin
  verify_logged_in(false)
  log_start(__method__, args, options)
- return if check_pod_restart[1] ## It means that all datasets downloaded successfully
  commit = options["commit"] || commit
  only_tree = options["only_tree"] || only_tree
  read = options["read"] || read || false
@@ -1182,7 +1184,7 @@ module Cnvrg
  end

  desc '', '', :hide => true
- def data_put(dataset_url, files: [], dir: '', chunk_size: 1000)
+ def data_put(dataset_url, files: [], dir: '', commit: '', chunk_size: 1000)
  begin
  verify_logged_in(false)
  log_start(__method__, args, options)
@@ -1201,31 +1203,44 @@ module Cnvrg
  end
  log_message("Uploading #{@files.size} files", Thor::Shell::Color::GREEN)
  number_of_chunks = (@files.size.to_f / chunk_size).ceil
-
-
-
+ if commit.blank?
+ response = @datafiles.start_commit(false, true, chunks: number_of_chunks)
+ unless response #means we failed in the start commit.
+ raise SignalException.new(1, "Cant put files into dataset, check the dataset id")
+ end
+ @commit = response['result']['commit_sha1']
+ elsif commit.eql? "latest"
+ response = @datafiles.last_valid_commit()
+ unless response #means we failed in the start commit.
+ raise SignalException.new(1, "Cant put files into commit:#{commit}, check the dataset id and commitc")
+ end
+ @commit = response['result']['sha1']
+ else
+ @commit = commit
  end
- @commit = response['result']['commit_sha1']
  #dir shouldnt have starting or ending slash.
  dir = dir[0..-2] if dir.end_with? '/'
  dir = dir[1..-1] if dir.start_with? '/'

- progressbar = ProgressBar.create(:title => "Upload Progress",
- :progress_mark => '=',
- :format => "%b>>%i| %p%% %t",
- :starting_at => 0,
- :total => @files.size,
- :autofinish => true)
  @files.each_slice(chunk_size).each do |list_files|
  temp_tree = @dataset.generate_chunked_idx(list_files, prefix: dir)
  #will throw a signal exception if something goes wrong.
- @datafiles.upload_multiple_files(@commit, temp_tree, force: true,
+ @datafiles.upload_multiple_files(@commit, temp_tree, force: true, prefix: dir, total: @files.size)
  end
-
-
-
+ if commit.blank?
+ res = @datafiles.put_commit(@commit)
+ unless res.is_success?
+ raise SignalException.new(1, res.msg)
+ end
+ else
+ res = @datafiles.end_commit(@commit,false, success: true )
+ msg = res['result']
+ response = Cnvrg::Result.new(Cnvrg::CLI.is_response_success(res, true), msg)
+ unless response.is_success?
+ raise SignalException.new(1, res.msg)
+ end
  end
- log_message("
+ log_message("Uploading files finished Successfully", Thor::Shell::Color::GREEN)
  rescue SignalException => e
  log_message(e.message, Thor::Shell::Color::RED)
  return false
@@ -1781,7 +1796,6 @@ module Cnvrg
  begin
  verify_logged_in(false)
  log_start(__method__, args, options)
- return if check_pod_restart[0] ## It means that project downloaded successfully
  url_parts = project_url.split("/")
  project_index = Cnvrg::Helpers.look_for_in_path(project_url, "projects")
  slug = url_parts[project_index + 1]
@@ -1950,12 +1964,11 @@ module Cnvrg
  log_message('Syncing dataset', Thor::Shell::Color::BLUE, !options["verbose"])
  if !force and !init
  # w(verbose=false, new_branch=false,sync=false, commit=nil,all_files=true)
- invoke :download_data_new,[verbose, new_branch, true, commit, all_files], :new_branch=>new_branch, :direct=>false, :force =>force
+ total_deleted, total_downloaded = invoke :download_data_new,[verbose, new_branch, true, commit, all_files], :new_branch=>new_branch, :direct=>false, :force =>force
  end
-
  # w(new_branch, verbose,sync,force, tags, chunk_size)
- invoke :upload_data_new,[new_branch, verbose, true, force, tags, chunk_size, message:message
- :direct=>false, :force =>force, :sync =>true, :tags =>tags, :parallel => parallel, :message => message
+ invoke :upload_data_new,[new_branch, verbose, true, force, tags, chunk_size, message:message, total_deleted: total_deleted, total_downloaded: total_downloaded],
+ :new_branch=>new_branch, :direct=>false, :force =>force, :sync =>true, :tags =>tags, :parallel => parallel, :message => message

  end
  desc 'upload_data_new', 'upload_data_new', :hide => true
@@ -1967,31 +1980,47 @@ module Cnvrg
  method_option :parallel, :type => :numeric, :aliases => ["-p", "--parallel"], :desc => "uparallel upload at the same time", :default => 15
  method_option :message, :type => :string, :aliases => ["--message"], :desc => "create commit with message", :default => nil

- def upload_data_new(new_branch, verbose, sync, force, tags, chunk_size, message:nil)
+ def upload_data_new(new_branch, verbose, sync, force, tags, chunk_size, message:nil, total_deleted: 0, total_downloaded: 0)
  begin
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ commit, files_list = invoke :start_commit_data,[], :new_branch=> new_branch, :direct=>false, :force =>force, :chunk_size => chunk_size, :message => message
+ files_to_upload, upload_errors = invoke :upload_data_files,[commit, files_list: files_list],:new_branch=>new_branch, :verbose =>verbose, :force =>force, :sync =>sync, :chunk_size => chunk_size
+
+ upload_size = files_to_upload + upload_errors.try(:size) rescue 0
+ invoke :end_commit_data,[commit, success: true, uploaded_files: files_to_upload, sync: sync], :new_branch=>new_branch, :force =>force
+ if tags
+ log_message('Uploading Tags', Thor::Shell::Color::BLUE)
+ dataset_dir = is_cnvrg_dir(Dir.pwd)
+ @dataset = Dataset.new(dataset_dir)
+ begin
+ tag_file = File.open(options[:tags], "r+")
+ status = @dataset.upload_tags_via_yml(tag_file)
+ rescue
+ log_message('Tags file not found', Thor::Shell::Color::RED)
+ return
+ end
+ if status
+ log_message('Tags are successfully uploaded', Thor::Shell::Color::GREEN)
+ else
+ log_message('There was some error in uploading Tags', Thor::Shell::Color::RED)
+ end
  end
- if
- log_message(
-
-
+ if total_deleted > 0
+ log_message("#{total_deleted} files deleted successfully.", Thor::Shell::Color::GREEN)
+ end
+
+ if total_downloaded > 0
+ log_message("#{total_downloaded} files downloaded successfully.", Thor::Shell::Color::GREEN)
+ end
+ if upload_size > 0
+ log_message("#{files_to_upload}/#{upload_size} files uploaded successfully.", Thor::Shell::Color::GREEN)
+ end
+
+ if upload_errors.try(:size) > 0
+ log_message("#{upload_errors.try(:size)}/#{upload_size} files didn't upload:", Thor::Shell::Color::RED)
+ upload_errors.each do |file_hash|
+ log_message("#{file_hash[:absolute_path]}", Thor::Shell::Color::RED)
+ end
  end
- end
  rescue => e
  Cnvrg::CLI.log_message(e.message, 'red')
  Cnvrg::Logger.log_error(e)
@@ -2021,17 +2050,16 @@ module Cnvrg
  verify_logged_in(true)
  log_start(__method__, args, options)
  dataset_dir = is_cnvrg_dir(Dir.pwd)
- direct = options[:direct]
  new_branch = options["new_branch"] || false
  force = options["force"] || false
  chunk_size = options["chunk_size"] || false
  message = options["message"]
- commit_sha1 = nil
  @dataset = Dataset.new(dataset_dir)
  @dataset.backup_idx
  @files = Cnvrg::Datafiles.new(@dataset.owner, @dataset.slug, dataset: @dataset)
  next_commit = @dataset.get_next_commit #if there was a partial commit..
-
+ files_list = @dataset.list_all_files
+ chunks = (files_list.length.to_f / chunk_size).ceil
  resp = @files.start_commit(new_branch, force, chunks: chunks, dataset: @dataset, message: message)
  if !resp['result']['can_commit']
  log_message("Cant upload files because a new version of this dataset exists, please download it or upload with --force", Thor::Shell::Color::RED)
@@ -2042,7 +2070,7 @@ module Cnvrg
  @dataset.set_partial_commit(next_commit)
  end
  @dataset.set_next_commit(commit_sha1)
- return commit_sha1
+ return commit_sha1, files_list
  end

@@ -2050,7 +2078,7 @@ module Cnvrg
  method_option :new_branch, :type => :boolean, :aliases => ["-nb"], :desc => "create new branch of commits"
  method_option :force, :type => :boolean, :aliases => ["-f","--force"], :default => false

- def end_commit_data(commit, success: true, uploaded_files: 0)
+ def end_commit_data(commit, success: true, uploaded_files: 0, sync: false)
  begin
  verify_logged_in(true)
  log_start(__method__, args, options)
@@ -2065,7 +2093,12 @@ module Cnvrg
  @dataset.revert_next_commit #removes the next commit
  log_message("#{check} Dataset is up to date", Thor::Shell::Color::GREEN)
  else
-
+ if sync
+ message = "#{check} Data sync finished"
+ else
+ message = "#{check} Data upload finished"
+ end
+ log_message(message, Thor::Shell::Color::GREEN)
  @dataset.remove_next_commit #takes the next commit and put it as current commit
  @dataset.set_partial_commit(nil)
  @dataset.backup_idx
@@ -2118,54 +2151,53 @@ module Cnvrg
  # method_option :tags_yml, :type => :boolean, :aliases => ["--file_tag_yml"], :default => false
  method_option :parallel, :type => :numeric, :aliases => ["-p", "--parallel"], :desc => "uparallel upload at the same time", :default => 15

- def upload_data_files(new_commit,
+ def upload_data_files(new_commit, files_list: [])
  begin
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- @dataset.write_tree(new_tree) #we dont want to re-run it every time so just on finish.
+ verify_logged_in(true)
+ log_start(__method__, args, options)
+ dataset_dir = is_cnvrg_dir(Dir.pwd)
+ @dataset = Dataset.new(dataset_dir)
+ @files = Cnvrg::Datafiles.new(@dataset.owner, @dataset.slug, dataset: @dataset)
+ new_commit ||= @dataset.get_next_commit
+ partial_commit = @dataset.get_partial_commit
+ if new_commit.blank?
+ log_message("You must specify commit, run start_commit to create new commit", Thor::Shell::Color::RED)
+ return false
+ end
+ chunk_size = options[:chunk_size]
+ chunk_size = [chunk_size, 1].max
+ new_branch = options["new_branch"] || false
+ new_tree = {}
+ force = options["force"] || false
+ parallel_threads = options["parallel"] || ParallelThreads
+ all_files = files_list
+ all_files = @dataset.list_all_files if files_list.blank?
+ files_uploaded = 0
+ upload_errors = []
+
+ all_files.each_slice(chunk_size).each do |list_files|
+ Cnvrg::Logger.log_info("Uploading files into #{@dataset.slug}, #{files_uploaded} files uploaded")
+ temp_tree = @dataset.generate_chunked_idx(list_files, threads: parallel_threads)
+ upload_resp, upload_error_files = @files.upload_multiple_files(new_commit, temp_tree,
+ threads: parallel_threads,
+ force: force,
+ new_branch: new_branch,
+ partial_commit: partial_commit,
+ total: all_files.length)
+
+ files_uploaded += upload_resp
+ upload_errors += upload_error_files if upload_error_files.present?
+ temp_tree.each do |k, v|
+ new_tree[k] = (v.present?) ? {sha1: v.try(:fetch, :sha1, nil), commit_time: nil} : nil
+ end
+ end
+
+ @dataset.write_tree(new_tree) #we dont want to re-run it every time so just on finish.
  rescue => e
  Cnvrg::Logger.log_error(e)
  raise e
  end
- return files_uploaded
+ return files_uploaded, upload_errors.try(:flatten).try(:compact)
  end

@@ -2293,7 +2325,9 @@ module Cnvrg
  :total => (to_upload.size + deleted.size),
  :autofinish => true)
  @files.upload_multiple_files(to_upload, commit_sha1, progress: progressbar)
+
  @files.delete_files_from_server(deleted, commit_sha1)
+
  progressbar.finish
  res = @files.end_commit(commit_sha1, force: force, message: commit_msg)
  unless Cnvrg::CLI.is_response_success(res, false)
@@ -2431,52 +2465,51 @@ module Cnvrg
  dataset_dir = is_cnvrg_dir(Dir.pwd)
  @dataset = Dataset.new(dataset_dir)
  @files = Cnvrg::Datafiles.new(@dataset.owner, @dataset.slug, dataset: @dataset)
- all_files = all_files
- @dataset.generate_idx
  res = @dataset.compare_idx_download(all_files: all_files, desired_commit: commit)
  unless CLI.is_response_success(res, false)
  log_message("Cant find the desired commit, please check it or try to download without it.", Thor::Shell::Color::RED)
  exit(1)
  end
- result = res["result"]
-
-
-
- update_total = [result['added'], result["updated_on_server"], result["conflicts"], result["deleted"]].compact.flatten.size
+ result = res["result"]
+ tree = result["tree"]
+ commit = result["commit"]
+ update_total = [tree['added'], tree["updated_on_server"], tree["conflicts"], tree["deleted"]].compact.flatten.size
  successful_changes = 0
- if update_total == 0
- log_message("Dataset is up to date", Thor::Shell::Color::GREEN
- return
- elsif options["verbose"]
- log_message("Downloading #{update_total} files", Thor::Shell::Color::BLUE)
+ if update_total == 0
+ log_message("Dataset is up to date", Thor::Shell::Color::GREEN, !sync)
+ return 0, 0
  else
+ log_message("Downloading #{update_total} files", Thor::Shell::Color::BLUE, options["verbose"])
  log_message("Syncing Dataset", Thor::Shell::Color::BLUE, !sync)
  end
- Cnvrg::Logger.log_info("Current commit: #{@dataset.
- Cnvrg::Logger.log_info("Compare idx res: #{
+ Cnvrg::Logger.log_info("Current commit: #{@dataset.last_local_commit}, destination commit: #{commit}")
+ Cnvrg::Logger.log_info("Compare idx res: #{tree}")
  progressbar = ProgressBar.create(:title => "Download Progress",
-
-
-
-
-
- conflicts = @files.mark_conflicts(result)
+ :progress_mark => '=',
+ :format => "%b>>%i| %p%% %t",
+ :starting_at => 0,
+ :total => update_total,
+ :autofinish => true)

+ conflicts = @files.mark_conflicts(tree)
  log_message("Found some conflicts, check .conflict files.", Thor::Shell::Color::BLUE) if conflicts > 0
- update_res = @files.download_files_in_chunks(
- added_res = @files.download_files_in_chunks(
-
- deleted = result["deleted"].to_a
+ update_res = @files.download_files_in_chunks(tree["updated_on_server"], progress: progressbar) if tree["updated_on_server"].present?
+ added_res = @files.download_files_in_chunks(tree["added"], progress: progressbar) if tree["added"].present?
+ deleted = tree["deleted"].to_a
  delete_res = @files.delete_commit_files_local(deleted)
-
+
  if !delete_res
  log_message("Couldn't delete #{deleted.join(" ")}", Thor::Shell::Color::RED)
  log_message("Couldn't download, Rolling Back all changes.", Thor::Shell::Color::RED)
  exit(1)
  end
+
+ progressbar.progress += deleted.size if progressbar.present? and deleted.size > 0
+
  success = (update_res.blank? or update_res.is_success?)
  success &= (delete_res.blank? or delete_res.is_success?)
  success &= (added_res.blank? or added_res.is_success?)
+
  if success
  # update idx with latest commit
  @dataset.update_idx_with_commit!(commit)
@@ -2486,17 +2519,24 @@ module Cnvrg
  log_message(successful_changes.join("\n"), Thor::Shell::Color::GREEN)
  log_message("Total of #{successful_changes.size} / #{update_total} files.", Thor::Shell::Color::GREEN)
  else
- log_message("#{check} Downloaded changes successfully", Thor::Shell::Color::GREEN,
+ log_message("#{check} Downloaded changes successfully", Thor::Shell::Color::GREEN, !sync)
  end
-
+
+ total_deleted = deleted.try(:size)
+ total_downloaded = tree["added"].try(:size) || 0
+ total_downloaded += tree["updated_on_server"].try(:size) if tree["updated_on_server"].present?
+
+ return total_deleted, total_downloaded
+ else
+ return []
  end
  rescue SignalException => e
  Cnvrg::Logger.log_error(e)
  say "\nAborting", Thor::Shell::Color::BLUE
  exit(1)
  rescue => e
- log_message("Error occurred, \nAborting", Thor::Shell::Color::BLUE)
  Cnvrg::Logger.log_error(e)
+ log_message("Error occurred, \nAborting", Thor::Shell::Color::RED)
  exit(1)
  end
  end
@@ -2854,7 +2894,7 @@ module Cnvrg
  method_option :job_type, :type => :string, :aliases => ["-jt", "--job_type"], :default => nil
  method_option :files, :type => :string, :aliases => ["--files"], :default => nil
  method_option :output_dir, :type => :string, :aliases => ["--output_dir"], :default => nil
- def
+ def sync(direct = true)
  verify_logged_in(true) if direct
  @project = Project.new(get_project_home)
  log_start(__method__, args, options)
@@ -2898,7 +2938,7 @@ module Cnvrg
  method_option :image, :type => :string, :aliases => ["--image"], :default => nil
  method_option :grid, :type => :string, :aliases => ["-g", "--grid"], :default => ""
  method_option :data, :type => :string, :aliases => ["-d", "--data"], :default => ""
- method_option :datasets, :type => :string, :aliases => ["--datasets"], :desc => "'[{\"id\": \"dataset id\", \"commit\": \"commit id\", \"query\": \"query name\", \"tree_only\": true]'", :default => ""
+ method_option :datasets, :type => :string, :aliases => ["--datasets"], :desc => "'[{\"id\": \"dataset id\", \"commit\": \"commit id\", \"query\": \"query name\", \"tree_only\": true, \"use_cached\": true]'", :default => ""
  method_option :data_commit, :type => :string, :aliases => ["--data_commit"], :default => ""
  method_option :ignore, :type => :string, :aliases => ["-i", "--ignore"], :desc => "ignore following files", :default => ""
  method_option :force, :type => :boolean, :aliases => ["-f", "--force"], :default => false
@@ -3382,10 +3422,6 @@ module Cnvrg
  output_dir = "output"
  end
  image = options["image"] || nil
- if image.blank?
- image = "cnvrg"
- end
-
  forced_commit = nil
  if sync_before and !project.is_git
  if force
@@ -5487,7 +5523,7 @@ module Cnvrg
  end
  end

- def self.log_message(message, type = Thor::Shell::Color::BLUE
+ def self.log_message(message, type = Thor::Shell::Color::BLUE)
  if $LOG.blank?
  ### handle case when $LOG is not initialized
  CLI.new.log_handler
@@ -5509,11 +5545,12 @@ module Cnvrg
  color = nil
  $LOG.info message: message, type: "unknown"
  end
- say "#{color}#{message}#{Thor::Shell::Color::CLEAR}"
+ say "#{color}#{message}#{Thor::Shell::Color::CLEAR}"
  end

  def log_message(message, type=Thor::Shell::Color::GREEN, to_print = true)
-
+ return if not to_print
+ CLI.log_message(message, type)
  end

  def log_error(e)
data/lib/cnvrg/data.rb
CHANGED
@@ -22,11 +22,14 @@ module Cnvrg
  desc "data init", "Set current directory as dataset directory"
  method_option :public, :type => :boolean, :aliases => ["-p", "--public"], :default => false
  method_option :bucket, :type => :string, :aliases => ["-b", "--bucket"], :default => ""
+ method_option :title, :type => :string, :aliases => ["-t", "--title"], :default => ""
+
  def init
  cli = Cnvrg::CLI.new()
  public = options["public"]
  bucket = options["bucket"]
-
+ title = options["title"]
+ cli.init_data(public, bucket: bucket, title: title)
  end

  desc "data link DATASET_SLUG", "Set current directory as dataset directory"
@@ -182,10 +185,12 @@ module Cnvrg

  desc 'data put DATASET_URL FILES_PREFIX', 'Upload selected files from local dataset directory to remote server'
  method_option :dir, :type => :string, :aliases => ["-d", "--dir"], :default => ''
+ method_option :commit, :type => :string, :aliases => ["-c", "--commit"], :default => ''
  def put(dataset_url, *files)
  cli = Cnvrg::CLI.new()
  dir = options[:dir]
-
+ commit = options[:commit]
+ cli.data_put(dataset_url, files: files, dir: dir, commit: commit)
  end

  desc 'data clone_query --query=QUERY_SLUG DATASET_URL', 'Clone dataset with specific query'
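Combined with the CLI changes above, the new Thor options map directly onto init_data and data_put. A minimal sketch of the resulting calls, assuming the gem is loaded and the user is logged in (the dataset URL and file names below are placeholders):

  require 'cnvrg/cli'

  cli = Cnvrg::CLI.new
  # cnvrg data init --title=my-dataset
  cli.init_data(false, bucket: "", title: "my-dataset")
  # cnvrg data put DATASET_URL a.csv b.csv --dir=raw --commit=latest
  cli.data_put("https://app.cnvrg.io/example/datasets/my-dataset",
               files: ["a.csv", "b.csv"], dir: "raw", commit: "latest")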
data/lib/cnvrg/datafiles.rb
CHANGED
@@ -114,54 +114,82 @@ module Cnvrg
  end


- def upload_multiple_files(commit_sha1, tree, threads: ParallelThreads, force: false, new_branch: false,
+ def upload_multiple_files(commit_sha1, tree, threads: ParallelThreads, force: false, new_branch: false, prefix: '', partial_commit: nil, total: nil)
  begin
  Cnvrg::Logger.log_info("Sending Upload Files request")
-
-
+ error = nil
+ upload_resp = nil
+ 10.times do
+ upload_resp = Cnvrg::API.request(@base_resource + "upload_files", 'POST_JSON', {commit_sha1: commit_sha1, tree: tree, force: force, is_branch: new_branch, partial_commit: partial_commit})
+ if Cnvrg::CLI.is_response_success(upload_resp, false)
+ error = nil
+ break
+ end
+ error = upload_resp
  Cnvrg::Logger.log_method(bind: binding)
-
+ Cnvrg::Logger.log_info("Got an error message from server, #{upload_resp.try(:fetch, "message")}, trying again")
  end
+ raise Exception.new("Can't upload data files: #{error["message"]}") if error.present?
+
  Cnvrg::Logger.log_info("Uploading files")
  results = upload_resp['result'].with_indifferent_access
+
  if results['files'].blank?
-
-
+ return 0, []
+ end
+
+ if @temp_upload_progressbar.blank?
+ @temp_upload_progressbar = ProgressBar.create(:title => "Upload Progress",
+ :progress_mark => '=',
+ :format => "%b>>%i| %p%% %t",
+ :starting_at => 0,
+ :total => total,
+ :autofinish => true)
  end
+
  files = results['files']
-
-
+ upload_error_files = []
+ @temp_upload_progressbar.progress += tree.keys.length - files.length if @temp_upload_progressbar.present?
+ Parallel.map((files.keys), in_threads: threads) do |k|
  o = tree[k].merge(files[k])
- upload_single_file(o)
-
+ success = upload_single_file(o)
+ if not success
+ upload_error_files << {absolute_path: o[:absolute_path]}
+ files.except!(k)
+ tree.except!(k)
+ Cnvrg::Logger.log_error_message("Error while upload single file #{o["path"]}")
+ end
+ @temp_upload_progressbar.progress += 1 if @temp_upload_progressbar.present?
  end
  blob_ids = files.values.map {|f| f['bv_id']}
-
-
-
-
- Cnvrg::
-
+ if blob_ids.present?
+ dirs = tree.keys.select {|k| tree[k].nil?} || []
+ Cnvrg::Logger.info("Sending Upload files save")
+ upload_resp = Cnvrg::API.request(@base_resource + "upload_files_save", "POST", {commit: commit_sha1, blob_ids: blob_ids, dirs: dirs})
+ unless Cnvrg::CLI.is_response_success(upload_resp, false)
+ Cnvrg::Logger.log_method(bind: binding)
+ raise Exception.new("Got an error message from server, #{upload_resp.try(:fetch, "message")}")
+ end
  end
  Cnvrg::Logger.log_info("Upload Success")
- return files.keys.length
+ return files.try(:keys).try(:length), upload_error_files
  rescue => e
  Cnvrg::Logger.log_method(bind: binding)
  Cnvrg::Logger.log_error(e)
  raise e
  end
-
  end

  def upload_single_file(file)
  begin
-
-
-
-
+ file = file.as_json
+ Cnvrg::Logger.log_info("Uploading #{file["absolute_path"]}")
+ @downloader.safe_upload(file["path"], file["absolute_path"])
+ Cnvrg::Logger.log_info("#{file["absolute_path"]} uploaded.")
  rescue => e
  Cnvrg::Logger.log_error_message("Error while upload single file #{file["path"]}")
  Cnvrg::Logger.log_error(e)
+ return false
  end
  end

@@ -802,6 +830,7 @@ module Cnvrg
  end
  return true
  end
+
  def delete_commit_files_local(deleted)
  begin
  FileUtils.rm_rf(deleted) unless (deleted.nil? or deleted.empty?)
@@ -809,9 +838,6 @@ module Cnvrg
  rescue => e
  return Cnvrg::Result.new(false, '')
  end
-
- return Cnvrg::Result.new(true, '')
-
  end

  def download_dir(dataset_home, absolute_path)
@@ -856,6 +882,7 @@ module Cnvrg
  Cnvrg::CLI.is_response_success(response, true)
  return response
  end
+
  def start_commit(new_branch,force=false,delete_commit=nil, chunks: 0, dataset: @dataset, message:nil)
  begin
  #if we are pushing with force or to branch we dont need to send current/next commit cause we want to
@@ -863,14 +890,24 @@ module Cnvrg
  idx = (force || new_branch) ? {} : dataset.get_idx
  commit = idx[:commit]
  next_commit = idx[:next_commit]
-
-
-
-
+ response = Cnvrg::API.request("#{base_resource}/commit/start", 'POST', {dataset_slug: @dataset_slug, new_branch: new_branch,force:force,
+ username: @owner,current_commit: commit, next_commit: next_commit, total_chunks: chunks, message: message})
+ Cnvrg::CLI.is_response_success(response, true)
+ return response
+ rescue => e
+ return false
+ end
+ end
+ def last_valid_commit()
+ begin
+ #if we are pushing with force or to branch we dont need to send current/next commit cause we want to
+ # create a new commit.
+ response = Cnvrg::API.request("#{base_resource}/last_valid_commit", 'GET')
+ Cnvrg::CLI.is_response_success(response, true)
+ return response
  rescue => e
  return false
  end
-
  end

  def end_commit(commit_sha1,force, success: true, uploaded_files: 0 )
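The rewritten upload_multiple_files above retries the upload_files request up to 10 times before giving up, keeping the last failing response around for the final error message. Stripped of the cnvrg-specific calls, the retry pattern is roughly the following sketch (do_request and success? are stand-ins, not gem methods):

  error = nil
  response = nil
  10.times do
    response = do_request            # stand-in for Cnvrg::API.request(...)
    if success?(response)            # stand-in for Cnvrg::CLI.is_response_success(...)
      error = nil
      break
    end
    error = response                 # remember the last failure before retrying
  end
  raise "Can't upload data files: #{error['message']}" if error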
data/lib/cnvrg/dataset.rb
CHANGED
@@ -103,8 +103,12 @@ module Cnvrg

  def backup_idx
  Cnvrg::Logger.log_info("Backup idx")
-
-
+ if File.exists? "#{self.local_path}/.cnvrg/idx.yml"
+ FileUtils.cp "#{self.local_path}/.cnvrg/idx.yml", "#{self.local_path}/.cnvrg/idx.yml.backup"
+ else
+ idx = {commit: nil, tree: {}}
+ File.open("#{self.local_path}/.cnvrg/idx.yml.backup", 'w') {|f| f.write idx.to_yaml}
+ end
  end

  def restore_idx
@@ -780,14 +784,6 @@ module Cnvrg
  File.open("#{self.local_path}/.cnvrg/idx.yml", 'w') {|f| f.write idx_hash.to_yaml}
  end

- def get_current_commit()
- if !File.exist? "#{self.local_path}/.cnvrg/idx.yml"
- return nil
- end
- idx_hash = YAML.load_file("#{self.local_path}/.cnvrg/idx.yml")
- return idx_hash[:commit]
- end
-
  def compare_commit(commit)
  if commit.nil? or commit.empty?
  commit = last_local_commit
data/lib/cnvrg/downloader/client.rb
CHANGED
@@ -2,6 +2,8 @@
  module Cnvrg
  module Downloader
  OLD_SERVER_VERSION_MESSAGE = "Your server version is not relevant for this cli version please contact support for further help."
+ MAXIMUM_BACKOFF = 64
+ RETRIES = ENV['UPLOAD_FILE_RETRIES'].try(:to_i) || 20
  attr_accessor :bucket, :client
  class Client
  def initialize(params)
|
|
41
43
|
Cnvrg::Helpers.decrypt(@key, @iv, str)
|
42
44
|
end
|
43
45
|
|
46
|
+
def safe_upload(storage_path, local_path)
|
47
|
+
n = 1
|
48
|
+
error = nil
|
49
|
+
while n <= RETRIES
|
50
|
+
begin
|
51
|
+
self.upload(storage_path, local_path)
|
52
|
+
error = nil
|
53
|
+
break
|
54
|
+
rescue => e
|
55
|
+
backoff_time_seconds = backoff_time(n)
|
56
|
+
|
57
|
+
message = "Got error: #{e.class.name} with message: #{e.message} while uploading a single file: #{local_path}, retry: #{n} of: #{RETRIES}"
|
58
|
+
if n < RETRIES
|
59
|
+
message += ", next retry in: #{backoff_time_seconds} seconds"
|
60
|
+
else
|
61
|
+
message += ", done retry, continuing to the next file"
|
62
|
+
end
|
63
|
+
Cnvrg::Logger.log_error_message(message)
|
64
|
+
|
65
|
+
sleep backoff_time_seconds
|
66
|
+
|
67
|
+
n += 1
|
68
|
+
error = e
|
69
|
+
end
|
70
|
+
end
|
71
|
+
raise error if error.present?
|
72
|
+
true
|
73
|
+
end
|
74
|
+
|
44
75
|
def self.factory(params)
|
45
76
|
params = params.as_json
|
46
77
|
case params["storage"]
|
@@ -53,6 +84,18 @@ module Cnvrg
|
|
53
84
|
return Cnvrg::Downloader::Clients::GcpClient.new(project_id: params["project_id"], credentials: params["credentials"], bucket_name: params["bucket_name"], sts: params["sts"])
|
54
85
|
end
|
55
86
|
end
|
87
|
+
|
88
|
+
private
|
89
|
+
|
90
|
+
def random_number_milliseconds
|
91
|
+
rand(1000) / 1000.0
|
92
|
+
end
|
93
|
+
|
94
|
+
|
95
|
+
def backoff_time(n)
|
96
|
+
return [((2**n)+random_number_milliseconds), MAXIMUM_BACKOFF].min
|
97
|
+
end
|
98
|
+
|
56
99
|
end
|
57
100
|
end
|
58
101
|
end
|
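The safe_upload/backoff_time pair added here implements truncated exponential backoff with jitter: each failed attempt waits roughly 2**n seconds plus up to one second of random jitter, capped at MAXIMUM_BACKOFF. A self-contained sketch of the same timing logic in plain Ruby (without the ActiveSupport try used above):

  MAXIMUM_BACKOFF = 64                                # seconds, as in the constant above
  RETRIES = (ENV['UPLOAD_FILE_RETRIES'] || 20).to_i   # same default of 20 attempts

  def backoff_time(n)
    # 2**n seconds plus up to one second of jitter, never more than MAXIMUM_BACKOFF
    [(2**n) + rand(1000) / 1000.0, MAXIMUM_BACKOFF].min
  end

  (1..6).each { |n| puts "retry #{n}: sleep ~#{backoff_time(n).round(2)}s" }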
data/lib/cnvrg/downloader/clients/azure_client.rb
CHANGED
@@ -20,7 +20,11 @@ module Cnvrg
  end

  def upload(storage_path, local_path)
-
+ begin
+ client.create_block_blob(@container, storage_path, File.open(local_path, "rb"))
+ rescue => e
+ raise e
+ end
  end

  def fetch_files(prefix: nil, marker: nil, limit: 10000)
data/lib/cnvrg/downloader/clients/gcp_client.rb
CHANGED
@@ -11,7 +11,7 @@ module Cnvrg
  @tempfile = nil
  @bucket_name = Cnvrg::Helpers.decrypt(@key, @iv, bucket_name)
  init_gcp_credentials
- @storage = Google::Cloud::Storage.new(project_id: @project_id, credentials: @credentials)
+ @storage = Google::Cloud::Storage.new(project_id: @project_id, credentials: @credentials, retries: 20)
  @bucket = @storage.bucket(@bucket_name)
  @bucket.name
  rescue => e
@@ -38,7 +38,11 @@ module Cnvrg
  end

  def upload(storage_path, local_path)
-
+ begin
+ @bucket.create_file(local_path, storage_path)
+ rescue => e
+ raise e
+ end
  end
  end
  end
data/lib/cnvrg/downloader/clients/s3_client.rb
CHANGED
@@ -53,7 +53,10 @@ module Cnvrg
  ### storage path is the path inside s3 (after the bucket)
  # local path is fullpath for the file /home/ubuntu/user.../hazilim.py
  o = aws_bucket.object(storage_path)
- o.upload_file(local_path, @upload_options)
+ success = o.upload_file(local_path, @upload_options)
+ return success
+ rescue => e
+ raise e
  end

  def fetch_files(prefix: nil, marker: nil, limit: 1000)
data/lib/cnvrg/files.rb
CHANGED
@@ -6,6 +6,7 @@ require 'net/http'
  require 'cnvrg/result'
  module Cnvrg
  class Files
+ ParallelThreads = Cnvrg::Helpers.parallel_threads
  VALID_FILE_NAME = /[\x00\\:\*\?\"<>\|]/
  LARGE_FILE=1024*1024*5
  MULTIPART_SPLIT=10000000
@@ -85,6 +86,7 @@ module Cnvrg
  Cnvrg::Logger.log_info("Upload files to older server..")
  return self.upload_files_old(files_list, commit_sha1, progress: progress)
  end
+
  files_list = files_list.map{|x| [x,self.parse_file(x)]}.to_h
  resp = Cnvrg::API.request(@base_resource + "upload_files", 'POST', {files: files_list, commit: commit_sha1})
  unless Cnvrg::CLI.is_response_success(resp, false)
@@ -95,10 +97,11 @@ module Cnvrg
  files = res['files']

  #upload files
- blob_ids = Parallel.map(files.keys,
+ blob_ids = Parallel.map(files.keys, in_threads: ParallelThreads) do |file|
  begin
  Cnvrg::Helpers.try_until_success{self.upload_single_file(files[file].merge(files_list[file]))}
  rescue => e
+
  Cnvrg::CLI.log_message("Failed to upload #{file}: #{e.message}", 'red')
  Cnvrg::Logger.log_error(e)
  Cnvrg::Logger.log_method(bind: binding)
@@ -877,6 +880,7 @@ module Cnvrg
  end

  end
+
  def delete_commit_files_local(deleted)
  begin
  FileUtils.rm_rf(deleted) unless (deleted.nil? or deleted.empty?)
@@ -884,16 +888,14 @@ module Cnvrg
  rescue => e
  return false
  end
+ end

-
-
+ def start_commit(new_branch,force:false, exp_start_commit:nil, job_slug: nil, job_type: nil, start_commit: nil, message: nil)
+ response = Cnvrg::API.request("#{base_resource}/commit/start", 'POST', {project_slug: @project_slug, new_branch: new_branch,force:force,
+ username: @owner, exp_start_commit:exp_start_commit, job_slug: job_slug, job_type: job_type, start_commit: start_commit, message: message})
+ Cnvrg::CLI.is_response_success(response,false)
+ return response
  end
- def start_commit(new_branch,force:false, exp_start_commit:nil, job_slug: nil, job_type: nil, start_commit: nil, message: nil)
- response = Cnvrg::API.request("#{base_resource}/commit/start", 'POST', {project_slug: @project_slug, new_branch: new_branch,force:force,
- username: @owner, exp_start_commit:exp_start_commit, job_slug: job_slug, job_type: job_type, start_commit: start_commit, message: message})
- Cnvrg::CLI.is_response_success(response,false)
- return response
- end
  end

  def end_commit(commit_sha1,force:false,message:"")
  response = Cnvrg::API.request("#{base_resource}/commit/end", 'POST', {commit_sha1: commit_sha1,force:force,message:message})
data/lib/cnvrg/project.rb
CHANGED
@@ -329,10 +329,13 @@ module Cnvrg
  def get_storage_client
  response = Cnvrg::API.request("users/#{@owner}/projects/#{@slug}/client", 'GET')
  if Cnvrg::CLI.is_response_success(response, false)
+
  client_params = response['client']
  else
+
  client_params = get_storage_client_fallback
  end
+
  Cnvrg::Downloader::Client.factory(client_params)
  end

@@ -429,13 +432,9 @@ module Cnvrg
  end
  list_ignore_new = list_ignore.map{|x| x.gsub("//","/")} rescue []
  # list.each do |e|
- project_root = Pathname.new(self.local_path)
  Parallel.map(list, in_threads: IDXParallelThreads) do |e|
-
-
- if not Cnvrg::Files.valid_file_name?(label)
- raise StandardError.new("#{label} is not a valid file name")
- end
+ label = e.sub(self.local_path + "/", "")
+
  if list_ignore_new.include? label
  next
  end
data/lib/cnvrg/version.rb
CHANGED
metadata
CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: cnvrg
  version: !ruby/object:Gem::Version
- version: 1.6.
+ version: 1.6.3
  platform: ruby
  authors:
  - Yochay Ettun
@@ -10,7 +10,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-
+ date: 2020-03-31 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
@@ -210,14 +210,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version:
+ version: 2.11.417
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version:
+ version: 2.11.417
  - !ruby/object:Gem::Dependency
  name: signet
  requirement: !ruby/object:Gem::Requirement