td-client 0.8.85 → 0.9.0dev1

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the public registry.
Files changed (36)
  1. checksums.yaml +4 -4
  2. data/lib/td/client.rb +8 -16
  3. data/lib/td/client/api.rb +46 -62
  4. data/lib/td/client/api/bulk_import.rb +2 -1
  5. data/lib/td/client/api/bulk_load.rb +3 -3
  6. data/lib/td/client/api/export.rb +0 -12
  7. data/lib/td/client/api/import.rb +2 -3
  8. data/lib/td/client/api/job.rb +71 -145
  9. data/lib/td/client/api/schedule.rb +1 -1
  10. data/lib/td/client/api_error.rb +0 -5
  11. data/lib/td/client/model.rb +28 -91
  12. data/lib/td/client/version.rb +1 -1
  13. data/spec/spec_helper.rb +5 -5
  14. data/spec/td/client/account_api_spec.rb +5 -5
  15. data/spec/td/client/api_spec.rb +51 -69
  16. data/spec/td/client/api_ssl_connection_spec.rb +1 -1
  17. data/spec/td/client/bulk_import_spec.rb +29 -28
  18. data/spec/td/client/bulk_load_spec.rb +35 -60
  19. data/spec/td/client/db_api_spec.rb +1 -1
  20. data/spec/td/client/export_api_spec.rb +1 -11
  21. data/spec/td/client/import_api_spec.rb +10 -85
  22. data/spec/td/client/job_api_spec.rb +61 -567
  23. data/spec/td/client/model_job_spec.rb +10 -27
  24. data/spec/td/client/model_schedule_spec.rb +2 -2
  25. data/spec/td/client/partial_delete_api_spec.rb +1 -1
  26. data/spec/td/client/result_api_spec.rb +3 -3
  27. data/spec/td/client/sched_api_spec.rb +4 -12
  28. data/spec/td/client/server_status_api_spec.rb +2 -2
  29. data/spec/td/client/spec_resources.rb +0 -1
  30. data/spec/td/client/table_api_spec.rb +14 -14
  31. data/spec/td/client/user_api_spec.rb +12 -12
  32. data/spec/td/client_sched_spec.rb +6 -31
  33. data/spec/td/client_spec.rb +0 -1
  34. metadata +97 -42
  35. data/spec/td/client/api_error_spec.rb +0 -77
  36. data/spec/td/client/model_schema_spec.rb +0 -134
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 60a54746395659411ee0e31937d4456acf1161db
- data.tar.gz: 86d0d880f287ef9ee6919fdfac91ad1211d887fa
+ metadata.gz: fbabfd3204808160bd8fd4f5a7cd3c5c48eb1b11
+ data.tar.gz: bd585ffc36231bc199d78b36f0e67a8205d6939a
  SHA512:
- metadata.gz: 177e498d108552a2b549755455a34e6ac811cf8f06510cf2cb612b53f33ac13876c524fb8390f84459efb52ec00b2316ed8b0fb8a77daf2b11c56674e47bc318
- data.tar.gz: fd06041a13ded4aeddb1514ce10f0225140d72a70a4dafa2305c77935415b59230d9c31b54378ab5ed04217b1cca4f06a19d1147f54e6c476209ab490adf816a
+ metadata.gz: d88e942c5fa4cdb4aa793419d6213ef80b048449e9383eef3cced73e30dc4f8b0a4f8ba39e2ea13ad5ce4fafb462b13f5e48acd213c87e3e9cf52b02a3f9470c
+ data.tar.gz: 356cfac4a8a32e3c574b0f5928ad80bf75e040952fc438c632b7ef3e3e7310950253103870c62277b75632299febb670cf3baaade3aecb67f45123010587d13c
data/lib/td/client.rb CHANGED
@@ -182,10 +182,10 @@ class Client
  results = @api.list_jobs(from, to, status, conditions)
  results.map {|job_id, type, status, query, start_at, end_at, cpu_time,
  result_size, result_url, priority, retry_limit, org, db,
- duration, num_records|
+ duration|
  Job.new(self, job_id, type, query, status, nil, nil, start_at, end_at, cpu_time,
  result_size, nil, result_url, nil, priority, retry_limit, org, db,
- duration, num_records)
+ duration)
  }
  end

@@ -194,9 +194,9 @@ class Client
  def job(job_id)
  job_id = job_id.to_s
  type, query, status, url, debug, start_at, end_at, cpu_time,
- result_size, result_url, hive_result_schema, priority, retry_limit, org, db, num_records = @api.show_job(job_id)
+ result_size, result_url, hive_result_schema, priority, retry_limit, org, db = @api.show_job(job_id)
  Job.new(self, job_id, type, query, status, url, debug, start_at, end_at, cpu_time,
- result_size, nil, result_url, hive_result_schema, priority, retry_limit, org, db, num_records)
+ result_size, nil, result_url, hive_result_schema, priority, retry_limit, org, db)
  end

  # @param [String] job_id
@@ -254,14 +254,6 @@ class Client
  Job.new(self, job_id, :export, nil)
  end

- # @param [String] target_job_id
- # @param [Hash] opts
- # @return [Job]
- def result_export(target_job_id, opts={})
- job_id = @api.result_export(target_job_id, opts)
- Job.new(self, job_id, :result_export, nil)
- end
-
  # @param [String] db_name
  # @param [String] table_name
  # @param [Fixnum] to
@@ -364,7 +356,7 @@ class Client
  raise ArgumentError, "'cron' option is required" unless opts[:cron] || opts['cron']
  raise ArgumentError, "'query' option is required" unless opts[:query] || opts['query']
  start = @api.create_schedule(name, opts)
- return start && Time.parse(start)
+ return Time.parse(start)
  end

  # @param [String] name
@@ -582,9 +574,9 @@ class Client
  @api.bulk_load_show(name)
  end

- # name: String, settings: Hash -> BulkLoad
- def bulk_load_update(name, settings)
- @api.bulk_load_update(name, settings)
+ # name: String, job: BulkLoad -> BulkLoad
+ def bulk_load_update(name, job)
+ @api.bulk_load_update(name, job)
  end

  # name: String -> BulkLoad
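Note: the hunks above change the tuple shapes returned by @api.list_jobs / @api.show_job, but the public Client#jobs and Client#job interfaces keep returning Job objects. A minimal usage sketch (the API key is a placeholder; the attribute readers are assumed from the Job constructor arguments shown above):

    require 'td/client'

    client = TreasureData::Client.new('YOUR_API_KEY')
    # List the 20 most recent jobs and print a few Job attributes.
    client.jobs(0, 19).each do |job|
      puts [job.job_id, job.status, job.query].join("\t")
    end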
data/lib/td/client/api.rb CHANGED
@@ -36,19 +36,16 @@ class API
  include API::Table
  include API::User

- DEFAULT_ENDPOINT = 'api.treasuredata.com'
- DEFAULT_IMPORT_ENDPOINT = 'api-import.treasuredata.com'
+ DEFAULT_ENDPOINT = 'api.treasure-data.com'
+ DEFAULT_IMPORT_ENDPOINT = 'api-import.treasure-data.com'

- # Deprecated. Use DEFAULT_ENDPOINT and DEFAULT_IMPORT_ENDPOINT instead
- NEW_DEFAULT_ENDPOINT = DEFAULT_ENDPOINT
- NEW_DEFAULT_IMPORT_ENDPOINT = DEFAULT_IMPORT_ENDPOINT
- OLD_ENDPOINT = 'api.treasure-data.com'
+ NEW_DEFAULT_ENDPOINT = 'api.treasuredata.com'
+ NEW_DEFAULT_IMPORT_ENDPOINT = 'api-import.treasuredata.com'

  class IncompleteError < APIError; end

  # @param [String] apikey
  # @param [Hash] opts
- # for backward compatibility
  def initialize(apikey, opts={})
  require 'json'
  require 'time'
@@ -95,18 +92,12 @@ class API
  # generic URI
  @host, @port = endpoint.split(':', 2)
  @port = @port.to_i
- if opts[:ssl] === false || @host == TreasureData::API::OLD_ENDPOINT
- # for backward compatibility, old endpoint specified without ssl option, use http
- #
- # opts[:ssl] would be nil if user doesn't specify ssl options,
- # but connecting to https is the new default behavior (since 0.9)
- # so check ssl option by `if opts[:ssl] === false` instead of `if opts[:ssl]`
- # that means if user desire to use http, give `:ssl => false` for initializer such as API.new("APIKEY", :ssl => false)
- @port = 80 if @port == 0
- @ssl = false
- else
+ if opts[:ssl]
  @port = 443 if @port == 0
  @ssl = true
+ else
+ @port = 80 if @port == 0
+ @ssl = false
  end
  @base_path = ''
  end
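With the simplified branch above, HTTPS is used only when :ssl is truthy and the OLD_ENDPOINT special case is gone. A construction sketch under that assumption (the API key is a placeholder, and the :endpoint option is assumed to select the host as in released versions):

    require 'td/client'

    # Plain HTTP on port 80 unless :ssl is set.
    api_http  = TreasureData::API.new('YOUR_API_KEY', :endpoint => 'api.treasure-data.com', :ssl => false)

    # HTTPS on port 443 when :ssl => true is given.
    api_https = TreasureData::API.new('YOUR_API_KEY', :endpoint => 'api.treasure-data.com', :ssl => true)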
@@ -149,27 +140,15 @@ class API
  end

  name = name.to_s
- if max_len
- if name.length < min_len || name.length > max_len
- raise ParameterValidationError,
- "#{target.capitalize} name must be between #{min_len} and #{max_len} characters long. Got #{name.length} " +
- (name.length == 1 ? "character" : "characters") + "."
- end
- else
- if min_len == 1
- if name.empty?
- raise ParameterValidationError,
+ if name.empty?
+ raise ParameterValidationError,
  "Empty #{target} name is not allowed"
- end
- else
- if name.length < min_len
- raise ParameterValidationError,
- "#{target.capitalize} name must be longer than #{min_len} characters. Got #{name.length} " +
+ end
+ if name.length < min_len || name.length > max_len
+ raise ParameterValidationError,
+ "#{target.capitalize} name must be between #{min_len} and #{max_len} characters long. Got #{name.length} " +
  (name.length == 1 ? "character" : "characters") + "."
- end
- end
  end
-
  unless name =~ /^([a-z0-9_]+)$/
  raise ParameterValidationError,
  "#{target.capitalize} name must only consist of lower-case alpha-numeric characters and '_'."
@@ -195,18 +174,7 @@ class API

  # @param [String] name
  def self.validate_column_name(name)
- target = 'column'
- name = name.to_s
- if name.empty?
- raise ParameterValidationError,
- "Empty #{target} name is not allowed"
- end
- name
- end
-
- # @param [String] name
- def self.validate_sql_alias_name(name)
- validate_name("sql_alias", 1, nil, name)
+ validate_name("column", 1, 255, name)
  end

  # @param [String] name
@@ -231,6 +199,25 @@ class API
  normalize_database_name(name)
  end

+ # TODO support array types
+ # @param [String] name
+ def self.normalize_type_name(name)
+ case name
+ when /int/i, /integer/i
+ "int"
+ when /long/i, /bigint/i
+ "long"
+ when /string/i
+ "string"
+ when /float/i
+ "float"
+ when /double/i
+ "double"
+ else
+ raise "Type name must either of int, long, string float or double"
+ end
+ end
+
  # for fluent-plugin-td / td command to check table existence with import onlt user
  # @return [String]
  def self.create_empty_gz_data
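A quick behavior sketch of the normalize_type_name helper restored above (it is defined with self., so it is callable on TreasureData::API; note that the /int/i branch is checked first, so any name containing "int", including "bigint", normalizes to "int"):

    require 'td/client'

    TreasureData::API.normalize_type_name('Integer')  # => "int"
    TreasureData::API.normalize_type_name('string')   # => "string"
    TreasureData::API.normalize_type_name('DOUBLE')   # => "double"
    TreasureData::API.normalize_type_name('bigint')   # => "int"  (matched by /int/i before /bigint/i)
    TreasureData::API.normalize_type_name('varchar')  # raises RuntimeError (unsupported type name)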
@@ -254,7 +241,6 @@ private
  do_get(url, params, &block)
  end
  end
-
  # @param [String] url
  # @param [Hash] params
  # @yield [response]
@@ -309,7 +295,7 @@ private
  retry_delay *= 2
  redo # restart from beginning of do-while loop
  end
- rescue Errno::ECONNREFUSED, Errno::ECONNRESET, Timeout::Error, EOFError, OpenSSL::SSL::SSLError, SocketError, IncompleteError, HTTPClient::TimeoutError => e
+ rescue Errno::ECONNREFUSED, Errno::ECONNRESET, Timeout::Error, EOFError, OpenSSL::SSL::SSLError, SocketError, IncompleteError => e
  if block_given?
  raise e
  end
@@ -605,15 +591,18 @@ private
  end

  def parse_error_response(res)
+ error = {}
+
  begin
- error = JSON.load(res.body)
- if error
- error['message'] = error['error'] unless error['message']
+ js = JSON.load(res.body)
+ if js.nil?
+ error['message'] = res.reason
  else
- error = {'message' => res.reason}
+ error['message'] = js['message'] || js['error']
+ error['stacktrace'] = js['stacktrace']
  end
  rescue JSON::ParserError
- error = {'message' => res.body}
+ error['message'] = res.body
  end

  error
@@ -635,7 +624,6 @@ private
  when "404"
  NotFoundError
  when "409"
- message = "#{message}: conflicts_with job:#{error["details"]["conflicts_with"]}" if error["details"] && error["details"]["conflicts_with"]
  AlreadyExistsError
  when "401"
  AuthError
@@ -647,15 +635,11 @@ private
  end
  end

- exc = nil
- if error_class.method_defined?(:conflicts_with) && error["details"] && error["details"]["conflicts_with"]
- exc = error_class.new(message, error['stacktrace'], error["details"]["conflicts_with"])
- elsif error_class.method_defined?(:api_backtrace)
- exc = error_class.new(message, error['stacktrace'])
+ if error_class.method_defined?(:api_backtrace)
+ raise error_class.new(message, error['stacktrace'])
  else
- exc = error_class.new(message)
+ raise error_class, message
  end
- raise exc
  end

  if ''.respond_to?(:encode)
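The raise_error changes above drop the conflicts_with enrichment, but callers still see the same exception classes keyed off the HTTP status. A hedged handling sketch at the Client level (the method and error class names are the ones referenced in the hunk; the database name is a placeholder):

    require 'td/client'

    client = TreasureData::Client.new('YOUR_API_KEY')
    begin
      client.create_database('example_db')
    rescue TreasureData::AlreadyExistsError
      # 409 from the API: the database already exists
    rescue TreasureData::AuthError
      # 401: bad or missing API key
    rescue TreasureData::NotFoundError
      # 404
    end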
data/lib/td/client/api/bulk_import.rb CHANGED
@@ -161,7 +161,8 @@ module BulkImport
  end
  end
  require File.expand_path('../compat_gzip_reader', File.dirname(__FILE__))
- u = MessagePack::Unpacker.new(Zlib::GzipReader.new(StringIO.new(body)))
+ io = StringIO.new(Zlib::GzipReader.new(StringIO.new(body)).read)
+ u = MessagePack::Unpacker.new(io)
  if block
  begin
  u.each(&block)
data/lib/td/client/api/bulk_load.rb CHANGED
@@ -116,10 +116,10 @@ module BulkLoad
  JSON.load(res.body)
  end

- # name: String, settings: Hash -> Hash
- def bulk_load_update(name, settings)
+ # name: String, job: Hash -> Hash
+ def bulk_load_update(name, job)
  path = session_path(name)
- res = api { put(path, settings.to_json) }
+ res = api { put(path, job.to_json) }
  unless res.ok?
  raise_error("BulkLoadSession: #{name} update failed", res)
  end
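The rename above is signature-only: bulk_load_update still serializes its second argument with to_json and PUTs it to the session path. A hedged usage sketch (the session name and the hash contents are illustrative placeholders, not a documented schema):

    require 'td/client'

    client = TreasureData::Client.new('YOUR_API_KEY')
    job = {
      'name'   => 'nightly_s3_load',
      'config' => { 'in' => { 'type' => 's3' } }
    }
    # Serialized as JSON and PUT to the bulk-load session path for 'nightly_s3_load' by the API layer.
    client.bulk_load_update('nightly_s3_load', job)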
data/lib/td/client/api/export.rb CHANGED
@@ -22,17 +22,5 @@ module Export
  return js['job_id'].to_s
  end

- # => jobId:String
- # @param [String] target_job_id
- # @param [Hash] opts
- # @return [String] job_id
- def result_export(target_job_id, opts={})
- code, body, res = post("/v3/job/result_export/#{target_job_id}", opts)
- if code != "200"
- raise_error("Result Export failed", res)
- end
- js = checked_json(body, %w[job_id])
- return js['job_id'].to_s
- end
  end
  end
data/lib/td/client/api/import.rb CHANGED
@@ -21,9 +21,8 @@ module Import
  opts = {}
  if @host == DEFAULT_ENDPOINT
  opts[:host] = DEFAULT_IMPORT_ENDPOINT
- elsif @host == TreasureData::API::OLD_ENDPOINT # backward compatibility
- opts[:host] = 'api-import.treasure-data.com'
- opts[:ssl] = false
+ elsif @host == NEW_DEFAULT_ENDPOINT
+ opts[:host] = NEW_DEFAULT_IMPORT_ENDPOINT
  end
  code, body, res = put(path, stream, size, opts)
  if code[0] != ?2
data/lib/td/client/api/job.rb CHANGED
@@ -36,10 +36,9 @@ module Job
  priority = m['priority']
  retry_limit = m['retry_limit']
  duration = m['duration']
- num_records = m['num_records']
  result << [job_id, type, status, query, start_at, end_at, cpu_time,
  result_size, result_url, priority, retry_limit, nil, database,
- duration, num_records]
+ duration]
  }
  return result
  end
@@ -64,7 +63,6 @@ module Job
  end_at = js['end_at']
  cpu_time = js['cpu_time']
  result_size = js['result_size'] # compressed result size in msgpack.gz format
- num_records = js['num_records']
  result = js['result'] # result target URL
  hive_result_schema = (js['hive_result_schema'] || '')
  if hive_result_schema.empty?
@@ -99,7 +97,7 @@ module Job
  priority = js['priority']
  retry_limit = js['retry_limit']
  return [type, query, status, url, debug, start_at, end_at, cpu_time,
- result_size, result, hive_result_schema, priority, retry_limit, nil, database, num_records]
+ result_size, result, hive_result_schema, priority, retry_limit, nil, database]
  end

  # @param [String] job_id
@@ -117,13 +115,14 @@ module Job
  # @param [String] job_id
  # @return [Array]
  def job_result(job_id)
- result = []
- unpacker = MessagePack::Unpacker.new
- job_result_download(job_id) do |chunk|
- unpacker.feed_each(chunk) do |row|
- result << row
- end
+ code, body, res = get("/v3/job/result/#{e job_id}", {'format'=>'msgpack'})
+ if code != "200"
+ raise_error("Get job result failed", res)
  end
+ result = []
+ MessagePack::Unpacker.new.feed_each(body) {|row|
+ result << row
+ }
  return result
  end

@@ -134,17 +133,24 @@ module Job
  # @param [IO] io
  # @param [Proc] block
  # @return [nil, String]
- def job_result_format(job_id, format, io=nil)
+ def job_result_format(job_id, format, io=nil, &block)
  if io
- job_result_download(job_id, format) do |chunk, total|
- io.write chunk
- yield total if block_given?
- end
+ infl = nil
+ code, body, res = get("/v3/job/result/#{e job_id}", {'format'=>format}) {|res, chunk, current_total_chunk_size|
+ if res.code != 200
+ raise_error("Get job result failed", res)
+ end
+
+ infl ||= create_inflalte_or_null_inflate(res)
+
+ io.write infl.inflate(chunk)
+ block.call(current_total_chunk_size) if block_given?
+ }
  nil
  else
- body = String.new
- job_result_download(job_id, format) do |chunk|
- body << chunk
+ code, body, res = get("/v3/job/result/#{e job_id}", {'format'=>format})
+ if code != "200"
+ raise_error("Get job result failed", res)
  end
  body
  end
@@ -157,11 +163,22 @@ module Job
  # @return [nil]
  def job_result_each(job_id, &block)
  upkr = MessagePack::Unpacker.new
- # default to decompressing the response since format is fixed to 'msgpack'
- job_result_download(job_id) do |chunk|
- upkr.feed_each(chunk, &block)
- end
+ infl = nil
+
+ get("/v3/job/result/#{e job_id}", {'format'=>'msgpack'}) {|res, chunk, current_total_chunk_size|
+ if res.code != 200
+ raise_error("Get job result failed", res)
+ end
+
+ # default to decompressing the response since format is fixed to 'msgpack'
+ infl ||= create_inflate(res)
+
+ inflated_fragment = infl.inflate(chunk)
+ upkr.feed_each(inflated_fragment, &block)
+ }
  nil
+ ensure
+ infl.close if infl
  end

  # block is optional and must accept 1 argument
@@ -169,30 +186,50 @@
  # @param [String] job_id
  # @param [Proc] block
  # @return [nil]
- def job_result_each_with_compr_size(job_id)
+ def job_result_each_with_compr_size(job_id, &block)
  upkr = MessagePack::Unpacker.new
- # default to decompressing the response since format is fixed to 'msgpack'
- job_result_download(job_id) do |chunk, total|
- upkr.feed_each(chunk) {|unpacked|
- yield unpacked, total if block_given?
+ infl = nil
+
+ get("/v3/job/result/#{e job_id}", {'format'=>'msgpack'}) {|res, chunk, current_total_chunk_size|
+ if res.code != 200
+ raise_error("Get job result failed", res)
+ end
+
+ # default to decompressing the response since format is fixed to 'msgpack'
+ infl ||= create_inflate(res)
+
+ inflated_fragment = infl.inflate(chunk)
+ upkr.feed_each(inflated_fragment) {|unpacked|
+ block.call(unpacked, current_total_chunk_size) if block_given?
  }
- end
+ }
  nil
+ ensure
+ infl.close if infl
  end

  # @param [String] job_id
  # @param [String] format
  # @return [String]
- def job_result_raw(job_id, format, io = nil)
- body = io ? nil : String.new
- job_result_download(job_id, format, false) do |chunk, total|
+ def job_result_raw(job_id, format, io = nil, &block)
+ body = nil
+
+ get("/v3/job/result/#{e job_id}", {'format'=>format}) {|res, chunk, current_total_chunk_size|
+ if res.code != 200
+ raise_error("Get job result failed", res)
+ end
+
  if io
  io.write(chunk)
- yield total if block_given?
+ block.call(current_total_chunk_size) if block_given?
  else
- body << chunk
+ if body
+ body += chunk
+ else
+ body = chunk
+ end
  end
- end
+ }
  body
  end

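The job-result methods above now stream directly through get with inline inflation. A hedged usage sketch at the API level (api is a TreasureData::API instance and job_id the id of a finished job; the output file name is a placeholder):

    require 'td/client'

    api = TreasureData::API.new('YOUR_API_KEY')

    # Collect decoded msgpack rows in memory.
    rows = []
    api.job_result_each(job_id) {|row| rows << row }

    # Stream a CSV-formatted result to a file, reporting the cumulative chunk size downloaded so far.
    File.open('result.csv', 'wb') do |io|
      api.job_result_format(job_id, 'csv', io) {|total| $stderr.puts "#{total} bytes so far" }
    end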
@@ -250,117 +287,6 @@ module Job

  private

- def validate_content_length_with_range(response, current_total_chunk_size)
- if expected_size = response.header['Content-Range'][0]
- expected_size = expected_size[/\d+$/].to_i
- elsif expected_size = response.header['Content-Length'][0]
- expected_size = expected_size.to_i
- end
-
- if expected_size.nil?
- elsif current_total_chunk_size < expected_size
- # too small
- # NOTE:
- # ext/openssl raises EOFError in case where underlying connection
- # causes an error, but httpclient ignores it.
- # https://github.com/nahi/httpclient/blob/v3.2.8/lib/httpclient/session.rb#L1003
- raise EOFError, 'httpclient IncompleteError'
- elsif current_total_chunk_size > expected_size
- # too large
- raise_error("Get job result failed", response)
- end
- end
-
- def job_result_download(job_id, format='msgpack', autodecode=true)
- client, header = new_client
- client.send_timeout = @send_timeout
- client.receive_timeout = @read_timeout
- header['Accept-Encoding'] = 'deflate, gzip'
-
- url = build_endpoint("/v3/job/result/#{e job_id}", @host)
- params = {'format' => format}
-
- unless ENV['TD_CLIENT_DEBUG'].nil?
- puts "DEBUG: REST GET call:"
- puts "DEBUG: header: " + header.to_s
- puts "DEBUG: url: " + url.to_s
- puts "DEBUG: params: " + params.to_s
- end
-
- # up to 7 retries with exponential (base 2) back-off starting at 'retry_delay'
- retry_delay = @retry_delay
- cumul_retry_delay = 0
- current_total_chunk_size = 0
- infl = nil
- begin # LOOP of Network/Server errors
- response = nil
- client.get(url, params, header) do |res, chunk|
- unless response
- case res.status
- when 200
- if current_total_chunk_size != 0
- # try to resume but the server returns 200
- raise_error("Get job result failed", res)
- end
- when 206 # resuming
- else
- if res.status/100 == 5 && cumul_retry_delay < @max_cumul_retry_delay
- $stderr.puts "Error #{res.status}: #{get_error(res)}. Retrying after #{retry_delay} seconds..."
- sleep retry_delay
- cumul_retry_delay += retry_delay
- retry_delay *= 2
- redo
- end
- raise_error("Get job result failed", res)
- end
- if infl.nil? && autodecode
- case res.header['Content-Encoding'][0].to_s.downcase
- when 'gzip'
- infl = Zlib::Inflate.new(Zlib::MAX_WBITS + 16)
- when 'deflate'
- infl = Zlib::Inflate.new
- end
- end
- end
- response = res
- current_total_chunk_size += chunk.bytesize
- chunk = infl.inflate(chunk) if infl
- yield chunk, current_total_chunk_size
- end
-
- # completed?
- validate_content_length_with_range(response, current_total_chunk_size)
- rescue Errno::ECONNREFUSED, Errno::ECONNRESET, Timeout::Error, EOFError, OpenSSL::SSL::SSLError, SocketError => e
- if response # at least a chunk is downloaded
- if etag = response.header['ETag'][0]
- header['If-Range'] = etag
- header['Range'] = "bytes=#{current_total_chunk_size}-"
- end
- end
-
- $stderr.print "#{e.class}: #{e.message}. "
- if cumul_retry_delay < @max_cumul_retry_delay
- $stderr.puts "Retrying after #{retry_delay} seconds..."
- sleep retry_delay
- cumul_retry_delay += retry_delay
- retry_delay *= 2
- retry
- end
- raise
- end
-
- unless ENV['TD_CLIENT_DEBUG'].nil?
- puts "DEBUG: REST GET response:"
- puts "DEBUG: header: " + response.header.to_s
- puts "DEBUG: status: " + response.code.to_s
- puts "DEBUG: body: " + response.body.to_s
- end
-
- nil
- ensure
- infl.close if infl
- end
-
  class NullInflate
  def inflate(chunk)
  chunk