hive-ruby 1.0.0.pre.2 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Rakefile +141 -11
- data/hive-ruby.gemspec +3 -3
- data/lib/hive/api.rb +15 -2
- data/lib/hive/base_error.rb +10 -3
- data/lib/hive/block_api.rb +56 -10
- data/lib/hive/broadcast.rb +27 -23
- data/lib/hive/fallback.rb +2 -4
- data/lib/hive/jsonrpc.rb +1 -1
- data/lib/hive/marshal.rb +3 -3
- data/lib/hive/operation.rb +14 -5
- data/lib/hive/operation/claim_reward_balance.rb +2 -2
- data/lib/hive/operation/comment_options.rb +1 -1
- data/lib/hive/operation/escrow_release.rb +2 -2
- data/lib/hive/operation/escrow_transfer.rb +2 -2
- data/lib/hive/rpc/http_client.rb +7 -5
- data/lib/hive/stream.rb +125 -18
- data/lib/hive/version.rb +1 -1
- metadata +14 -20
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8bf5d72142c42463a6146346f0bdd4e9943da776894b26f19a6bf3bac7416670
+  data.tar.gz: a06c05f424bf354b792fc3a49dde1a064c1bd974a922c4a01ec9ef6281fc3611
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: fff041bd517d2101d0d84b521d17880b0bb7d641751bfa5a1c22529fd3e451290f28556830726ca02f38a40fa39e521bf55f852885374d93842a3c4f6bd1b7dc
+  data.tar.gz: a6d4e6be972a57411d6c51894e5ba12c610496e06f4889f1e5c72493044c8fbcb1d5438b51d329209d66ce38fb88c165a6994dfa80c7f2ec2b6900f33763513a
data/Rakefile
CHANGED

@@ -112,20 +112,20 @@ namespace :test do
 [k, v] if keys.include? k.to_sym
 end.compact.to_h

-
-base =
+hbd_exchange_rate = witness[:hbd_exchange_rate] || witness[:hbd_exchange_rate]
+base = hbd_exchange_rate[:base].to_f

-if (quote =
+if (quote = hbd_exchange_rate[:quote].to_f) > 0
 rate = (base / quote).round(3)
-witnesses[witness.owner][:
+witnesses[witness.owner][:hbd_exchange_rate] = rate
 else
-witnesses[witness.owner][:
+witnesses[witness.owner][:hbd_exchange_rate] = nil
 end

-
-
-
-witnesses[witness.owner][:
+last_hbd_exchange_update = witness[:last_hbd_exchange_update] || witness[:last_hbd_exchange_update]
+last_hbd_exchange_update = Time.parse(last_hbd_exchange_update + 'Z')
+last_hbd_exchange_elapsed = '%.2f hours ago' % ((Time.now.utc - last_hbd_exchange_update) / 60)
+witnesses[witness.owner][:last_hbd_exchange_elapsed] = last_hbd_exchange_elapsed
 end
 end
 end
@@ -153,9 +153,13 @@ namespace :stream do
 first_block_num = args[:at_block_num].to_i if !!args[:at_block_num]
 stream = Hive::Stream.new(url: ENV['TEST_NODE'], mode: mode)
 api = Hive::Api.new(url: ENV['TEST_NODE'])
+block_api = Hive::BlockApi.new(url: ENV['TEST_NODE'])
 last_block_num = nil
 last_timestamp = nil
 range_complete = false
+round_pool = {}
+aging_blocks = {}
+aged_block_interval = 630

 api.get_dynamic_global_properties do |properties|
 current_block_num = if mode == :head
@@ -165,13 +169,14 @@ namespace :stream do
 end

 # First pass replays latest a random number of blocks to test chunking.
-first_block_num ||= current_block_num - (rand *
+first_block_num ||= current_block_num - (rand * 2000).to_i

 range = first_block_num..current_block_num
 puts "Initial block range: #{range.size}"

 stream.blocks(at_block_num: range.first) do |block, block_num|
 current_timestamp = Time.parse(block.timestamp + 'Z')
+round_pool[current_timestamp] = {block_num: block_num, block: block}

 if !range_complete && block_num > range.last
 puts 'Done with initial range.'
@@ -188,9 +193,35 @@ namespace :stream do
 exit
 end

-
+round_pool.each do |k, v|
+aging_blocks[k] = v if Time.now - k > aged_block_interval
+end
+
+round_pool = round_pool.select{|k, v| Time.now - k <= aged_block_interval}.to_h
+drift = last_timestamp.nil? ? 0 : (current_timestamp - last_timestamp) - Hive::Stream::BLOCK_INTERVAL.to_f
+
+puts "\t#{block_num} Timestamp: #{current_timestamp}, witness: #{block.witness}, aging blocks: #{aging_blocks.size}, drift: #{drift}"
+
 last_block_num = block_num
 last_timestamp = current_timestamp
+
+if range_complete && aging_blocks.any?
+aging_block_nums = aging_blocks.map{|k, v| v[:block_num]}
+wire_block_range = (aging_block_nums.first..aging_block_nums.last)
+
+block_api.get_block_headers(block_range: wire_block_range) do |wire_header, wire_block_num|
+wire_timestamp = Time.parse(wire_header.timestamp + 'Z')
+aging_block = aging_blocks[wire_timestamp][:block]
+
+if wire_header.previous == aging_block.previous
+puts "\t\tAged block test #{wire_block_num}: √"
+aging_blocks.delete(wire_timestamp)
+else
+puts "\t\tAged block test #{wire_block_num}: detected block-reorganization (#{wire_header.previous} != #{aging_block.previous})"
+exit
+end
+end
+end
 end
 end
 end
@@ -247,6 +278,8 @@ namespace :stream do
 first_block_num = args[:at_block_num].to_i if !!args[:at_block_num]
 stream = Hive::Stream.new(url: ENV['TEST_NODE'], mode: mode)
 api = Hive::Api.new(url: ENV['TEST_NODE'])
+ah_api = Hive::AccountHistoryApi.new(url: ENV['TEST_NODE'])
+round_vops = {}

 api.get_dynamic_global_properties do |properties|
 current_block_num = if mode == :head
@@ -259,6 +292,31 @@ namespace :stream do
 first_block_num ||= current_block_num - (rand * 200).to_i

 stream.operations(at_block_num: first_block_num, only_virtual: true) do |op, trx_id, block_num|
+# 126 is about two shuffle rounds (if mode == :head), we need to avoid
+# the current block_num because we're still in the middle of reading
+# all of the vops for that block.
+if round_vops.size > 126 && !round_vops.include?(block_num)
+ah_api.enum_virtual_ops(block_range_begin: round_vops.keys.min, block_range_end: round_vops.keys.max + 1, include_reversible: true) do |result|
+round_vops.each do |k, v|
+later_ops = result.ops.select{|vop| vop.block == k}
+if (verify_count = later_ops.size) == v.size
+puts "\t\t#{k} :: streamed vop count was #{v.size} √"
+else
+puts "\t\t#{k} :: streamed vop count was #{v.size}, later became #{verify_count}"
+puts "\t\t\t#{v.map{|op| op.type}.join(', ')}"
+puts "\t\tLater ops:\n\t\t\t#{later_ops.map{|vop| vop.op.type}.join(', ')}"
+
+exit
+end
+end
+end
+
+round_vops = {}
+end
+
+round_vops[block_num] ||= []
+round_vops[block_num] << op
+
 puts "#{block_num} :: #{trx_id}; op: #{op.type}"
 end
 end
@@ -286,6 +344,78 @@ namespace :stream do
 end
 end
 end
+
+desc 'Test the ability to stream all operations (including virtual) that match a pattern.'
+task :op_pattern, [:pattern, :mode, :at_block_num] do |t, args|
+mode = (args[:mode] || 'irreversible').to_sym
+first_block_num = args[:at_block_num].to_i if !!args[:at_block_num]
+stream = Hive::Stream.new(url: ENV['TEST_NODE'], mode: mode)
+api = Hive::Api.new(url: ENV['TEST_NODE'])
+pattern = /#{args[:pattern]}/i
+
+api.get_dynamic_global_properties do |properties|
+current_block_num = if mode == :head
+properties.head_block_number
+else
+properties.last_irreversible_block_num
+end
+
+# First pass replays latest a random number of blocks to test chunking.
+first_block_num ||= current_block_num - (rand * 200).to_i
+
+stream.operations(at_block_num: first_block_num, include_virtual: true) do |op, trx_id, block_num|
+next unless op.to_json =~ pattern
+
+puts "#{block_num} :: #{trx_id}; op: #{op.to_json}"
+end
+end
+end
+
+desc 'Test the ability to stream all effective_comment_vote_operation operations.'
+task :effective_comment_vote_operation, [:mode, :at_block_num] do |t, args|
+mode = (args[:mode] || 'irreversible').to_sym
+first_block_num = args[:at_block_num].to_i if !!args[:at_block_num]
+stream = Hive::Stream.new(url: ENV['TEST_NODE'], mode: mode, no_warn: true)
+api = Hive::Api.new(url: ENV['TEST_NODE'])
+
+api.get_dynamic_global_properties do |properties|
+current_block_num = if mode == :head
+properties.head_block_number
+else
+properties.last_irreversible_block_num
+end
+
+# First pass replays latest a random number of blocks to test chunking.
+first_block_num ||= current_block_num - (rand * 200).to_i
+
+stream.operations(at_block_num: first_block_num, include_virtual: true) do |op, trx_id, block_num|
+next unless op.type == 'effective_comment_vote_operation'
+pending_payout = Hive::Type::Amount.new(op.value.pending_payout)
+
+puts "#{block_num} :: #{trx_id}; voter: #{op.value.voter}, author: #{op.value.author}, pending_payout: #{pending_payout}"
+end
+end
+end
+end
+
+desc 'List hardforks.'
+task :hardforks do
+database_api = Hive::DatabaseApi.new(url: ENV['TEST_NODE'])
+block_api = Hive::BlockApi.new(url: ENV['TEST_NODE'])
+ah_api = Hive::AccountHistoryApi.new(url: ENV['TEST_NODE'])
+last_hf_timestamp = block_api.get_block(block_num: 1) do |result|
+Time.parse(result.block.timestamp + 'Z')
+end
+
+database_api.get_hardfork_properties do |properties|
+processed_hardforks = properties.processed_hardforks
+
+processed_hardforks.each_with_index do |timestamp, index|
+timestamp = Time.parse(timestamp + 'Z')
+
+puts "HF#{index}: #{timestamp}"
+end
+end
 end

 YARD::Rake::YardocTask.new do |t|
data/hive-ruby.gemspec
CHANGED

@@ -17,7 +17,7 @@ Gem::Specification.new do |spec|
 spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test)/}) }
 spec.require_paths = ['lib']

-spec.add_development_dependency 'bundler', '~> 1
+spec.add_development_dependency 'bundler', '~> 2.1', '>= 2.1.4'
 spec.add_development_dependency 'rake', '~> 13.0.1', '>= 12.3.0'
 spec.add_development_dependency 'minitest', '~> 5.14', '>= 5.10.3'
 spec.add_development_dependency 'minitest-line', '~> 0.6', '>= 0.6.4'
@@ -32,8 +32,8 @@ Gem::Specification.new do |spec|

 spec.add_dependency 'json', '~> 2.1', '>= 2.1.0'
 spec.add_dependency 'logging', '~> 2.2', '>= 2.2.0'
-spec.add_dependency 'hashie', '
-spec.add_dependency 'bitcoin-ruby', '~> 0.0', '
+spec.add_dependency 'hashie', '>= 3.5'
+spec.add_dependency 'bitcoin-ruby', '~> 0.0', '0.0.20'
 spec.add_dependency 'ffi', '~> 1.9', '>= 1.9.23'
 spec.add_dependency 'bindata', '~> 2.4', '>= 2.4.4'
 spec.add_dependency 'base58', '~> 0.2', '>= 0.2.3'
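For context, a minimal Gemfile sketch for consuming this release (not part of the diff; the version constraint shown is an assumption, not taken from the package):

# Gemfile (hypothetical constraint matching the 1.0.3 release above)
source 'https://rubygems.org'
gem 'hive-ruby', '~> 1.0', '>= 1.0.3'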
data/lib/hive/api.rb
CHANGED

@@ -118,7 +118,9 @@ module Hive

 if Jsonrpc::UNLISTED_APIS.include? @api_name
 @methods ||= {}
-@methods[@api_name] ||=
+@methods[@api_name] ||= []
+@methods[@api_name] += Fallback::API_METHODS[@api_name]
+@methods[@api_name] = @methods[@api_name].uniq
 end

 unless !!@methods[@api_name]
@@ -191,7 +193,18 @@ module Hive
 # Some argument are optional, but if the arguments passed are greater
 # than the expected arguments size, we can warn.
 if args_size > expected_args_size
-
+if rpc_method_name == 'account_history_api.get_account_history' && expected_args_size == 3 && args_size == 6
+# TODO Remove this condition if they ever fix this issue:
+# https://gitlab.syncad.com/hive/hive/-/issues/100
+elsif rpc_method_name == 'account_history_api.get_ops_in_block' && expected_args_size == 2 && args_size == 3
+# TODO Remove this condition if they ever fix this issue:
+# https://gitlab.syncad.com/hive/hive/-/issues/100
+elsif rpc_method_name == 'account_history_api.enum_virtual_ops' && expected_args_size == 2 && args_size == 3
+# TODO Remove this condition if they ever fix this issue:
+# https://gitlab.syncad.com/hive/hive/-/issues/100
+else
+@error_pipe.puts "Warning #{rpc_method_name} expects arguments: #{expected_args_size}, got: #{args_size}"
+end
 end
 rescue NoMethodError => e
 error = Hive::ArgumentError.new("#{rpc_method_name} expects arguments: #{expected_args_size}", e)
data/lib/hive/base_error.rb
CHANGED

@@ -19,17 +19,21 @@ module Hive
 end

 if error.message.include? 'Internal Error'
-raise Hive::
+raise Hive::RemoteInternalError, error.message, build_backtrace(error)
 end

 if error.message.include? 'Server error'
-raise Hive::
+raise Hive::RemoteServerError, error.message, build_backtrace(error)
 end

-if error.message.include?
+if error.message.include?('plugin not enabled') || error.message.include?('Could not find API')
 raise Hive::PluginNotEnabledError, error.message, build_backtrace(error)
 end

+if error.message.include? 'Supported by hivemind'
+raise Hive::MethodNotEnabledError, error.message, build_backtrace(error)
+end
+
 if error.message.include? 'argument'
 raise Hive::ArgumentError, "#{context}: #{error.message}", build_backtrace(error)
 end
@@ -210,9 +214,12 @@ module Hive
 class IncorrectRequestIdError < BaseError; end
 class IncorrectResponseIdError < BaseError; end
 class RemoteNodeError < BaseError; end
+class RemoteInternalError < BaseError; end
+class RemoteServerError < BaseError; end
 class UpstreamResponseError < RemoteNodeError; end
 class RemoteDatabaseLockError < UpstreamResponseError; end
 class PluginNotEnabledError < UpstreamResponseError; end
+class MethodNotEnabledError < UpstreamResponseError; end
 class RequestTimeoutUpstreamResponseError < UpstreamResponseError; end
 class BadOrMissingUpstreamResponseError < UpstreamResponseError; end
 class TransactionIndexDisabledError < BaseError; end
data/lib/hive/block_api.rb
CHANGED

@@ -6,6 +6,8 @@ module Hive
 # Also see: {https://developers.hive.io/apidefinitions/block-api.html Block API Definitions}
 class BlockApi < Api
 MAX_RANGE_SIZE = 50
+MAX_NO_BATCH_RANGE_SIZE = 200
+MAX_NO_BATCH_NO_RANGE_SIZE = 1

 def initialize(options = {})
 self.class.api_name = :block_api
@@ -20,24 +22,30 @@ module Hive
 get_block_objects(options.merge(object: :block_header), block)
 end

-# Uses
+# Uses get_block_range (or batched requsts) on a range of blocks.
 #
 # @param options [Hash] The attributes to get a block range with.
 # @option options [Range] :block_range starting on one block number and ending on an higher block number.
-
+# @option options [Boolean] :use_batch use json-rpc batch instead of get_block_range (preferred)
+def get_blocks(options = {block_range: (0..0), use_batch: false}, &block)
 get_block_objects(options.merge(object: :block), block)
 end
 private
-def get_block_objects(options = {block_range: (0..0)}, block = nil)
+def get_block_objects(options = {block_range: (0..0), use_batch: false}, block = nil)
 object = options[:object]
-object_method = "get_#{object}".to_sym
 block_range = options[:block_range] || (0..0)
+use_batch = !!options[:use_batch]
+
+object = :block_range if object == :block && !use_batch
+object_method = "get_#{object}".to_sym

-if (start = block_range.first) < 1
+if !!block_range && block_range.any? && (start = block_range.first) < 1
 raise Hive::ArgumentError, "Invalid starting block: #{start}"
 end

-chunks = if
+chunks = if object == :block_range
+block_range.each_slice(MAX_NO_BATCH_RANGE_SIZE)
+elsif block_range.size > MAX_RANGE_SIZE
 block_range.each_slice(MAX_RANGE_SIZE)
 else
 [block_range]
@@ -46,27 +54,65 @@ module Hive
 for sub_range in chunks do
 request_object = []

-
-
+if !!use_batch
+for i in sub_range do
+@rpc_client.put(self.class.api_name, object_method, block_num: i, request_object: request_object)
+end
+else
+case object
+when :block_header
+# Must use json-rpc batch for block headers request.
+for i in sub_range do
+@rpc_client.put(self.class.api_name, :get_block_header, block_num: i, request_object: request_object)
+end
+when :block, :block_range
+if sub_range.size == 1
+@rpc_client.put(self.class.api_name, :get_block, block_num: sub_range.first, request_object: request_object)
+else
+@rpc_client.put(self.class.api_name, :get_block_range, starting_block_num: sub_range.first, count: sub_range.size, request_object: request_object)
+end
+end
 end

 if !!block
 index = 0
 @rpc_client.rpc_batch_execute(api_name: self.class.api_name, request_object: request_object) do |result, error, id|
+raise Hive::RemoteNodeError, error.to_json if !!error
+
 block_num = sub_range.to_a[index]
 index = index + 1

 case object
 when :block_header
-block.call(result
+block.call(result[:header], block_num)
 else
-
+if !!use_batch || !!result[:block]
+block.call(result[:block] || result[object], block_num)
+else
+current_block_num = block_num
+result[:blocks].each do |b|
+# Now verify that the previous block_num really is the
+# previous block.
+
+decoded_previous_block_num = b.previous[0..7].to_i(16)
+previous_block_num = current_block_num - 1
+
+unless decoded_previous_block_num == previous_block_num
+raise Hive::RemoteNodeError, "Wrong block_num. Got #{decoded_previous_block_num}, expected #{previous_block_num}"
+end
+
+block.call(b, current_block_num)
+current_block_num = current_block_num + 1
+end
+end
 end
 end
 else
 blocks = []

 @rpc_client.rpc_batch_execute(api_name: self.class.api_name, request_object: request_object) do |result, error, id|
+raise Hive::RemoteNodeError, error.to_json if !!error
+
 blocks << result
 end
 end
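For context, a minimal usage sketch of the reworked BlockApi#get_blocks shown above (not part of this diff; the node URL and block numbers are placeholders, and the block fields printed are taken from the block objects used elsewhere in this diff):

require 'hive'

block_api = Hive::BlockApi.new(url: 'https://api.hive.blog')

# With use_batch: false (the default above), a multi-block range is fetched with
# block_api.get_block_range in chunks of up to MAX_NO_BATCH_RANGE_SIZE blocks.
block_api.get_blocks(block_range: (50_000_000..50_000_009)) do |block, block_num|
  puts "#{block_num}: witness #{block.witness}, #{block.transactions.size} transactions"
end

# Passing use_batch: true falls back to one json-rpc batch of get_block requests per chunk.
block_api.get_blocks(block_range: (50_000_000..50_000_009), use_batch: true) do |block, block_num|
  puts "#{block_num}: #{block.block_id}"
end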
data/lib/hive/broadcast.rb
CHANGED

@@ -137,7 +137,7 @@ module Hive
 # * :parent_permlink (String) (automatic) Parent permlink of the content, defaults to first tag.
 # * :parent_author (String) (optional) Parent author of the content (only used if reply).
 # * :max_accepted_payout (String) (1000000.000 HBD) Maximum accepted payout, set to '0.000 HBD' to deline payout
-# * :
+# * :percent_hbd (Numeric) (5000) Percent HIVE Dollars is used to set 50/50 or 100% HIVE Power
 # * :allow_votes (Numeric) (true) Allow votes for this content.
 # * :allow_curation_rewards (Numeric) (true) Allow curation rewards for this content.
 # * :beneficiaries (Array<Hash>) Sets the beneficiaries of this content.
@@ -196,7 +196,7 @@ module Hive
 author: params[:author],
 permlink: params[:permlink],
 max_accepted_payout: max_accepted_payout,
-
+percent_hbd: params[:percent_hbd] || 10000,
 # allow_replies: allow_replies,
 allow_votes: allow_votes,
 allow_curation_rewards: allow_curation_rewards,
@@ -646,7 +646,7 @@ module Hive
 # props: {
 # account_creation_fee: '0.000 HIVE',
 # maximum_block_size: 131072,
-#
+# hbd_interest_rate:1000
 # },
 # fee: '0.000 HIVE',
 # }
@@ -688,10 +688,10 @@ module Hive
 # props: {
 # account_creation_fee: '0.000 HIVE',
 # maximum_block_size: 131072,
-#
+# hbd_interest_rate: 1000,
 # account_subsidy_budget: 50000,
 # account_subsidy_decay: 330782,
-#
+# hbd_exchange_rate: '1.000 HIVE',
 # url: "https://hive.blog",
 # new_signing_key: 'STM8LoQjQqJHvotqBo7HjnqmUbFW9oJ2theyqonzUd9DdJ7YYHsvD'
 # }
@@ -719,15 +719,19 @@ module Hive
 props[:account_creation_fee] = hexlify normalize_amount(options.merge amount: account_creation_fee, serialize: true)
 end

-if !!(
-props[:
-props[:
-props[:
+if !!(hbd_exchange_rate = props[:hbd_exchange_rate] rescue nil)
+props[:hbd_exchange_rate][:base] = normalize_amount(options.merge amount: hbd_exchange_rate[:base], serialize: true)
+props[:hbd_exchange_rate][:quote] = normalize_amount(options.merge amount: hbd_exchange_rate[:quote], serialize: true)
+props[:hbd_exchange_rate] = hexlify props[:hbd_exchange_rate].to_json
 end

 %i(key new_signing_key).each do |key|
-
-props[key]
+begin
+if !!props[key] && props[key].length == 53
+props[key] = hexlify props[key][3..-1]
+end
+rescue => e
+raise Hive::ArgumentError, "Unable to parse #{key}: #{e}"
 end
 end

@@ -953,8 +957,8 @@ module Hive
 # * :to (String)
 # * :agent (String)
 # * :escrow_id (String)
-# * :
-# * :
+# * :hbd_amount (String)
+# * :hive_amount (String)
 # * :fee (String)
 # * :ratification_deadline (String)
 # * :escrow_expiration (String)
@@ -976,8 +980,8 @@ module Hive

 check_required_fields(params, *required_fields)

-params[:
-params[:
+params[:hbd_amount] = normalize_amount(options.merge amount: params[:hbd_amount])
+params[:hive_amount] = normalize_amount(options.merge amount: params[:hive_amount])
 params[:fee] = normalize_amount(options.merge amount: params[:fee])

 params[:ratification_deadline] = Time.parse(params[:ratification_deadline].to_s)
@@ -1028,8 +1032,8 @@ module Hive
 # * :who (String)
 # * :receiver (String)
 # * :escrow_id (String)
-# * :
-# * :
+# * :hbd_amount (String)
+# * :hive_amount (String)
 # @option options [Boolean] :pretend Just validate, do not broadcast.
 # @see https://developers.hive.io/apidefinitions/broadcast-ops.html#broadcast_ops_escrow_release
 def self.escrow_release(options, &block)
@@ -1037,8 +1041,8 @@ module Hive
 params = options[:params]
 check_required_fields(params, *required_fields)

-params[:
-params[:
+params[:hbd_amount] = normalize_amount(options.merge amount: params[:hbd_amount])
+params[:hive_amount] = normalize_amount(options.merge amount: params[:hive_amount])

 ops = [[:escrow_release, params]]

@@ -1242,8 +1246,8 @@ module Hive
 # @option options [String] :wif Posting wif
 # @option options [Hash] :params
 # * :account (String) Account claiming rewards.
-# * :
-# * :
+# * :reward_hive (Amount) Amount of HIVE to claim.
+# * :reward_hbd (Amount) Amount of HBD to claim.
 # * :reward_vests (Amount) Amount of VESTS to claim.
 # @option options [Boolean] :pretend Just validate, do not broadcast.
 # @see https://developers.hive.io/apidefinitions/broadcast-ops.html#broadcast_ops_claim_reward_balance
@@ -1253,8 +1257,8 @@ module Hive

 check_required_fields(params, *required_fields)

-params[:
-params[:
+params[:reward_hive] = normalize_amount(options.merge amount: params[:reward_hive])
+params[:reward_hbd] = normalize_amount(options.merge amount: params[:reward_hbd])
 params[:reward_vests] = normalize_amount(options.merge amount: params[:reward_vests])

 ops = [[:claim_reward_balance, params]]
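For context, a minimal sketch of calling claim_reward_balance with the renamed reward_hive/reward_hbd fields documented above (not part of this diff; the account, wif, and amounts are placeholders, and pretend: true is used so nothing is broadcast):

require 'hive'

options = {
  wif: 'posting wif goes here',    # placeholder
  params: {
    account: 'social',             # placeholder account
    reward_hive: '0.000 HIVE',
    reward_hbd: '0.000 HBD',
    reward_vests: '0.000000 VESTS'
  },
  pretend: true                    # just validate, do not broadcast (documented above)
}

Hive::Broadcast.claim_reward_balance(options) do |result|
  puts result.to_json               # result shape assumed; printed raw here
end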
data/lib/hive/fallback.rb
CHANGED

@@ -106,7 +106,6 @@ module Hive::Fallback
 :find_limit_orders,
 :find_owner_histories,
 :find_savings_withdrawals,
-:find_sbd_conversion_requests,
 :find_vesting_delegation_expirations,
 :find_vesting_delegations,
 :find_votes,
@@ -134,7 +133,6 @@ module Hive::Fallback
 :list_limit_orders,
 :list_owner_histories,
 :list_savings_withdrawals,
-:list_sbd_conversion_requests,
 :list_vesting_delegation_expirations,
 :list_vesting_delegations,
 :list_votes,
@@ -242,7 +240,7 @@ module Hive::Fallback
 find_owner_histories: {owner: String},
 find_proposals: {proposal_ids: []},
 find_savings_withdrawals: {account: String},
-
+find_hbd_conversion_requests: {account: String},
 find_vesting_delegation_expirations: {account: String},
 find_vesting_delegations: {account: String},
 find_votes: {author: String, permlink: String},
@@ -272,7 +270,7 @@ module Hive::Fallback
 list_proposal_votes: {start: NilClass, limit: Integer, order: String, order_direction: String, status: String},
 list_proposals: {start: NilClass, limit: Integer, order: String, order_direction: String, status: String},
 list_savings_withdrawals: {start: NilClass, limit: Integer, order: String},
-
+list_hbd_conversion_requests: {start: NilClass, limit: Integer, order: String},
 list_vesting_delegation_expirations: {start: NilClass, limit: Integer, order: String},
 list_vesting_delegations: {start: NilClass, limit: Integer, order: String},
 list_votes: {start: NilClass, limit: Integer, order: String},
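For context, a minimal sketch of calling the renamed fallback method above through DatabaseApi (not part of this diff; the node URL and account name are placeholders and the result shape is assumed):

require 'hive'

database_api = Hive::DatabaseApi.new(url: 'https://api.hive.blog')

# find_hbd_conversion_requests replaces find_sbd_conversion_requests in the fallback table.
database_api.find_hbd_conversion_requests(account: 'hiveio') do |result|
  puts result.to_json   # printed raw; field names are not specified by this diff
end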
data/lib/hive/jsonrpc.rb
CHANGED
data/lib/hive/marshal.rb
CHANGED

@@ -144,7 +144,7 @@ module Hive
 {
 account_creation_fee: amount,
 maximum_block_size: uint32,
-
+hbd_interest_rate: uint16
 }
 end

@@ -161,12 +161,12 @@ module Hive
 when :account_creation_fee then Hive::Type::Amount.new(string)
 # when :account_subsidy_budget then int32
 # when :account_subsidy_decay, :maximum_block_size then uint32
-when :
+when :hbd_exchange_rate
 JSON[string].tap do |rate|
 rate["base"] = Hive::Type::Amount.new(rate["base"])
 rate["quote"] = Hive::Type::Amount.new(rate["quote"])
 end
-# when :
+# when :hbd_interest_rate then uint16
 when :url, :key, :new_signing_key then string
 else; warn "Unsupported witness property: #{key}"
 end
data/lib/hive/operation.rb
CHANGED

@@ -5,7 +5,7 @@ module Hive
 include Utils

 # IDs derrived from:
-# https://gitlab.syncad.com/hive/hive/-/blob/master/libraries/protocol/include/
+# https://gitlab.syncad.com/hive/hive/-/blob/master/libraries/protocol/include/hive/protocol/operations.hpp

 IDS = [
 :vote_operation,
@@ -64,18 +64,18 @@ module Hive
 :create_proposal_operation,
 :update_proposal_votes_operation,
 :remove_proposal_operation,
+:update_proposal_operation,

 # SMT operations
 :claim_reward_balance2_operation,

 :smt_setup_operation,
-:smt_cap_reveal_operation,
-:smt_refund_operation,
 :smt_setup_emissions_operation,
 :smt_set_setup_parameters_operation,
 :smt_set_runtime_parameters_operation,
 :smt_create_operation,
-
+:smt_contribute_operation
+] + VIRTUAL_OP_IDS = [
 # virtual operations below this point
 :fill_convert_request_operation,
 :author_reward_operation,
@@ -92,7 +92,16 @@ module Hive
 :return_vesting_delegation_operation,
 :comment_benefactor_reward_operation,
 :producer_reward_operation,
-:clear_null_account_balance_operation
+:clear_null_account_balance_operation,
+:proposal_pay_operation,
+:sps_fund_operation,
+:hardfork_hive_operation,
+:hardfork_hive_restore_operation,
+:delayed_voting_operation,
+:consolidate_treasury_balance_operation,
+:effective_comment_vote_operation,
+:ineffective_delete_comment_operation,
+:sps_convert_operation
 ]

 def self.op_id(op)

data/lib/hive/operation/comment_options.rb
CHANGED

@@ -2,7 +2,7 @@ class Hive::Operation::CommentOptions < Hive::Operation
 def_attr author: :string
 def_attr permlink: :string
 def_attr max_accepted_payout: :amount
-def_attr
+def_attr percent_hbd: :uint32
 # def_attr allow_replies: :boolean
 def_attr allow_votes: :boolean
 def_attr allow_curation_rewards: :boolean

data/lib/hive/operation/escrow_release.rb
CHANGED

@@ -5,6 +5,6 @@ class Hive::Operation::EscrowRelease < Hive::Operation
 def_attr who: :string
 def_attr receiver: :string
 def_attr escrow_id: :uint32
-def_attr
-def_attr
+def_attr hbd_amount: :amount
+def_attr hive_amount: :amount
 end

data/lib/hive/operation/escrow_transfer.rb
CHANGED

@@ -1,8 +1,8 @@
 class Hive::Operation::EscrowTransfer < Hive::Operation
 def_attr from: :string
 def_attr to: :string
-def_attr
-def_attr
+def_attr hbd_amount: :amount
+def_attr hive_amount: :amount
 def_attr escrow_id: :uint32
 def_attr agent: :string
 def_attr fee: :amount
data/lib/hive/rpc/http_client.rb
CHANGED

@@ -18,7 +18,8 @@ module Hive
 # @private
 TIMEOUT_ERRORS = [Net::OpenTimeout, JSON::ParserError, Net::ReadTimeout,
 Errno::EBADF, IOError, Errno::ENETDOWN, Hive::RemoteDatabaseLockError,
-Hive::RequestTimeoutUpstreamResponseError, Hive::
+Hive::RequestTimeoutUpstreamResponseError, Hive::RemoteServerError,
+Hive::RemoteServerError]

 # @private
 POST_HEADERS = {
@@ -61,12 +62,13 @@ module Hive
 response = nil

 loop do
+sub_options = options.dup
 request = http_post(api_name)

 request_object = if !!api_name && !!api_method
-put(api_name, api_method,
-elsif !!options && defined?(
-
+put(api_name, api_method, sub_options)
+elsif !!options && defined?(sub_options.delete)
+sub_options.delete(:request_object)
 end

 if request_object.size > JSON_RPC_BATCH_SIZE_MAXIMUM
@@ -123,7 +125,7 @@ module Hive
 raise_error_response rpc_method_name, rpc_args, r
 rescue *TIMEOUT_ERRORS => e
 timeout_detected = true
-timeout_cause =
+timeout_cause = JSON[e.message]['error'] + " while posting: #{rpc_args}" rescue e.to_s

 break # fail fast
 end
data/lib/hive/stream.rb
CHANGED

@@ -35,7 +35,9 @@ module Hive
 MAX_RETRY_COUNT = 10

 VOP_TRX_ID = ('0' * 40).freeze
-
+MAX_VOP_READ_AHEAD = 100
+SHUFFLE_ROUND_LENGTH = 21
+
 # @param options [Hash] additional options
 # @option options [Hive::DatabaseApi] :database_api
 # @option options [Hive::BlockApi] :block_api
@@ -92,7 +94,7 @@ module Hive
 def transactions(options = {}, &block)
 blocks(options) do |block, block_num|
 if block.nil?
-warn "Batch missing block_num: #{block_num}, retrying ..."
+warn "Batch missing block_num: #{block_num}, retrying ..." unless @no_warn

 block = block_api.get_block(block_num: block_num) do |result|
 result.block
@@ -173,7 +175,7 @@ module Hive
 # stream = Hive::Stream.new
 # stream.operations(types: :author_reward_operation, only_virtual: true) do |vop|
 # v = vop.value
-# puts "#{v.author} got paid for #{v.permlink}: #{[v.
+# puts "#{v.author} got paid for #{v.permlink}: #{[v.hbd_payout, v.hive_payout, v.vesting_payout]}"
 # end
 #
 # ... or multiple virtual operation types;
@@ -214,6 +216,10 @@ module Hive
 only_virtual = false
 include_virtual = false
 last_block_num = nil
+within_shuffle_round = nil
+initial_head_block_number = database_api.get_dynamic_global_properties do |dgpo|
+dgpo.head_block_number
+end

 case args.first
 when Hash
@@ -226,7 +232,9 @@ module Hive

 if only_virtual
 block_numbers(options) do |block_num|
-
+within_shuffle_round ||= initial_head_block_number - block_num < SHUFFLE_ROUND_LENGTH * 2
+
+get_virtual_ops(types, block_num, within_shuffle_round, block)
 end
 else
 transactions(options) do |transaction, trx_id, block_num|
@@ -236,8 +244,9 @@ module Hive
 next unless last_block_num != block_num

 last_block_num = block_num
+within_shuffle_round ||= initial_head_block_number - block_num < SHUFFLE_ROUND_LENGTH * 2

-get_virtual_ops(types, block_num, block) if include_virtual
+get_virtual_ops(types, block_num, within_shuffle_round, block) if include_virtual
 end
 end
 end
@@ -257,6 +266,7 @@ module Hive
 object = options[:object]
 object_method = "get_#{object}".to_sym
 block_interval = BLOCK_INTERVAL
+use_block_range = true

 at_block_num, until_block_num = if !!block_range = options[:block_range]
 [block_range.first, block_range.last]
@@ -281,9 +291,32 @@ module Hive
 block_interval = BLOCK_INTERVAL
 end
 else
-
-
-
+loop do
+begin
+if use_block_range
+block_api.send(object_method, block_range: range) do |b, n|
+block.call b, n
+block_interval = BLOCK_INTERVAL
+end
+else
+range.each do |block_num|
+block_api.get_block(block_num: block_num) do |b, n|
+block.call b.block, b.block.block_id[0..7].to_i(16)
+block_interval = BLOCK_INTERVAL
+end
+end
+end
+rescue Hive::UnknownError => e
+if e.message =~ /Could not find method get_block_range/
+use_block_range = false
+
+redo
+end
+
+raise e
+end
+
+break
 end
 end

@@ -325,22 +358,96 @@ module Hive
 end

 # @private
-def get_virtual_ops(types, block_num, block)
+def get_virtual_ops(types, block_num, within_shuffle_round, block)
 retries = 0
+vop_read_ahead = within_shuffle_round ? 1 : MAX_VOP_READ_AHEAD
+
+@virtual_ops_cache ||= {}
+@virtual_ops_cache = @virtual_ops_cache.reject do |k, v|
+if k < block_num
+warn "Found orphaned virtual operations for block_num #{k}: #{v.to_json}" unless @no_warn
+
+true
+end
+
+false
+end

 loop do
-
+vops_found = false
+
+if account_history_api.class == Hive::AccountHistoryApi || @enum_virtual_ops_supported.nil? && @enum_virtual_ops_supported != false
+begin
+# Use account_history_api.enum_virtual_ops, if supported.
+
+if @virtual_ops_cache.empty? || !@virtual_ops_cache.keys.include?(block_num)
+(block_num..(block_num + vop_read_ahead)).each do |block_num|
+@virtual_ops_cache[block_num] = []
+end
+
+enum_virtual_ops_options = {
+block_range_begin: block_num,
+block_range_end: block_num + vop_read_ahead,
+# TODO Use: mode != :irreversible
+include_reversible: true
+}
+
+account_history_api.enum_virtual_ops(enum_virtual_ops_options) do |result|
+@enum_virtual_ops_supported = true
+
+result.ops.each do |vop|
+@virtual_ops_cache[vop.block] << vop
+end
+end
+end
+
+vops_found = true
+
+if !!@virtual_ops_cache[block_num]
+@virtual_ops_cache[block_num].each do |vop|
+next unless block_num == vop.block
+next if types.any? && !types.include?(vop.op.type)
+
+if vop.virtual_op == 0
+# require 'pry' ; binding.pry if vop.op.type == 'producer_reward_operation'
+warn "Found non-virtual operation (#{vop.op.type}) in enum_virtual_ops result for block: #{block_num}" unless @no_warn
+
+next
+end
+
+block.call vop.op, vop.trx_id, block_num
+end
+
+@virtual_ops_cache.delete(block_num)
+end
+rescue Hive::UnknownError => e
+if e.message =~ /This API is not supported for account history backed by Chainbase/
+warn "Retrying with get_ops_in_block (api does not support enum_virtual_ops)" unless @no_warn
+@enum_virtual_ops_supported = false
+vops_found = false
+else
+raise e
+end
+end
+end
+
+break if vops_found
+
+# Fallback to previous method.
+warn "Retrying with get_ops_in_block (did not find ops for block #{block_num} using enum_virtual_ops)" unless @no_warn
+
+response = case account_history_api
 when Hive::CondenserApi
-
+account_history_api.get_ops_in_block(block_num, true)
 when Hive::AccountHistoryApi
-
+account_history_api.get_ops_in_block(
 block_num: block_num,
-only_virtual: true
-
+only_virtual: true,
+# TODO Use: mode != :irreversible
+include_reversible: true
+)
 end

-response = account_history_api.get_ops_in_block(*get_ops_in_block_options)
-
 if response.nil? || (result = response.result).nil?
 if retries < MAX_RETRY_COUNT
 warn "Retrying get_ops_in_block on block #{block_num}" unless @no_warn
@@ -367,7 +474,7 @@ module Hive
 retries = retries + 1
 redo
 else
-warn "unable to find virtual operations for block: #{block_num}"
+warn "unable to find virtual operations for block: #{block_num}" unless @no_warn
 # raise TooManyRetriesError, "unable to find virtual operations for block: #{block_num}"
 end
 end
@@ -375,7 +482,7 @@ module Hive
 ops.each do |op|
 next if types.any? && !types.include?(op.type)

-block.call op,
+block.call op, vop.trx_id, block_num
 end

 break
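For context, a minimal usage sketch of the updated Stream#operations and the no_warn option exercised above (not part of this diff; the node URL is a placeholder):

require 'hive'

stream = Hive::Stream.new(url: 'https://api.hive.blog', no_warn: true)

# include_virtual: true interleaves virtual operations (read via enum_virtual_ops when
# the node supports it, otherwise via get_ops_in_block) with signed operations.
stream.operations(include_virtual: true) do |op, trx_id, block_num|
  puts "#{block_num} :: #{trx_id}; op: #{op.type}"
end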
data/lib/hive/version.rb
CHANGED
metadata
CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: hive-ruby
 version: !ruby/object:Gem::Version
-version: 1.0.
+version: 1.0.3
 platform: ruby
 authors:
 - Anthony Martin
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2021-04-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
 name: bundler
@@ -16,20 +16,20 @@ dependencies:
 requirements:
 - - "~>"
 - !ruby/object:Gem::Version
-version: '1
+version: '2.1'
 - - ">="
 - !ruby/object:Gem::Version
-version: 1.
+version: 2.1.4
 type: :development
 prerelease: false
 version_requirements: !ruby/object:Gem::Requirement
 requirements:
 - - "~>"
 - !ruby/object:Gem::Version
-version: '1
+version: '2.1'
 - - ">="
 - !ruby/object:Gem::Version
-version: 1.
+version: 2.1.4
 - !ruby/object:Gem::Dependency
 name: rake
 requirement: !ruby/object:Gem::Requirement
@@ -294,22 +294,16 @@ dependencies:
 name: hashie
 requirement: !ruby/object:Gem::Requirement
 requirements:
-- - "~>"
-- !ruby/object:Gem::Version
-version: '3.5'
 - - ">="
 - !ruby/object:Gem::Version
-version: 3.5
+version: '3.5'
 type: :runtime
 prerelease: false
 version_requirements: !ruby/object:Gem::Requirement
 requirements:
-- - "~>"
-- !ruby/object:Gem::Version
-version: '3.5'
 - - ">="
 - !ruby/object:Gem::Version
-version: 3.5
+version: '3.5'
 - !ruby/object:Gem::Dependency
 name: bitcoin-ruby
 requirement: !ruby/object:Gem::Requirement
@@ -317,9 +311,9 @@ dependencies:
 - - "~>"
 - !ruby/object:Gem::Version
 version: '0.0'
-- -
+- - '='
 - !ruby/object:Gem::Version
-version: 0.0.
+version: 0.0.20
 type: :runtime
 prerelease: false
 version_requirements: !ruby/object:Gem::Requirement
@@ -327,9 +321,9 @@ dependencies:
 - - "~>"
 - !ruby/object:Gem::Version
 version: '0.0'
-- -
+- - '='
 - !ruby/object:Gem::Version
-version: 0.0.
+version: 0.0.20
 - !ruby/object:Gem::Dependency
 name: ffi
 requirement: !ruby/object:Gem::Requirement
@@ -497,9 +491,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
 version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
 requirements:
-- - "
+- - ">="
 - !ruby/object:Gem::Version
-version:
+version: '0'
 requirements: []
 rubygems_version: 3.0.8
 signing_key: