hive-ruby 1.0.2 → 1.0.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 0532e4cf00c66e983cec921fd1ef3d193df1d61c5c317254f1876c3741299c4c
- data.tar.gz: f14bf0b18bdd1ed9926708c89f9fe0f9a3b83cfa81dd310ac3673b0f49d3b69f
+ metadata.gz: 8bf5d72142c42463a6146346f0bdd4e9943da776894b26f19a6bf3bac7416670
+ data.tar.gz: a06c05f424bf354b792fc3a49dde1a064c1bd974a922c4a01ec9ef6281fc3611
  SHA512:
- metadata.gz: f7204cf39c276304f1ee0c8fb0ed125fc82a4ddcb905841db870bc1ea39e7a10f5ac0da18fd2b49c5a8c4fa16ed0bc154bb1744fcaeb6c292f423a2bdf024bfc
- data.tar.gz: d7189046c313b6d7eeeb39b81bb4ad7472bc4a80093d27a7c1a4aef2e53fd7ad07bfedda690b11638bdc93368914c16a29f9045f90f3b47d510a979da1f31b04
+ metadata.gz: fff041bd517d2101d0d84b521d17880b0bb7d641751bfa5a1c22529fd3e451290f28556830726ca02f38a40fa39e521bf55f852885374d93842a3c4f6bd1b7dc
+ data.tar.gz: a6d4e6be972a57411d6c51894e5ba12c610496e06f4889f1e5c72493044c8fbcb1d5438b51d329209d66ce38fb88c165a6994dfa80c7f2ec2b6900f33763513a
data/Rakefile CHANGED
@@ -153,9 +153,13 @@ namespace :stream do
  first_block_num = args[:at_block_num].to_i if !!args[:at_block_num]
  stream = Hive::Stream.new(url: ENV['TEST_NODE'], mode: mode)
  api = Hive::Api.new(url: ENV['TEST_NODE'])
+ block_api = Hive::BlockApi.new(url: ENV['TEST_NODE'])
  last_block_num = nil
  last_timestamp = nil
  range_complete = false
+ round_pool = {}
+ aging_blocks = {}
+ aged_block_interval = 630

  api.get_dynamic_global_properties do |properties|
  current_block_num = if mode == :head
@@ -165,13 +169,14 @@ namespace :stream do
  end

  # First pass replays latest a random number of blocks to test chunking.
- first_block_num ||= current_block_num - (rand * 200).to_i
+ first_block_num ||= current_block_num - (rand * 2000).to_i

  range = first_block_num..current_block_num
  puts "Initial block range: #{range.size}"

  stream.blocks(at_block_num: range.first) do |block, block_num|
  current_timestamp = Time.parse(block.timestamp + 'Z')
+ round_pool[current_timestamp] = {block_num: block_num, block: block}

  if !range_complete && block_num > range.last
  puts 'Done with initial range.'
@@ -188,9 +193,35 @@ namespace :stream do
  exit
  end

- puts "\t#{block_num} Timestamp: #{current_timestamp}, witness: #{block.witness}"
+ round_pool.each do |k, v|
+ aging_blocks[k] = v if Time.now - k > aged_block_interval
+ end
+
+ round_pool = round_pool.select{|k, v| Time.now - k <= aged_block_interval}.to_h
+ drift = last_timestamp.nil? ? 0 : (current_timestamp - last_timestamp) - Hive::Stream::BLOCK_INTERVAL.to_f
+
+ puts "\t#{block_num} Timestamp: #{current_timestamp}, witness: #{block.witness}, aging blocks: #{aging_blocks.size}, drift: #{drift}"
+
  last_block_num = block_num
  last_timestamp = current_timestamp
+
+ if range_complete && aging_blocks.any?
+ aging_block_nums = aging_blocks.map{|k, v| v[:block_num]}
+ wire_block_range = (aging_block_nums.first..aging_block_nums.last)
+
+ block_api.get_block_headers(block_range: wire_block_range) do |wire_header, wire_block_num|
+ wire_timestamp = Time.parse(wire_header.timestamp + 'Z')
+ aging_block = aging_blocks[wire_timestamp][:block]
+
+ if wire_header.previous == aging_block.previous
+ puts "\t\tAged block test #{wire_block_num}: √"
+ aging_blocks.delete(wire_timestamp)
+ else
+ puts "\t\tAged block test #{wire_block_num}: detected block-reorganization (#{wire_header.previous} != #{aging_block.previous})"
+ exit
+ end
+ end
+ end
  end
  end
  end
@@ -247,6 +278,8 @@ namespace :stream do
  first_block_num = args[:at_block_num].to_i if !!args[:at_block_num]
  stream = Hive::Stream.new(url: ENV['TEST_NODE'], mode: mode)
  api = Hive::Api.new(url: ENV['TEST_NODE'])
+ ah_api = Hive::AccountHistoryApi.new(url: ENV['TEST_NODE'])
+ round_vops = {}

  api.get_dynamic_global_properties do |properties|
  current_block_num = if mode == :head
@@ -259,6 +292,31 @@ namespace :stream do
  first_block_num ||= current_block_num - (rand * 200).to_i

  stream.operations(at_block_num: first_block_num, only_virtual: true) do |op, trx_id, block_num|
+ # 126 is about two shuffle rounds (if mode == :head), we need to avoid
+ # the current block_num because we're still in the middle of reading
+ # all of the vops for that block.
+ if round_vops.size > 126 && !round_vops.include?(block_num)
+ ah_api.enum_virtual_ops(block_range_begin: round_vops.keys.min, block_range_end: round_vops.keys.max + 1, include_reversible: true) do |result|
+ round_vops.each do |k, v|
+ later_ops = result.ops.select{|vop| vop.block == k}
+ if (verify_count = later_ops.size) == v.size
+ puts "\t\t#{k} :: streamed vop count was #{v.size} √"
+ else
+ puts "\t\t#{k} :: streamed vop count was #{v.size}, later became #{verify_count}"
+ puts "\t\t\t#{v.map{|op| op.type}.join(', ')}"
+ puts "\t\tLater ops:\n\t\t\t#{later_ops.map{|vop| vop.op.type}.join(', ')}"
+
+ exit
+ end
+ end
+ end
+
+ round_vops = {}
+ end
+
+ round_vops[block_num] ||= []
+ round_vops[block_num] << op
+
  puts "#{block_num} :: #{trx_id}; op: #{op.type}"
  end
  end
@@ -286,6 +344,78 @@ namespace :stream do
  end
  end
  end
+
+ desc 'Test the ability to stream all operations (including virtual) that match a pattern.'
+ task :op_pattern, [:pattern, :mode, :at_block_num] do |t, args|
+ mode = (args[:mode] || 'irreversible').to_sym
+ first_block_num = args[:at_block_num].to_i if !!args[:at_block_num]
+ stream = Hive::Stream.new(url: ENV['TEST_NODE'], mode: mode)
+ api = Hive::Api.new(url: ENV['TEST_NODE'])
+ pattern = /#{args[:pattern]}/i
+
+ api.get_dynamic_global_properties do |properties|
+ current_block_num = if mode == :head
+ properties.head_block_number
+ else
+ properties.last_irreversible_block_num
+ end
+
+ # First pass replays latest a random number of blocks to test chunking.
+ first_block_num ||= current_block_num - (rand * 200).to_i
+
+ stream.operations(at_block_num: first_block_num, include_virtual: true) do |op, trx_id, block_num|
+ next unless op.to_json =~ pattern
+
+ puts "#{block_num} :: #{trx_id}; op: #{op.to_json}"
+ end
+ end
+ end
+
+ desc 'Test the ability to stream all effective_comment_vote_operation operations.'
+ task :effective_comment_vote_operation, [:mode, :at_block_num] do |t, args|
+ mode = (args[:mode] || 'irreversible').to_sym
+ first_block_num = args[:at_block_num].to_i if !!args[:at_block_num]
+ stream = Hive::Stream.new(url: ENV['TEST_NODE'], mode: mode, no_warn: true)
+ api = Hive::Api.new(url: ENV['TEST_NODE'])
+
+ api.get_dynamic_global_properties do |properties|
+ current_block_num = if mode == :head
+ properties.head_block_number
+ else
+ properties.last_irreversible_block_num
+ end
+
+ # First pass replays latest a random number of blocks to test chunking.
+ first_block_num ||= current_block_num - (rand * 200).to_i
+
+ stream.operations(at_block_num: first_block_num, include_virtual: true) do |op, trx_id, block_num|
+ next unless op.type == 'effective_comment_vote_operation'
+ pending_payout = Hive::Type::Amount.new(op.value.pending_payout)
+
+ puts "#{block_num} :: #{trx_id}; voter: #{op.value.voter}, author: #{op.value.author}, pending_payout: #{pending_payout}"
+ end
+ end
+ end
+ end
+
+ desc 'List hardforks.'
+ task :hardforks do
+ database_api = Hive::DatabaseApi.new(url: ENV['TEST_NODE'])
+ block_api = Hive::BlockApi.new(url: ENV['TEST_NODE'])
+ ah_api = Hive::AccountHistoryApi.new(url: ENV['TEST_NODE'])
+ last_hf_timestamp = block_api.get_block(block_num: 1) do |result|
+ Time.parse(result.block.timestamp + 'Z')
+ end
+
+ database_api.get_hardfork_properties do |properties|
+ processed_hardforks = properties.processed_hardforks
+
+ processed_hardforks.each_with_index do |timestamp, index|
+ timestamp = Time.parse(timestamp + 'Z')
+
+ puts "HF#{index}: #{timestamp}"
+ end
+ end
  end

  YARD::Rake::YardocTask.new do |t|
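
The new stream:op_pattern and stream:effective_comment_vote_operation tasks above are thin wrappers around Hive::Stream#operations with include_virtual: true. A minimal standalone sketch of the same filter, where the node URL and the pattern are placeholders rather than anything shipped with the gem:

    require 'hive'

    # Stream every operation (virtual included) and keep only those whose
    # JSON matches a pattern, mirroring the rake task's filter.
    stream = Hive::Stream.new(url: 'https://api.openhive.network', mode: :irreversible)
    pattern = /effective_comment_vote/i

    stream.operations(include_virtual: true) do |op, trx_id, block_num|
      next unless op.to_json =~ pattern

      puts "#{block_num} :: #{trx_id}; op: #{op.type}"
    end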
data/hive-ruby.gemspec CHANGED
@@ -32,7 +32,7 @@ Gem::Specification.new do |spec|

  spec.add_dependency 'json', '~> 2.1', '>= 2.1.0'
  spec.add_dependency 'logging', '~> 2.2', '>= 2.2.0'
- spec.add_dependency 'hashie', '~> 4.1', '>= 3.5.7'
+ spec.add_dependency 'hashie', '>= 3.5'
  spec.add_dependency 'bitcoin-ruby', '~> 0.0', '0.0.20'
  spec.add_dependency 'ffi', '~> 1.9', '>= 1.9.23'
  spec.add_dependency 'bindata', '~> 2.4', '>= 2.4.4'
data/lib/hive/api.rb CHANGED
@@ -193,7 +193,18 @@ module Hive
  # Some argument are optional, but if the arguments passed are greater
  # than the expected arguments size, we can warn.
  if args_size > expected_args_size
- @error_pipe.puts "Warning #{rpc_method_name} expects arguments: #{expected_args_size}, got: #{args_size}"
+ if rpc_method_name == 'account_history_api.get_account_history' && expected_args_size == 3 && args_size == 6
+ # TODO Remove this condition if they ever fix this issue:
+ # https://gitlab.syncad.com/hive/hive/-/issues/100
+ elsif rpc_method_name == 'account_history_api.get_ops_in_block' && expected_args_size == 2 && args_size == 3
+ # TODO Remove this condition if they ever fix this issue:
+ # https://gitlab.syncad.com/hive/hive/-/issues/100
+ elsif rpc_method_name == 'account_history_api.enum_virtual_ops' && expected_args_size == 2 && args_size == 3
+ # TODO Remove this condition if they ever fix this issue:
+ # https://gitlab.syncad.com/hive/hive/-/issues/100
+ else
+ @error_pipe.puts "Warning #{rpc_method_name} expects arguments: #{expected_args_size}, got: #{args_size}"
+ end
  end
  rescue NoMethodError => e
  error = Hive::ArgumentError.new("#{rpc_method_name} expects arguments: #{expected_args_size}", e)
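
The three suppressed cases cover calls like the one below (a sketch; the node URL and block numbers are placeholders). They pass include_reversible, which the advertised signatures do not count, so older releases printed a spurious arity warning on every such call:

    ah_api = Hive::AccountHistoryApi.new(url: 'https://api.openhive.network')

    # Three named arguments where the published signature lists two; the
    # warning branch above now stays quiet for this exact case.
    ah_api.enum_virtual_ops(
      block_range_begin: 50_000_000,
      block_range_end: 50_000_001,
      include_reversible: true
    ) do |result|
      result.ops.each { |vop| puts "#{vop.block} :: #{vop.op.type}" }
    end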
@@ -6,6 +6,8 @@ module Hive
  # Also see: {https://developers.hive.io/apidefinitions/block-api.html Block API Definitions}
  class BlockApi < Api
  MAX_RANGE_SIZE = 50
+ MAX_NO_BATCH_RANGE_SIZE = 200
+ MAX_NO_BATCH_NO_RANGE_SIZE = 1

  def initialize(options = {})
  self.class.api_name = :block_api
@@ -20,24 +22,30 @@ module Hive
  get_block_objects(options.merge(object: :block_header), block)
  end

- # Uses a batched requst on a range of blocks.
+ # Uses get_block_range (or batched requsts) on a range of blocks.
  #
  # @param options [Hash] The attributes to get a block range with.
  # @option options [Range] :block_range starting on one block number and ending on an higher block number.
- def get_blocks(options = {block_range: (0..0)}, &block)
+ # @option options [Boolean] :use_batch use json-rpc batch instead of get_block_range (preferred)
+ def get_blocks(options = {block_range: (0..0), use_batch: false}, &block)
  get_block_objects(options.merge(object: :block), block)
  end
  private
- def get_block_objects(options = {block_range: (0..0)}, block = nil)
+ def get_block_objects(options = {block_range: (0..0), use_batch: false}, block = nil)
  object = options[:object]
- object_method = "get_#{object}".to_sym
  block_range = options[:block_range] || (0..0)
+ use_batch = !!options[:use_batch]
+
+ object = :block_range if object == :block && !use_batch
+ object_method = "get_#{object}".to_sym

- if (start = block_range.first) < 1
+ if !!block_range && block_range.any? && (start = block_range.first) < 1
  raise Hive::ArgumentError, "Invalid starting block: #{start}"
  end

- chunks = if block_range.size > MAX_RANGE_SIZE
+ chunks = if object == :block_range
+ block_range.each_slice(MAX_NO_BATCH_RANGE_SIZE)
+ elsif block_range.size > MAX_RANGE_SIZE
  block_range.each_slice(MAX_RANGE_SIZE)
  else
  [block_range]
@@ -46,27 +54,65 @@ module Hive
  for sub_range in chunks do
  request_object = []

- for i in sub_range do
- @rpc_client.put(self.class.api_name, object_method, block_num: i, request_object: request_object)
+ if !!use_batch
+ for i in sub_range do
+ @rpc_client.put(self.class.api_name, object_method, block_num: i, request_object: request_object)
+ end
+ else
+ case object
+ when :block_header
+ # Must use json-rpc batch for block headers request.
+ for i in sub_range do
+ @rpc_client.put(self.class.api_name, :get_block_header, block_num: i, request_object: request_object)
+ end
+ when :block, :block_range
+ if sub_range.size == 1
+ @rpc_client.put(self.class.api_name, :get_block, block_num: sub_range.first, request_object: request_object)
+ else
+ @rpc_client.put(self.class.api_name, :get_block_range, starting_block_num: sub_range.first, count: sub_range.size, request_object: request_object)
+ end
+ end
  end

  if !!block
  index = 0
  @rpc_client.rpc_batch_execute(api_name: self.class.api_name, request_object: request_object) do |result, error, id|
+ raise Hive::RemoteNodeError, error.to_json if !!error
+
  block_num = sub_range.to_a[index]
  index = index + 1

  case object
  when :block_header
- block.call(result.nil? ? nil : result[:header], block_num)
+ block.call(result[:header], block_num)
  else
- block.call(result.nil? ? nil : result[object], block_num)
+ if !!use_batch || !!result[:block]
+ block.call(result[:block] || result[object], block_num)
+ else
+ current_block_num = block_num
+ result[:blocks].each do |b|
+ # Now verify that the previous block_num really is the
+ # previous block.
+
+ decoded_previous_block_num = b.previous[0..7].to_i(16)
+ previous_block_num = current_block_num - 1
+
+ unless decoded_previous_block_num == previous_block_num
+ raise Hive::RemoteNodeError, "Wrong block_num. Got #{decoded_previous_block_num}, expected #{previous_block_num}"
+ end
+
+ block.call(b, current_block_num)
+ current_block_num = current_block_num + 1
+ end
+ end
  end
  end
  else
  blocks = []

  @rpc_client.rpc_batch_execute(api_name: self.class.api_name, request_object: request_object) do |result, error, id|
+ raise Hive::RemoteNodeError, error.to_json if !!error
+
  blocks << result
  end
  end
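
With these changes, get_blocks defaults to the new get_block_range path and only falls back to a json-rpc batch of get_block calls when use_batch: true is passed. A usage sketch (node URL and block numbers are placeholders):

    block_api = Hive::BlockApi.new(url: 'https://api.openhive.network')
    range = 50_000_000..50_000_010

    # Default: fetched via get_block_range in slices of up to 200 blocks.
    block_api.get_blocks(block_range: range) do |block, block_num|
      puts "#{block_num}: #{block.witness}"
    end

    # Legacy behavior: one batched get_block request per block, 50 per batch.
    block_api.get_blocks(block_range: range, use_batch: true) do |block, block_num|
      puts "#{block_num}: #{block.witness}"
    end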
@@ -5,7 +5,7 @@ module Hive
  include Utils

  # IDs derrived from:
- # https://gitlab.syncad.com/hive/hive/-/blob/master/libraries/protocol/include/steem/protocol/operations.hpp
+ # https://gitlab.syncad.com/hive/hive/-/blob/master/libraries/protocol/include/hive/protocol/operations.hpp

  IDS = [
  :vote_operation,
@@ -64,18 +64,18 @@ module Hive
  :create_proposal_operation,
  :update_proposal_votes_operation,
  :remove_proposal_operation,
+ :update_proposal_operation,

  # SMT operations
  :claim_reward_balance2_operation,

  :smt_setup_operation,
- :smt_cap_reveal_operation,
- :smt_refund_operation,
  :smt_setup_emissions_operation,
  :smt_set_setup_parameters_operation,
  :smt_set_runtime_parameters_operation,
  :smt_create_operation,
-
+ :smt_contribute_operation
+ ] + VIRTUAL_OP_IDS = [
  # virtual operations below this point
  :fill_convert_request_operation,
  :author_reward_operation,
@@ -92,7 +92,16 @@ module Hive
  :return_vesting_delegation_operation,
  :comment_benefactor_reward_operation,
  :producer_reward_operation,
- :clear_null_account_balance_operation
+ :clear_null_account_balance_operation,
+ :proposal_pay_operation,
+ :sps_fund_operation,
+ :hardfork_hive_operation,
+ :hardfork_hive_restore_operation,
+ :delayed_voting_operation,
+ :consolidate_treasury_balance_operation,
+ :effective_comment_vote_operation,
+ :ineffective_delete_comment_operation,
+ :sps_convert_operation
  ]

  def self.op_id(op)
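
Splitting the list keeps the virtual operation names addressable on their own. A small sketch, assuming these constants live on Hive::Operation exactly as the diff above suggests:

    require 'hive'

    Hive::Operation::VIRTUAL_OP_IDS.include?(:producer_reward_operation) # => true
    Hive::Operation::VIRTUAL_OP_IDS.include?(:vote_operation)            # => false
    Hive::Operation::IDS.index(:vote_operation)                          # => 0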
@@ -62,12 +62,13 @@ module Hive
  response = nil

  loop do
+ sub_options = options.dup
  request = http_post(api_name)

  request_object = if !!api_name && !!api_method
- put(api_name, api_method, options)
- elsif !!options && defined?(options.delete)
- options.delete(:request_object)
+ put(api_name, api_method, sub_options)
+ elsif !!options && defined?(sub_options.delete)
+ sub_options.delete(:request_object)
  end

  if request_object.size > JSON_RPC_BATCH_SIZE_MAXIMUM
@@ -124,7 +125,7 @@ module Hive
  raise_error_response rpc_method_name, rpc_args, r
  rescue *TIMEOUT_ERRORS => e
  timeout_detected = true
- timeout_cause = nil
+ timeout_cause = JSON[e.message]['error'] + " while posting: #{rpc_args}" rescue e.to_s

  break # fail fast
  end
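
The sub_options = options.dup change keeps the caller's options intact across loop iterations: deleting :request_object from the shared hash on the first pass would leave nothing for a retried attempt, which appears to be why the client now works on a copy per attempt. An illustrative sketch of the hazard (plain Ruby, not gem API):

    options = { request_object: [{ jsonrpc: '2.0', method: 'block_api.get_block', id: 1 }] }

    2.times do
      sub_options = options.dup                         # copy per attempt
      request_object = sub_options.delete(:request_object)
      puts request_object.inspect                       # same payload every time
    end

    # Without the dup, the second iteration would print nil because the
    # delete would have mutated the shared options hash on the first pass.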
data/lib/hive/stream.rb CHANGED
@@ -35,7 +35,9 @@ module Hive
  MAX_RETRY_COUNT = 10

  VOP_TRX_ID = ('0' * 40).freeze
-
+ MAX_VOP_READ_AHEAD = 100
+ SHUFFLE_ROUND_LENGTH = 21
+
  # @param options [Hash] additional options
  # @option options [Hive::DatabaseApi] :database_api
  # @option options [Hive::BlockApi] :block_api
@@ -92,7 +94,7 @@ module Hive
  def transactions(options = {}, &block)
  blocks(options) do |block, block_num|
  if block.nil?
- warn "Batch missing block_num: #{block_num}, retrying ..."
+ warn "Batch missing block_num: #{block_num}, retrying ..." unless @no_warn

  block = block_api.get_block(block_num: block_num) do |result|
  result.block
@@ -214,6 +216,10 @@ module Hive
  only_virtual = false
  include_virtual = false
  last_block_num = nil
+ within_shuffle_round = nil
+ initial_head_block_number = database_api.get_dynamic_global_properties do |dgpo|
+ dgpo.head_block_number
+ end

  case args.first
  when Hash
@@ -226,7 +232,9 @@ module Hive

  if only_virtual
  block_numbers(options) do |block_num|
- get_virtual_ops(types, block_num, block)
+ within_shuffle_round ||= initial_head_block_number - block_num < SHUFFLE_ROUND_LENGTH * 2
+
+ get_virtual_ops(types, block_num, within_shuffle_round, block)
  end
  else
  transactions(options) do |transaction, trx_id, block_num|
@@ -236,8 +244,9 @@ module Hive
  next unless last_block_num != block_num

  last_block_num = block_num
+ within_shuffle_round ||= initial_head_block_number - block_num < SHUFFLE_ROUND_LENGTH * 2

- get_virtual_ops(types, block_num, block) if include_virtual
+ get_virtual_ops(types, block_num, within_shuffle_round, block) if include_virtual
  end
  end
  end
@@ -257,6 +266,7 @@ module Hive
  object = options[:object]
  object_method = "get_#{object}".to_sym
  block_interval = BLOCK_INTERVAL
+ use_block_range = true

  at_block_num, until_block_num = if !!block_range = options[:block_range]
  [block_range.first, block_range.last]
@@ -281,9 +291,32 @@ module Hive
  block_interval = BLOCK_INTERVAL
  end
  else
- block_api.send(object_method, block_range: range) do |b, n|
- block.call b, n
- block_interval = BLOCK_INTERVAL
+ loop do
+ begin
+ if use_block_range
+ block_api.send(object_method, block_range: range) do |b, n|
+ block.call b, n
+ block_interval = BLOCK_INTERVAL
+ end
+ else
+ range.each do |block_num|
+ block_api.get_block(block_num: block_num) do |b, n|
+ block.call b.block, b.block.block_id[0..7].to_i(16)
+ block_interval = BLOCK_INTERVAL
+ end
+ end
+ end
+ rescue Hive::UnknownError => e
+ if e.message =~ /Could not find method get_block_range/
+ use_block_range = false
+
+ redo
+ end
+
+ raise e
+ end
+
+ break
  end
  end

@@ -325,22 +358,96 @@ module Hive
  end

  # @private
- def get_virtual_ops(types, block_num, block)
+ def get_virtual_ops(types, block_num, within_shuffle_round, block)
  retries = 0
+ vop_read_ahead = within_shuffle_round ? 1 : MAX_VOP_READ_AHEAD
+
+ @virtual_ops_cache ||= {}
+ @virtual_ops_cache = @virtual_ops_cache.reject do |k, v|
+ if k < block_num
+ warn "Found orphaned virtual operations for block_num #{k}: #{v.to_json}" unless @no_warn
+
+ true
+ end
+
+ false
+ end

  loop do
- get_ops_in_block_options = case account_history_api
+ vops_found = false
+
+ if account_history_api.class == Hive::AccountHistoryApi || @enum_virtual_ops_supported.nil? && @enum_virtual_ops_supported != false
+ begin
+ # Use account_history_api.enum_virtual_ops, if supported.
+
+ if @virtual_ops_cache.empty? || !@virtual_ops_cache.keys.include?(block_num)
+ (block_num..(block_num + vop_read_ahead)).each do |block_num|
+ @virtual_ops_cache[block_num] = []
+ end
+
+ enum_virtual_ops_options = {
+ block_range_begin: block_num,
+ block_range_end: block_num + vop_read_ahead,
+ # TODO Use: mode != :irreversible
+ include_reversible: true
+ }
+
+ account_history_api.enum_virtual_ops(enum_virtual_ops_options) do |result|
+ @enum_virtual_ops_supported = true
+
+ result.ops.each do |vop|
+ @virtual_ops_cache[vop.block] << vop
+ end
+ end
+ end
+
+ vops_found = true
+
+ if !!@virtual_ops_cache[block_num]
+ @virtual_ops_cache[block_num].each do |vop|
+ next unless block_num == vop.block
+ next if types.any? && !types.include?(vop.op.type)
+
+ if vop.virtual_op == 0
+ # require 'pry' ; binding.pry if vop.op.type == 'producer_reward_operation'
+ warn "Found non-virtual operation (#{vop.op.type}) in enum_virtual_ops result for block: #{block_num}" unless @no_warn
+
+ next
+ end
+
+ block.call vop.op, vop.trx_id, block_num
+ end
+
+ @virtual_ops_cache.delete(block_num)
+ end
+ rescue Hive::UnknownError => e
+ if e.message =~ /This API is not supported for account history backed by Chainbase/
+ warn "Retrying with get_ops_in_block (api does not support enum_virtual_ops)" unless @no_warn
+ @enum_virtual_ops_supported = false
+ vops_found = false
+ else
+ raise e
+ end
+ end
+ end
+
+ break if vops_found
+
+ # Fallback to previous method.
+ warn "Retrying with get_ops_in_block (did not find ops for block #{block_num} using enum_virtual_ops)" unless @no_warn
+
+ response = case account_history_api
  when Hive::CondenserApi
- [block_num, true]
+ account_history_api.get_ops_in_block(block_num, true)
  when Hive::AccountHistoryApi
- {
+ account_history_api.get_ops_in_block(
  block_num: block_num,
- only_virtual: true
- }
+ only_virtual: true,
+ # TODO Use: mode != :irreversible
+ include_reversible: true
+ )
  end

- response = account_history_api.get_ops_in_block(*get_ops_in_block_options)
-
  if response.nil? || (result = response.result).nil?
  if retries < MAX_RETRY_COUNT
  warn "Retrying get_ops_in_block on block #{block_num}" unless @no_warn
@@ -367,7 +474,7 @@ module Hive
  retries = retries + 1
  redo
  else
- warn "unable to find virtual operations for block: #{block_num}"
+ warn "unable to find virtual operations for block: #{block_num}" unless @no_warn
  # raise TooManyRetriesError, "unable to find virtual operations for block: #{block_num}"
  end
  end
@@ -375,7 +482,7 @@ module Hive
  ops.each do |op|
  next if types.any? && !types.include?(op.type)

- block.call op, VOP_TRX_ID, block_num
+ block.call op, vop.trx_id, block_num
  end

  break
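
None of this changes the caller-facing streaming API: virtual operations are consumed the same way, but they are now pre-fetched up to MAX_VOP_READ_AHEAD blocks at a time via enum_virtual_ops (when the node supports it) and, on that path, carry their real transaction ids instead of the all-zero VOP_TRX_ID placeholder. A minimal sketch (node URL is a placeholder):

    stream = Hive::Stream.new(url: 'https://api.openhive.network', mode: :irreversible)

    stream.operations(only_virtual: true) do |op, trx_id, block_num|
      puts "#{block_num} :: #{trx_id}; op: #{op.type}"
    end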
data/lib/hive/version.rb CHANGED
@@ -1,4 +1,4 @@
  module Hive
- VERSION = '1.0.2'
+ VERSION = '1.0.3'
  AGENT_ID = "hive-ruby/#{VERSION}"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: hive-ruby
  version: !ruby/object:Gem::Version
- version: 1.0.2
+ version: 1.0.3
  platform: ruby
  authors:
  - Anthony Martin
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-11-06 00:00:00.000000000 Z
+ date: 2021-04-23 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
@@ -296,20 +296,14 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 3.5.7
- - - "~>"
- - !ruby/object:Gem::Version
- version: '4.1'
+ version: '3.5'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 3.5.7
- - - "~>"
- - !ruby/object:Gem::Version
- version: '4.1'
+ version: '3.5'
  - !ruby/object:Gem::Dependency
  name: bitcoin-ruby
  requirement: !ruby/object:Gem::Requirement