reth 0.1.0

@@ -0,0 +1,361 @@
# -*- encoding : ascii-8bit -*-

require 'fileutils'

module Reth

  class AccountService < ::DEVp2p::Service

    name 'accounts'

    default_config(
      accounts: {
        keystore_dir: 'keystore',
        must_include_coinbase: true
      }
    )

    attr :accounts

    DEFAULT_COINBASE = Utils.decode_hex('de0b295669a9fd93d5f28d9ec85e40f4cb697bae')

    def initialize(app)
      super(app)

      if app.config[:accounts][:keystore_dir][0] == '/'
        @keystore_dir = app.config[:accounts][:keystore_dir]
      else # relative path
        @keystore_dir = File.join app.config[:data_dir], app.config[:accounts][:keystore_dir]
      end

      @accounts = []

      if !File.exist?(@keystore_dir)
        logger.warn "keystore directory does not exist", directory: @keystore_dir
      elsif !File.directory?(@keystore_dir)
        logger.error "configured keystore directory is a file, not a directory", directory: @keystore_dir
      else
        logger.info "searching for key files", directory: @keystore_dir

        ignore = %w(. ..)
        Dir.foreach(@keystore_dir) do |filename|
          next if ignore.include?(filename)

          begin
            @accounts.push Account.load(File.join(@keystore_dir, filename))
          rescue ValueError
            logger.warn "invalid file skipped in keystore directory", path: filename
          end
        end
      end
      @accounts.sort_by! {|acct| acct.path.to_s }

      if @accounts.empty?
        logger.warn "no accounts found"
      else
        logger.info "found account(s)", accounts: @accounts
      end
    end

    def start
      # do nothing
    end

    def stop
      # do nothing
    end

    ##
    # Return the address that should be used as coinbase for new blocks.
    #
    # The coinbase address is given by the config field pow.coinbase_hex. If
    # this does not exist, the address of the first account is used instead.
    # If there are no accounts, the coinbase is `DEFAULT_COINBASE`.
    #
    # @raise [ValueError] if the coinbase is invalid (not a string, wrong
    #   length) or there is no account for it and the config flag
    #   `accounts.must_include_coinbase` is set (does not apply to the
    #   default coinbase).
    #
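    # @example Pinning the coinbase via the config (a sketch)
    #   # with app.config[:pow] == { coinbase_hex: '0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae' }
    #   app.services.accounts.coinbase   # => 20-byte binary address string
    #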
    def coinbase
      cb_hex = (app.config[:pow] || {})[:coinbase_hex]
      if cb_hex
        raise ValueError, 'coinbase must be String' unless cb_hex.is_a?(String)
        begin
          cb = Utils.decode_hex Utils.remove_0x_head(cb_hex)
        rescue TypeError
          raise ValueError, 'invalid coinbase'
        end
      else
        accts = accounts_with_address
        return DEFAULT_COINBASE if accts.empty?
        cb = accts[0].address
      end

      raise ValueError, 'wrong coinbase length' if cb.size != 20

      if config[:accounts][:must_include_coinbase]
        raise ValueError, 'no account for coinbase' if !@accounts.map(&:address).include?(cb)
      end

      cb
    end

    ##
    # Add an account.
    #
    # If `store` is true the account will be stored as a key file at the
    # location given by `account.path`. If this is `nil` a `ValueError` is
    # raised. `include_address` and `include_id` determine whether the
    # address and the id are included in the stored key file.
    #
    # This method will raise a `ValueError` if the new account has the same
    # UUID as an account already known to the service. Note that address
    # collisions do not result in an exception as those may slip through
    # anyway for locked accounts with hidden addresses.
    #
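    # @example Registering an already constructed account (a sketch; `account`
    #   is assumed to be an unlocked Account with a known address)
    #   accounts = app.services.accounts
    #   account.path ||= accounts.propose_path(account.address)
    #   accounts.add_account(account)                  # writes the key file and registers it
    #   accounts.add_account(in_memory_account, false) # register only, skip writing a key file
    #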
    def add_account(account, store=true, include_address=true, include_id=true)
      logger.info "adding account", account: account

      if account.uuid && @accounts.any? {|acct| acct.uuid == account.uuid }
        logger.error 'could not add account (UUID collision)', uuid: account.uuid
        raise ValueError, 'Could not add account (UUID collision)'
      end

      if store
        raise ValueError, 'Cannot store account without path' if account.path.nil?
        if File.exist?(account.path)
          logger.error 'File already exists', path: account.path
          raise IOError, 'File already exists'
        end

        raise AssertError if @accounts.any? {|acct| acct.path == account.path }

        begin
          directory = File.dirname account.path
          FileUtils.mkdir_p(directory) unless File.exist?(directory)

          File.open(account.path, 'w') do |f|
            f.write account.dump(include_address, include_id)
          end
        rescue IOError => e
          logger.error "Could not write to file", path: account.path, message: e.to_s
          raise e
        end
      end

      @accounts.push account
      @accounts.sort_by! {|acct| acct.path.to_s }
    end

    ##
    # Replace the password of an account.
    #
    # The update is carried out in three steps:
    #
    # 1. the old keystore file is renamed
    # 2. the new keystore file is created at the previous location of the old
    #    keystore file
    # 3. the old keystore file is removed
    #
    # In this way, at least one of the keystore files exists on disk at any
    # time and can be recovered if the process is interrupted.
    #
    # @param account [Account] which must be unlocked, stored on disk and
    #   included in `@accounts`
    # @param include_address [Bool] forwarded to `add_account` during step 2
    # @param include_id [Bool] forwarded to `add_account` during step 2
    #
    # @raise [ValueError] if the account is locked, if it is not added to the
    #   account manager, or if it is not stored
    #
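    # @example Changing a passphrase (a sketch; the account must already be
    #   unlocked, stored on disk and managed by this service)
    #   accounts = app.services.accounts
    #   accounts.update_account(accounts.find('1'), 'new passphrase')
    #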
    def update_account(account, new_password, include_address=true, include_id=true)
      raise ValueError, "Account not managed by account service" unless @accounts.include?(account)
      raise ValueError, "Cannot update locked account" if account.locked?
      raise ValueError, 'Account not stored on disk' unless account.path

      logger.debug "creating new account"
      new_account = Account.create new_password, account.privkey, account.uuid, account.path

      backup_path = account.path + '~'
      i = 1
      while File.exist?(backup_path)
        backup_path = backup_path[0, backup_path.rindex('~')+1] + i.to_s
        i += 1
      end
      raise AssertError if File.exist?(backup_path)

      logger.info 'moving old keystore file to backup location', from: account.path, to: backup_path
      begin
        FileUtils.mv account.path, backup_path
      rescue
        logger.error "could not backup keystore, stopping account update", from: account.path, to: backup_path
        raise $!
      end
      raise AssertError unless File.exist?(backup_path)
      raise AssertError if File.exist?(new_account.path)
      account.path = backup_path

      @accounts.delete account
      begin
        add_account new_account, true, include_address, include_id
      rescue
        logger.error 'adding new account failed, recovering from backup'
        FileUtils.mv backup_path, new_account.path
        account.path = new_account.path
        @accounts.push account
        @accounts.sort_by! {|acct| acct.path.to_s }
        raise $!
      end
      raise AssertError unless File.exist?(new_account.path)

      logger.info "deleting backup of old keystore", path: backup_path
      begin
        FileUtils.rm backup_path
      rescue
        logger.error 'failed to delete no longer needed backup of old keystore', path: account.path
        raise $!
      end

      account.keystore = new_account.keystore
      account.path = new_account.path

      @accounts.push account
      @accounts.delete new_account
      @accounts.sort_by! {|acct| acct.path.to_s }

      logger.debug "account update successful"
    end

    def accounts_with_address
      @accounts.select {|acct| acct.address }
    end

    def unlocked_accounts
      @accounts.select {|acct| !acct.locked? }
    end

    ##
    # Find an account by either its address, its id, or its index as string.
    #
    # Example identifiers:
    #
    # - '9c0e0240776cfbe6fa1eb37e57721e1a88a563d1' (address)
    # - '0x9c0e0240776cfbe6fa1eb37e57721e1a88a563d1' (address with 0x prefix)
    # - '01dd527b-f4a5-4b3c-9abb-6a8e7cd6722f' (UUID)
    # - '3' (index)
    #
    # @param identifier [String] the account's hex encoded, case insensitive
    #   address (with optional 0x prefix), its UUID or its index (as string,
    #   >= 1)
    #
    # @raise [ValueError] if the identifier could not be interpreted
    # @raise [KeyError] if the identified account is not known to the account
    #   service
    #
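    # @example Equivalent lookups (a sketch; assumes the first managed account
    #   has the address used in the identifiers above)
    #   accounts.find('9c0e0240776cfbe6fa1eb37e57721e1a88a563d1')
    #   accounts.find('0x9C0E0240776CFBE6FA1EB37E57721E1A88A563D1')   # case insensitive, 0x prefix
    #   accounts.find('1')                                            # 1-based index
    #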
    def find(identifier)
      identifier = identifier.downcase

      if identifier =~ /\A[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\z/ # uuid
        return get_by_id(identifier)
      end

      begin
        address = Address.new(identifier).to_bytes
        raise AssertError unless address.size == 20
        return self[address]
      rescue
        # do nothing
      end

      index = identifier.to_i
      raise ValueError, 'Index must be 1 or greater' if index <= 0
      raise KeyError if index > @accounts.size
      @accounts[index-1]
    end

    ##
    # Return the account with a given id.
    #
    # Note that accounts are not required to have an id.
    #
    # @raise [KeyError] if no matching account can be found
    #
    def get_by_id(id)
      accts = @accounts.select {|acct| acct.uuid == id }

      if accts.size == 0
        raise KeyError, "account with id #{id} unknown"
      elsif accts.size > 1
        logger.warn "multiple accounts with same UUID found", uuid: id
      end

      accts[0]
    end

    ##
    # Get an account by its address.
    #
    # Note that even if an account with the given address exists, it might
    # not be found if it is locked. Also, multiple accounts with the same
    # address may exist, in which case the first one is returned (and a
    # warning is logged).
    #
    # @raise [KeyError] if no matching account can be found
    #
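    # @example (a sketch; the argument is the raw 20-byte address, not hex)
    #   accounts.get_by_address(Utils.decode_hex('9c0e0240776cfbe6fa1eb37e57721e1a88a563d1'))
    #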
    def get_by_address(address)
      raise ArgumentError, 'address must be 20 bytes' unless address.size == 20

      accts = @accounts.select {|acct| acct.address == address }

      if accts.size == 0
        raise KeyError, "account not found by address #{Utils.encode_hex(address)}"
      elsif accts.size > 1
        logger.warn "multiple accounts with same address found", address: Utils.encode_hex(address)
      end

      accts[0]
    end

    def sign_tx(address, tx)
      get_by_address(address).sign_tx(tx)
    end

    def propose_path(address)
      File.join @keystore_dir, Utils.encode_hex(address)
    end

    def include?(address)
      raise ArgumentError, 'address must be 20 bytes' unless address.size == 20
      @accounts.any? {|acct| acct.address == address }
    end

    def [](address_or_idx)
      if address_or_idx.instance_of?(String)
        raise ArgumentError, 'address must be 20 bytes' unless address_or_idx.size == 20
        acct = @accounts.find {|acct| acct.address == address_or_idx }
        acct or raise KeyError
      else
        raise ArgumentError, 'address_or_idx must be String or Integer' unless address_or_idx.is_a?(Integer)
        @accounts[address_or_idx]
      end
    end

    include Enumerable
    def each(&block)
      @accounts.each(&block)
    end

    def size
      @accounts.size
    end

    private

    def logger
      @logger ||= Logger.new('accounts')
    end

  end

end
@@ -0,0 +1,15 @@
# -*- encoding : ascii-8bit -*-

module Reth

  class App < ::DEVp2p::App

    default_config(
      client_version_string: CLIENT_VERSION_STRING,
      deactivated_services: [],
      post_app_start_callback: nil
    )

  end

end
@@ -0,0 +1,500 @@
# -*- encoding : ascii-8bit -*-

require 'thread'

module Reth

  class ChainService < ::DEVp2p::WiredService
    name 'chain'

    default_config(
      eth: {
        network_id: 0,
        genesis: '',
        pruning: -1
      },
      block: Env::DEFAULT_CONFIG
    )
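
    # A config fragment that selects a network and enables pruning might look
    # like this (a sketch; the hash is merged over the defaults above by the
    # service configuration):
    #
    #   eth: {
    #     network_id: 1,
    #     pruning: 1000   # becomes the RefcountDB ttl, see #setup_db
    #   }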

    BLOCK_QUEUE_SIZE = 512
    TRANSACTION_QUEUE_SIZE = 512

    MAX_NEWBLOCK_PROCESSING_TIME_STATS = 1000

    attr :chain, :block_queue, :transaction_queue, :synchronizer

    def initialize(app)
      setup_db(app)
      super(app)

      logger.info 'initializing chain'
      coinbase = app.services.accounts.coinbase
      env = Env.new @db, config: config[:eth][:block]
      @chain = Chain.new env, new_head_cb: method(:on_new_head), coinbase: coinbase

      logger.info 'chain at', number: @chain.head.number
      if config[:eth][:genesis_hash]
        raise AssertError, "Genesis hash mismatch. Expected: #{config[:eth][:genesis_hash]}, Got: #{@chain.genesis.full_hash_hex}" unless config[:eth][:genesis_hash] == @chain.genesis.full_hash_hex
      end

      @synchronizer = Synchronizer.new(self, nil)

      @block_queue = SyncQueue.new BLOCK_QUEUE_SIZE
      @transaction_queue = SyncQueue.new TRANSACTION_QUEUE_SIZE
      @add_blocks_lock = false
      @add_transaction_lock = Mutex.new # TODO: should be semaphore

      @broadcast_filter = DuplicatesFilter.new
      @on_new_head_cbs = []
      @on_new_head_candidate_cbs = []
      @newblock_processing_times = []

      @processed_gas = 0
      @processed_elapsed = 0

      @wire_protocol = ETHProtocol
    end

    def start
      # do nothing
    end

    def stop
      # do nothing
    end

    def syncing?
      @synchronizer.syncing?
    end

    def mining?
      app.services.include?('pow') && app.services.pow.active?
    end

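    # Transactions submitted locally (e.g. via the JSON-RPC service) arrive
    # with `origin` left nil; transactions relayed by peers carry the
    # originating protocol so they are not echoed back to their sender.
    # A usage sketch (assuming `tx` is a signed Transaction):
    #
    #   chain = app.services.chain
    #   chain.add_transaction(tx)              # validate, broadcast, add to head candidate
    #   chain.add_transaction(tx, nil, true)   # while syncing: broadcast without validating
    #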
    def add_transaction(tx, origin=nil, force_broadcast=false)
      if syncing?
        if force_broadcast
          raise AssertError, 'only allowed for local txs' if origin
          logger.debug 'force broadcasting unvalidated tx'
          broadcast_transaction tx, origin
        end

        return
      end

      logger.debug 'add_transaction', locked: !@add_transaction_lock.locked?, tx: tx
      raise ArgumentError, 'tx must be Transaction' unless tx.instance_of?(Transaction)
      raise ArgumentError, 'origin must be nil or DEVp2p::Protocol' unless origin.nil? || origin.is_a?(DEVp2p::Protocol)

      if @broadcast_filter.include?(tx.full_hash)
        logger.debug 'discarding known tx'
        return
      end

      begin
        @chain.head_candidate.validate_transaction tx
        logger.debug 'valid tx, broadcasting'
        broadcast_transaction tx, origin
      rescue InvalidTransaction => e
        logger.debug 'invalid tx', error: e
        return
      end

      if origin # not locally added via jsonrpc
        if !mining? || syncing?
          logger.debug 'discarding tx', syncing: syncing?, mining: mining?
          return
        end
      end

      @add_transaction_lock.lock
      success = @chain.add_transaction tx
      @add_transaction_lock.unlock

      on_new_head_candidate if success
      success
    end

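    # Blocks received from the wire are queued and imported on a single
    # background thread; `@add_blocks_lock` ensures only one importer runs at
    # a time, while the bounded queue applies back-pressure to the caller.
    # A sketch of the producer side (assuming a TransientBlock `t_block`
    # received on protocol `proto`):
    #
    #   chain.add_block(t_block, proto)   # enqueue and, if idle, spawn add_blocks
    #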
    def add_block(t_block, proto)
      @block_queue.enq [t_block, proto] # blocks if full
      if !@add_blocks_lock
        @add_blocks_lock = true
        Thread.new { add_blocks }
      end
    end

    def add_blocks
      logger.debug 'add_blocks', qsize: @block_queue.size, add_tx_lock: @add_transaction_lock.locked?
      raise AssertError unless @add_blocks_lock
      @add_transaction_lock.lock

      while !@block_queue.empty?
        t_block, proto = @block_queue.peek

        if @chain.include?(t_block.header.full_hash)
          logger.warn 'known block', block: t_block
          @block_queue.deq
          next
        end

        if !@chain.include?(t_block.header.prevhash)
          logger.warn 'missing parent', block: t_block, head: @chain.head
          @block_queue.deq
          next
        end

        block = nil
        begin # deserialize
          t = Time.now
          block = t_block.to_block @chain.env
          elapsed = Time.now - t
          logger.debug 'deserialized', elapsed: elapsed, gas_used: block.gas_used, gpsec: gpsec(block.gas_used, elapsed)
        rescue InvalidTransaction => e
          logger.warn 'invalid transaction', block: t_block, error: e
          errtype = case e
                    when InvalidNonce then 'InvalidNonce'
                    when InsufficientBalance then 'NotEnoughCash'
                    when InsufficientStartGas then 'OutOfGasBase'
                    else 'other_transaction_error'
                    end
          warn_invalid t_block, errtype
          @block_queue.deq
          next
        rescue ValidationError => e
          logger.warn 'verification failed', error: e
          warn_invalid t_block, 'other_block_error'
          @block_queue.deq
          next
        end

        # all checks passed
        logger.debug 'adding', block: block
        t = Time.now
        if @chain.add_block(block, mining?)
          logger.info 'added', block: block, txs: block.transaction_count, gas_used: block.gas_used, time: (Time.now-t)

          now = Time.now.to_i
          if t_block.newblock_timestamp && t_block.newblock_timestamp > 0
            total = now - t_block.newblock_timestamp
            @newblock_processing_times.push total
            @newblock_processing_times.shift if @newblock_processing_times.size > MAX_NEWBLOCK_PROCESSING_TIME_STATS

            avg = @newblock_processing_times.reduce(0.0, &:+) / @newblock_processing_times.size
            max = @newblock_processing_times.max
            min = @newblock_processing_times.min
            logger.info 'processing time', last: total, avg: avg, max: max, min: min
          end
        else
          logger.warn 'could not add', block: block
        end

        @block_queue.deq
        sleep 0.001
      end
    rescue
      logger.error $!
      logger.error $!.backtrace[0,10].join("\n")
    ensure
      @add_blocks_lock = false
      @add_transaction_lock.unlock
    end

    def add_mined_block(block)
      logger.debug 'adding mined block', block: block
      raise ArgumentError, 'block must be Block' unless block.is_a?(Block)
      raise AssertError, 'invalid pow' unless block.header.check_pow

      if @chain.add_block(block)
        logger.debug 'added', block: block
        raise AssertError, 'block is not head' unless block == @chain.head
        broadcast_newblock block, block.chain_difficulty
      end
    end

    ##
    # if block is in chain or in queue
    #
    def knows_block(blockhash)
      return true if @chain.include?(blockhash)
      @block_queue.queue.any? {|(block, proto)| block.header.full_hash == blockhash }
    end

    def broadcast_newblock(block, chain_difficulty=nil, origin=nil)
      unless chain_difficulty
        raise AssertError, 'block not in chain' unless @chain.include?(block.full_hash)
        chain_difficulty = block.chain_difficulty
      end

      raise ArgumentError, 'block must be Block or TransientBlock' unless block.is_a?(Block) or block.instance_of?(TransientBlock)

      if @broadcast_filter.update(block.header.full_hash)
        logger.debug 'broadcasting newblock', origin: origin
        exclude_peers = origin ? [origin.peer] : []
        app.services.peermanager.broadcast(ETHProtocol, 'newblock', [block, chain_difficulty], {}, nil, exclude_peers)
      else
        logger.debug 'already broadcasted block'
      end
    end

    def broadcast_transaction(tx, origin=nil)
      raise ArgumentError, 'tx must be Transaction' unless tx.instance_of?(Transaction)

      if @broadcast_filter.update(tx.full_hash)
        logger.debug 'broadcasting tx', origin: origin
        exclude_peers = origin ? [origin.peer] : []
        app.services.peermanager.broadcast ETHProtocol, 'transactions', [tx], {}, nil, exclude_peers
      else
        logger.debug 'already broadcasted tx'
      end
    end

    def on_wire_protocol_start(proto)
      logger.debug 'on_wire_protocol_start', proto: proto
      raise AssertError, 'incompatible protocol' unless proto.instance_of?(@wire_protocol)

      # register callbacks
      %i(status newblockhashes transactions getblockhashes blockhashes getblocks blocks newblock getblockhashesfromnumber).each do |cmd|
        proto.send(:"receive_#{cmd}_callbacks").push method(:"on_receive_#{cmd}")
      end

      head = @chain.head
      proto.send_status head.chain_difficulty, head.full_hash, @chain.genesis.full_hash
    end

    def on_wire_protocol_stop(proto)
      raise AssertError, 'incompatible protocol' unless proto.instance_of?(@wire_protocol)
      logger.debug 'on_wire_protocol_stop', proto: proto
    end

    def on_receive_status(proto, options)
      eth_version = options[:eth_version]
      network_id = options[:network_id]
      chain_difficulty = options[:chain_difficulty]
      chain_head_hash = options[:chain_head_hash]
      genesis_hash = options[:genesis_hash]

      logger.debug 'status received', proto: proto, eth_version: eth_version
      raise ETHProtocolError, 'eth version mismatch' unless eth_version == proto.version

      if network_id != config[:eth].fetch(:network_id, proto.network_id)
        logger.warn 'invalid network id', remote_network_id: network_id, expected_network_id: config[:eth].fetch(:network_id, proto.network_id)
        raise ETHProtocolError, 'wrong network id'
      end

      # check genesis
      if genesis_hash != @chain.genesis.full_hash
        logger.warn 'invalid genesis hash', remote_id: proto, genesis: Utils.encode_hex(genesis_hash)
        raise ETHProtocolError, 'wrong genesis block'
      end

      # request chain
      @synchronizer.receive_status proto, chain_head_hash, chain_difficulty

      # send transactions
      transactions = @chain.get_transactions
      unless transactions.empty?
        logger.debug 'sending transactions', remote_id: proto
        proto.send_transactions *transactions
      end
    rescue ETHProtocolError
      app.services.peermanager.exclude proto.peer
    rescue
      logger.error $!
      logger.error $!.backtrace[0,10].join("\n")
    end

    def on_receive_transactions(proto, transactions)
      logger.debug 'remote transactions received', count: transactions.size, remote_id: proto
      transactions.each do |tx|
        add_transaction tx, proto
      end
    rescue
      logger.debug $!
      logger.debug $!.backtrace[0,10].join("\n")
    end

    def on_receive_newblockhashes(proto, newblockhashes)
      logger.debug 'recv newblockhashes', num: newblockhashes.size, remote_id: proto
      raise AssertError, 'cannot handle more than 32 block hashes at one time' unless newblockhashes.size <= 32

      @synchronizer.receive_newblockhashes(proto, newblockhashes)
    end

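    # Serves a getblockhashes request by walking backwards from
    # `child_block_hash`: each block's RLP is read from the DB and only the
    # parent hash is extracted lazily, without deserializing the whole block.
    # The layout relied on below (sketch):
    #
    #   block RLP = [header, transactions, uncles]
    #   header[0] = prevhash, i.e. the parent block's hash
    #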
    def on_receive_getblockhashes(proto, options)
      child_block_hash = options[:child_block_hash]
      count = options[:count]

      logger.debug 'handle getblockhashes', count: count, block_hash: Utils.encode_hex(child_block_hash)

      max_hashes = [count, @wire_protocol::MAX_GETBLOCKHASHES_COUNT].min
      found = []

      unless @chain.include?(child_block_hash)
        logger.debug 'unknown block'
        proto.send_blockhashes
        return
      end

      last = child_block_hash
      while found.size < max_hashes
        begin
          last = RLP.decode_lazy(@chain.db.get(last))[0][0] # [head][prevhash]
        rescue KeyError
          # this can happen if we started a chain download, which did not complete
          # should not happen if the hash is part of the canonical chain
          logger.warn 'KeyError in getblockhashes', hash: last
          break
        end

        if last
          found.push(last)
        else
          break
        end
      end

      logger.debug 'sending: found block_hashes', count: found.size
      proto.send_blockhashes *found
    end

    def on_receive_blockhashes(proto, blockhashes)
      if blockhashes.empty?
        logger.debug 'recv 0 remote block hashes, signifying genesis block'
      else
        logger.debug 'on receive blockhashes', count: blockhashes.size, remote_id: proto, first: Utils.encode_hex(blockhashes.first), last: Utils.encode_hex(blockhashes.last)
      end

      @synchronizer.receive_blockhashes proto, blockhashes
    end

    def on_receive_getblocks(proto, blockhashes)
      logger.debug 'on receive getblocks', count: blockhashes.size

      found = []
      blockhashes[0, @wire_protocol::MAX_GETBLOCKS_COUNT].each do |bh|
        begin
          found.push @chain.db.get(bh)
        rescue KeyError
          logger.debug 'unknown block requested', block_hash: Utils.encode_hex(bh)
        end
      end

      unless found.empty?
        logger.debug 'found', count: found.size
        proto.send_blocks *found
      end
    end

    def on_receive_blocks(proto, transient_blocks)
      blk_number = transient_blocks.empty? ? 0 : transient_blocks.map {|blk| blk.header.number }.max
      logger.debug 'recv blocks', count: transient_blocks.size, remote_id: proto, highest_number: blk_number

      unless transient_blocks.empty?
        @synchronizer.receive_blocks proto, transient_blocks
      end
    end

    def on_receive_newblock(proto, options)
      block = options[:block]
      chain_difficulty = options[:chain_difficulty]

      logger.debug 'recv newblock', block: block, remote_id: proto
      @synchronizer.receive_newblock proto, block, chain_difficulty
    rescue
      logger.debug $!
      logger.debug $!.backtrace[0,10].join("\n")
    end

    def on_receive_getblockhashesfromnumber(proto, options)
      number = options[:number]
      count = options[:count]

      logger.debug 'recv getblockhashesfromnumber', number: number, count: count, remote_id: proto

      found = []
      count = [count, @wire_protocol::MAX_GETBLOCKHASHES_COUNT].min

      for i in (number...(number+count))
        begin
          h = @chain.index.get_block_by_number(i)
          found.push h
        rescue KeyError
          logger.debug 'unknown block requested', number: number
        end
      end

      logger.debug 'sending: found block_hashes', count: found.size
      proto.send_blockhashes *found
    end

    private

    def logger
      @logger ||= Logger.new('eth.chainservice')
    end

    def on_new_head(block)
      logger.debug 'new head cbs', num: @on_new_head_cbs.size
      @on_new_head_cbs.each {|cb| cb.call block }
      on_new_head_candidate # we implicitly have a new head_candidate
    end

    def on_new_head_candidate
      @on_new_head_candidate_cbs.each {|cb| cb.call @chain.head_candidate }
    end

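    # Cumulative gas throughput over the life of the service: e.g. 3_000_000
    # gas deserialized in 1.5s reports roughly 2_000_000 gas/sec (the 0.001
    # below only guards against division by zero).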
    def gpsec(gas_spent=0, elapsed=0)
      if gas_spent != 0
        @processed_gas += gas_spent
        @processed_elapsed += elapsed
      end

      (@processed_gas / (0.001 + @processed_elapsed)).to_i
    end

    def warn_invalid(block, errortype='other')
      # TODO: send to badblocks.ethereum.org
    end

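    # The backing DB records how it was initialized so that a restart with an
    # incompatible configuration fails fast. Marker keys written below (sketch):
    #
    #   "I am pruning"     => "1"   # pruning enabled, state kept in a RefcountDB with a ttl
    #   "I am not pruning" => "1"   # pruning disabled
    #   "network_id"       => "0"   # must match config[:eth][:network_id] on later runs
    #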
    def setup_db(app)
      data_dir = app.config[:data_dir]
      eth_config = app.config[:eth] || {}

      if eth_config[:pruning].to_i >= 0
        @db = DB::RefcountDB.new app.services.db

        if @db.db.include?("I am not pruning")
          raise "The database in '#{data_dir}' was initialized as non-pruning. Can not enable pruning now."
        end

        @db.ttl = eth_config[:pruning].to_i
        @db.db.put "I am pruning", "1"
        @db.commit
      else
        @db = app.services.db

        if @db.include?("I am pruning")
          raise "The database in '#{data_dir}' was initialized as pruning. Can not disable pruning now."
        end

        @db.put "I am not pruning", "1"
        @db.commit
      end

      if @db.include?('network_id')
        db_network_id = @db.get 'network_id'

        if db_network_id != eth_config[:network_id].to_s
          raise "The database in '#{data_dir}' was initialized with network id #{db_network_id} and can not be used when connecting to network id #{eth_config[:network_id]}. Please choose a different data directory."
        end
      else
        @db.put 'network_id', eth_config[:network_id].to_s
        @db.commit
      end

      raise AssertError, 'failed to setup db' if @db.nil?
    end

  end

end