reth 0.1.0
- checksums.yaml +7 -0
- data/LICENSE +21 -0
- data/README.md +6 -0
- data/bin/reth +208 -0
- data/lib/reth.rb +44 -0
- data/lib/reth/account.rb +195 -0
- data/lib/reth/account_service.rb +361 -0
- data/lib/reth/app.rb +15 -0
- data/lib/reth/chain_service.rb +500 -0
- data/lib/reth/config.rb +88 -0
- data/lib/reth/db_service.rb +90 -0
- data/lib/reth/duplicates_filter.rb +29 -0
- data/lib/reth/eth_protocol.rb +209 -0
- data/lib/reth/genesisdata/genesis_frontier.json +26691 -0
- data/lib/reth/genesisdata/genesis_morden.json +27 -0
- data/lib/reth/genesisdata/genesis_olympic.json +48 -0
- data/lib/reth/jsonrpc.rb +13 -0
- data/lib/reth/jsonrpc/app.rb +79 -0
- data/lib/reth/jsonrpc/filter.rb +288 -0
- data/lib/reth/jsonrpc/handler.rb +424 -0
- data/lib/reth/jsonrpc/helper.rb +156 -0
- data/lib/reth/jsonrpc/server.rb +24 -0
- data/lib/reth/jsonrpc/service.rb +37 -0
- data/lib/reth/keystore.rb +150 -0
- data/lib/reth/leveldb_service.rb +79 -0
- data/lib/reth/profile.rb +66 -0
- data/lib/reth/sync_task.rb +273 -0
- data/lib/reth/synchronizer.rb +192 -0
- data/lib/reth/transient_block.rb +40 -0
- data/lib/reth/utils.rb +22 -0
- data/lib/reth/version.rb +3 -0
- metadata +201 -0
data/lib/reth/jsonrpc/server.rb
ADDED
@@ -0,0 +1,24 @@
+module Reth
+  module JSONRPC
+
+    class Server
+      include Concurrent::Async
+
+      def initialize(app, host, port)
+        super()
+
+        @app = app
+        @host = host
+        @port = port
+      end
+
+      def start
+        Rack::Handler::WEBrick.run App.new(@app), Host: @host, Port: @port
+      rescue
+        puts $!
+        puts $!.backtrace[0,10].join("\n")
+      end
+    end
+
+  end
+end
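Server mixes in Concurrent::Async, so callers can dispatch `start` onto a background thread via the `async` proxy instead of blocking on WEBrick. A minimal standalone sketch of that pattern (the `BlockingWorker` class and its sleep are stand-ins for the gem's Rack handler, not its actual wiring):

    require 'concurrent'

    class BlockingWorker
      include Concurrent::Async

      def initialize
        super()            # required so Async can set up its proxy state
      end

      def start
        sleep 1            # stands in for Rack::Handler::WEBrick.run
        puts 'worker finished'
      end
    end

    w = BlockingWorker.new
    ivar = w.async.start   # returns a Concurrent::IVar immediately
    puts 'caller not blocked'
    ivar.wait              # block here only so the sketch exits cleanly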
data/lib/reth/jsonrpc/service.rb
ADDED
@@ -0,0 +1,37 @@
+module Reth
+  module JSONRPC
+
+    class Service < ::DEVp2p::Service
+
+      class <<self
+        def register_with_app(app)
+          config = default_config[:jsonrpc]
+          app.register_service self, app, config[:host], config[:port]
+        end
+      end
+
+      name 'jsonrpc'
+      default_config(
+        jsonrpc: {
+          host: '127.0.0.1',
+          port: 8333
+        }
+      )
+
+      def initialize(app, host, port)
+        super(app)
+
+        @app = app
+        @host = host
+        @port = port
+      end
+
+      def start
+        @server = Server.new @app, @host, @port
+        @server.async.start
+      end
+
+    end
+
+  end
+end
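`register_with_app` is the hook a DEVp2p-style app uses to pull the service in with its default host and port. A sketch of the intended call shape (the `app` object here is hypothetical):

    # Hypothetical wiring; a real DEVp2p app provides register_service itself.
    Reth::JSONRPC::Service.register_with_app(app)
    # equivalent to:
    #   app.register_service Reth::JSONRPC::Service, app, '127.0.0.1', 8333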
data/lib/reth/keystore.rb
ADDED
@@ -0,0 +1,150 @@
+# -*- encoding : ascii-8bit -*-
+
+require 'openssl'
+
+module Reth
+
+  class Keystore
+
+    class PBKDF2
+      attr :name, :params
+
+      def initialize(params=nil)
+        @name = 'pbkdf2'
+        @params = params || mkparams
+      end
+
+      def eval(pw)
+        OpenSSL::PKCS5.pbkdf2_hmac(
+          pw,
+          Utils.decode_hex(params[:salt]),
+          params[:c],
+          params[:dklen],
+          'SHA256'
+        )
+      end
+
+      def mkparams
+        { prf: 'hmac-sha256',
+          dklen: 32,
+          c: 262144,
+          salt: Utils.encode_hex(SecureRandom.random_bytes(16)) }
+      end
+    end
+
+    class AES128CTR
+      attr :name, :params
+
+      def initialize(params=nil)
+        @name = 'aes-128-ctr'
+        @params = params || mkparams
+      end
+
+      def encrypt(text, key)
+        cipher = OpenSSL::Cipher.new name
+        cipher.encrypt
+        cipher.key = key
+        cipher.iv = Utils.decode_hex(params[:iv])
+        cipher.update(text) + cipher.final
+      end
+
+      def decrypt(text, key)
+        cipher = OpenSSL::Cipher.new name
+        cipher.decrypt
+        cipher.key = key
+        cipher.iv = Utils.decode_hex(params[:iv])
+        cipher.update(text) + cipher.final
+      end
+
+      def mkparams
+        {iv: Utils.encode_hex(SecureRandom.random_bytes(16))}
+      end
+    end
+
+    KDF = {
+      'pbkdf2' => PBKDF2
+    }.freeze
+
+    CIPHER = {
+      'aes-128-ctr' => AES128CTR
+    }.freeze
+
+    class <<self
+
+      def make_json(priv, pw, kdf=PBKDF2.new, cipher=AES128CTR.new)
+        derivedkey = kdf.eval pw
+
+        enckey = derivedkey[0,16]
+        c = cipher.encrypt priv, enckey
+
+        mac = Utils.keccak256 "#{derivedkey[16,16]}#{c}"
+        uuid = SecureRandom.uuid
+
+        {
+          crypto: {
+            cipher: cipher.name,
+            ciphertext: Utils.encode_hex(c),
+            cipherparams: cipher.params,
+            kdf: kdf.name,
+            kdfparams: kdf.params,
+            mac: Utils.encode_hex(mac),
+            version: 1
+          },
+          id: uuid,
+          version: 3
+        }
+      end
+
+      def decode_json(jsondata, pw)
+        jsondata = Hashie::Mash.new jsondata
+
+        cryptdata = jsondata.crypto || jsondata.Crypto
+        raise ArgumentError, "JSON data must contain 'crypto' object" unless cryptdata
+
+        kdfparams = cryptdata.kdfparams
+        kdf = KDF[cryptdata.kdf].new kdfparams
+
+        cipherparams = cryptdata.cipherparams
+        cipher = CIPHER[cryptdata.cipher].new cipherparams
+
+        derivedkey = kdf.eval pw
+        raise ValueError, "Derived key must be at least 32 bytes long" unless derivedkey.size >= 32
+
+        enckey = derivedkey[0,16]
+        ct = Utils.decode_hex cryptdata.ciphertext
+        o = cipher.decrypt ct, enckey
+
+        mac1 = Utils.keccak256 "#{derivedkey[16,16]}#{ct}"
+        mac2 = Utils.decode_hex cryptdata.mac
+        raise ValueError, "MAC mismatch. Password incorrect?" unless mac1 == mac2
+
+        o
+      end
+
+      ##
+      # Check if json has the structure of a keystore file version 3.
+      #
+      # Note that this test is not complete, e.g. it doesn't check key
+      # derivation or cipher parameters.
+      #
+      # @param json [Hash] data loaded from a json file
+      # @return [Bool] `true` if the data appears to be valid, otherwise
+      #   `false`
+      #
+      def validate(json)
+        return false unless json.has_key?('crypto') || json.has_key?('Crypto')
+        return false unless json['version'] == 3
+
+        crypto = json['crypto'] || json['Crypto']
+        return false unless crypto.has_key?('cipher')
+        return false unless crypto.has_key?('ciphertext')
+        return false unless crypto.has_key?('kdf')
+        return false unless crypto.has_key?('mac')
+
+        true
+      end
+    end
+
+  end
+
+end
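`make_json` and `decode_json` are inverses: the first 16 bytes of the PBKDF2-derived key encrypt the private key, the second 16 feed the keccak256 MAC. A minimal round-trip sketch, assuming the gem and its Utils are loaded (the private key and password are made up for illustration; `validate` checks string keys, hence the JSON round-trip):

    require 'json'
    require 'securerandom'

    priv = SecureRandom.random_bytes(32)   # hypothetical 32-byte private key
    pw   = 'correct horse battery staple'

    json = Reth::Keystore.make_json(priv, pw)

    # validate expects string-keyed data, as read back from a keystore file
    raise 'invalid keystore' unless Reth::Keystore.validate(JSON.parse(JSON.generate(json)))

    decoded = Reth::Keystore.decode_json(json, pw)   # slow: c = 262144 PBKDF2 rounds
    raise 'round trip failed' unless decoded == priv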
data/lib/reth/leveldb_service.rb
ADDED
@@ -0,0 +1,79 @@
+# -*- encoding : ascii-8bit -*-
+
+module Reth
+
+  class LevelDBService < ::DEVp2p::Service
+    name 'leveldb'
+
+    attr :db # implement DB::BaseDB interface
+
+    def initialize(app)
+      super(app)
+      @db = DB::LevelDB.new File.join(app.config[:data_dir], 'leveldb')
+    end
+
+    def start
+      # do nothing
+    end
+
+    def stop
+      # do nothing
+    end
+
+    def get(k)
+      @db.get(k)
+    rescue KeyError
+      nil
+    end
+
+    def put(k, v)
+      @db.put(k, v)
+    end
+
+    def commit
+      @db.commit
+    end
+
+    def delete(k)
+      @db.delete(k)
+    end
+
+    def include?(k)
+      @db.include?(k)
+    end
+    alias has_key? include?
+
+    def inc_refcount(k, v)
+      put(k, v)
+    end
+
+    def dec_refcount(k)
+      # do nothing
+    end
+
+    def revert_refcount_changes(epoch)
+      # do nothing
+    end
+
+    def commit_refcount_changes(epoch)
+      # do nothing
+    end
+
+    def cleanup(epoch)
+      # do nothing
+    end
+
+    def put_temporarily(k, v)
+      inc_refcount(k, v)
+      dec_refcount(k)
+    end
+
+    private
+
+    def logger
+      @logger ||= Logger.new 'db'
+    end
+
+  end
+
+end
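The service deliberately softens the store's contract: `get` swallows `KeyError` and returns nil, and the refcount hooks are no-ops so callers written against a refcounting DB still work unchanged. A usage sketch (the `leveldb_service` handle is hypothetical, e.g. looked up from the app's registered services):

    db = leveldb_service

    db.put('key', 'value')
    db.commit                 # LevelDB writes go through an explicit commit
    db.get('key')             # => "value"
    db.get('missing')         # => nil, instead of raising KeyError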
data/lib/reth/profile.rb
ADDED
@@ -0,0 +1,66 @@
+# -*- encoding : ascii-8bit -*-
+
+module Reth
+
+  class Profile
+
+    GENESIS_DIR = File.expand_path('../genesisdata', __FILE__)
+
+    ALL = {
+      livenet: {
+        eth: {
+          network_id: 1,
+          genesis: File.join(GENESIS_DIR, 'genesis_frontier.json'),
+          genesis_hash: 'd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3',
+        },
+        discovery: {
+          bootstrap_nodes: [
+            'enode://487611428e6c99a11a9795a6abe7b529e81315ca6aad66e2a2fc76e3adf263faba0d35466c2f8f68d561dbefa8878d4df5f1f2ddb1fbeab7f42ffb8cd328bd4a@5.1.83.226:30303', # C++
+            'enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@52.16.188.185:30303', # GO
+            'enode://de471bccee3d042261d52e9bff31458daecc406142b401d4cd848f677479f73104b9fdeb090af9583d3391b7f10cb2ba9e26865dd5fca4fcdc0fb1e3b723c786@54.94.239.50:30303', # GO2
+            'enode://2676755dd8477ad3beea32b4e5a144fa10444b70dfa3e05effb0fdfa75683ebd4f75709e1f8126cb5317c5a35cae823d503744e790a3a038ae5dd60f51ee9101@144.76.62.101:30303', # Python
+          ]
+        },
+      },
+      testnet: {
+        eth: {
+          network_id: 2,
+          genesis: File.join(GENESIS_DIR, 'genesis_morden.json'),
+          genesis_hash: '0cd786a2425d16f152c658316c423e6ce1181e15c3295826d7c9904cba9ce303',
+          block: {
+            account_initial_nonce: 2 ** 20,
+            homestead_fork_blknum: 494000,
+          },
+        },
+        discovery: {
+          bootstrap_nodes: [
+            'enode://e4533109cc9bd7604e4ff6c095f7a1d807e15b38e9bfeb05d3b7c423ba86af0a9e89abbf40bd9dde4250fef114cd09270fa4e224cbeef8b7bf05a51e8260d6b8@94.242.229.4:40404' # Go
+          ]
+        },
+      }
+    }
+
+    class <<self
+      def all
+        @all ||= Hashie::Mash.new ALL
+      end
+
+      def public(name)
+        all[name]
+      end
+
+      def private(network_id)
+        Hashie::Mash.new({
+          eth: {
+            network_id: network_id
+          },
+          discovery: {
+            bootstrap_nodes: []
+          }
+        })
+      end
+    end
+
+  end
+
+end
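Profiles are exposed as a Hashie::Mash, so the network presets read with dot access, and `private` builds an isolated profile with no bootstrap nodes. For example:

    Reth::Profile.all.livenet.eth.network_id        # => 1
    Reth::Profile.public(:testnet).eth.genesis_hash
    # => "0cd786a2425d16f152c658316c423e6ce1181e15c3295826d7c9904cba9ce303"
    Reth::Profile.private(1337).discovery.bootstrap_nodes  # => []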
data/lib/reth/sync_task.rb
ADDED
@@ -0,0 +1,273 @@
+# -*- encoding : ascii-8bit -*-
+
+module Reth
+
+  ##
+  # Synchronizes the chain starting from a given blockhash. The hash chain
+  # is fetched from a single peer (the one which led to the unknown
+  # blockhash); blocks are fetched from the best peers.
+  #
+  class SyncTask
+    MAX_BLOCKS_PER_REQUEST = 32
+    INITIAL_BLOCKHASHES_PER_REQUEST = 16
+    MAX_BLOCKHASHES_PER_REQUEST = 512
+
+    BLOCKS_REQUEST_TIMEOUT = 32
+    BLOCKHASHES_REQUEST_TIMEOUT = 32
+
+    attr :start_block_number, :end_block_number
+
+    def initialize(synchronizer, proto, blockhash, chain_difficulty=0, originator_only=false)
+      @synchronizer = synchronizer
+      @chain = synchronizer.chain
+      @chainservice = synchronizer.chainservice
+
+      @originating_proto = proto
+      @originator_only = originator_only
+
+      @blockhash = blockhash
+      @chain_difficulty = chain_difficulty
+
+      @requests = {} # proto => [cond, result]
+      @start_block_number = @chain.head.number
+      @end_block_number = @start_block_number + 1 # minimum synctask
+
+      @run = Thread.new { run }
+    end
+
+    def run
+      logger.info 'spawning new synctask'
+
+      fetch_hashchain
+    rescue
+      logger.error $!
+      logger.error $!.backtrace[0,20].join("\n")
+      task_exit false
+    end
+
+    def task_exit(success=false)
+      if success
+        logger.debug 'successfully synced'
+      else
+        logger.warn 'syncing failed'
+      end
+
+      @synchronizer.synctask_exited(success)
+    end
+
+    def protocols
+      return [@originating_proto] if @originator_only
+      @synchronizer.protocols
+    end
+
+    def fetch_hashchain
+      logger.debug 'fetching hashchain'
+
+      blockhashes_chain = [@blockhash] # youngest to oldest
+      blockhash = @blockhash = blockhashes_chain.last
+      raise AssertError if @chain.include?(blockhash)
+
+      # get block hashes until we find a known one
+      max_blockhashes_per_request = INITIAL_BLOCKHASHES_PER_REQUEST
+      chain_head_number = @chain.head.number
+      while !@chain.include?(blockhash)
+        blockhashes_batch = []
+
+        # proto with highest difficulty should be the proto we got the
+        # newblock from
+        protos = self.protocols
+        if protos.nil? || protos.empty?
+          logger.warn 'no protocols available'
+          return task_exit(false)
+        end
+
+        protos.each do |proto|
+          logger.debug "syncing with", proto: proto
+          next if proto.stopped?
+
+          raise AssertError if @requests.has_key?(proto)
+          deferred = Concurrent::IVar.new
+          @requests[proto] = deferred
+
+          proto.async.send_getblockhashes blockhash, max_blockhashes_per_request
+          begin
+            blockhashes_batch = deferred.value(BLOCKHASHES_REQUEST_TIMEOUT)
+          rescue Defer::TimedOut
+            logger.warn 'syncing hashchain timed out'
+            next
+          ensure
+            @requests.delete proto
+          end
+
+          if blockhashes_batch.empty?
+            logger.warn 'empty getblockhashes result'
+            next
+          end
+
+          unless blockhashes_batch.all? {|bh| bh.instance_of?(String) }
+            logger.warn "got wrong data type", expected: 'String', received: blockhashes_batch.map(&:class).uniq
+            next
+          end
+
+          break
+        end
+
+        if blockhashes_batch.empty?
+          logger.warn 'syncing failed with all peers', num_protos: protos.size
+          return task_exit(false)
+        end
+
+        if @chain.include?(blockhashes_batch.last)
+          blockhashes_batch.each do |bh| # youngest to oldest
+            blockhash = bh
+
+            if @chain.include?(blockhash)
+              logger.debug "found known blockhash", blockhash: Utils.encode_hex(blockhash), is_genesis: (blockhash == @chain.genesis.full_hash)
+              break
+            else
+              blockhashes_chain.push(blockhash)
+            end
+          end
+        else # no overlap
+          blockhashes_chain.concat blockhashes_batch
+          blockhash = blockhashes_batch.last
+        end
+
+        logger.debug "downloaded #{blockhashes_chain.size} block hashes, ending with #{Utils.encode_hex(blockhashes_chain.last)}"
+        @end_block_number = chain_head_number + blockhashes_chain.size
+        max_blockhashes_per_request = MAX_BLOCKHASHES_PER_REQUEST
+      end
+
+      @start_block_number = @chain.get(blockhash).number
+      @end_block_number = @start_block_number + blockhashes_chain.size
+
+      logger.debug 'computed missing numbers', start_number: @start_block_number, end_number: @end_block_number
+
+      fetch_blocks blockhashes_chain
+    end
+
+    def fetch_blocks(blockhashes_chain)
+      raise ArgumentError, 'no blockhashes' if blockhashes_chain.empty?
+      logger.debug 'fetching blocks', num: blockhashes_chain.size
+
+      blockhashes_chain.reverse! # oldest to youngest
+      num_blocks = blockhashes_chain.size
+      num_fetched = 0
+
+      while !blockhashes_chain.empty?
+        blockhashes_batch = blockhashes_chain[0, MAX_BLOCKS_PER_REQUEST]
+        t_blocks = []
+
+        protos = self.protocols
+        if protos.empty?
+          logger.warn 'no protocols available'
+          return task_exit(false)
+        end
+
+        proto = nil
+        reply_proto = nil
+        protos.each do |_proto|
+          proto = _proto
+
+          next if proto.stopped?
+          raise AssertError if @requests.has_key?(proto)
+
+          logger.debug 'requesting blocks', num: blockhashes_batch.size
+          deferred = Concurrent::IVar.new
+          @requests[proto] = deferred
+
+          proto.async.send_getblocks *blockhashes_batch
+          begin
+            t_blocks = deferred.value(BLOCKS_REQUEST_TIMEOUT)
+          rescue Defer::TimedOut
+            logger.warn 'getblocks timed out, trying next proto'
+            next
+          ensure
+            @requests.delete proto
+          end
+
+          if t_blocks.empty?
+            logger.warn 'empty getblocks reply, trying next proto'
+            next
+          elsif !t_blocks.all? {|b| b.instance_of?(TransientBlock) }
+            logger.warn 'received unexpected data', data: t_blocks
+            t_blocks = []
+            next
+          end
+
+          unless t_blocks.map {|b| b.header.full_hash } == blockhashes_batch[0, t_blocks.size]
+            logger.warn 'received wrong blocks, should ban peer'
+            t_blocks = []
+            next
+          end
+
+          reply_proto = proto
+          break
+        end
+
+        # add received t_blocks
+        num_fetched += t_blocks.size
+        logger.debug "received blocks", num: t_blocks.size, num_fetched: num_fetched, total: num_blocks, missing: (num_blocks - num_fetched)
+
+        if t_blocks.empty?
+          logger.warn 'failed to fetch blocks', missing: blockhashes_chain.size
+          return task_exit(false)
+        end
+
+        t = Time.now
+        logger.debug 'adding blocks', qsize: @chainservice.block_queue.size
+        t_blocks.each do |blk|
+          b = blockhashes_chain.shift
+          raise AssertError unless blk.header.full_hash == b
+          raise AssertError if blockhashes_chain.include?(blk.header.full_hash)
+
+          @chainservice.add_block blk, reply_proto # this blocks if the queue is full
+        end
+        logger.debug 'adding blocks done', took: (Time.now - t)
+      end
+
+      # done
+      last_block = t_blocks.last
+      raise AssertError, 'still missing blocks' unless blockhashes_chain.empty?
+      raise AssertError, 'still missing blocks' unless last_block.header.full_hash == @blockhash
+      logger.debug 'syncing finished'
+
+      # at this time blocks are not in the chain yet, but in the add_block queue
+      if @chain_difficulty >= @chain.head.chain_difficulty
+        @chainservice.broadcast_newblock last_block, @chain_difficulty, proto
+      end
+
+      task_exit(true)
+    rescue
+      logger.error $!
+      logger.error $!.backtrace[0,10].join("\n")
+      task_exit(false)
+    end
+
+    def receive_blocks(proto, t_blocks)
+      logger.debug 'blocks received', proto: proto, num: t_blocks.size
+      unless @requests.has_key?(proto)
+        logger.debug 'unexpected blocks'
+        return
+      end
+      @requests[proto].set t_blocks
+    end
+
+    def receive_blockhashes(proto, blockhashes)
+      logger.debug 'blockhashes received', proto: proto, num: blockhashes.size
+      unless @requests.has_key?(proto)
+        logger.debug 'unexpected blockhashes'
+        return
+      end
+      @requests[proto].set blockhashes
+    end
+
+    private
+
+    def logger
+      @logger ||= Logger.new('eth.sync.task')
+    end
+
+  end
+
+end
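The core request/reply mechanism here is a Concurrent::IVar per in-flight peer request: the sync loop registers one, sends the wire request, and blocks on `value(timeout)`; `receive_blocks` / `receive_blockhashes` fulfil it from the network side with `set`. A minimal standalone sketch of that pattern, built directly on concurrent-ruby (note the gem rescues a `Defer::TimedOut` error, presumably from a wrapper not shown in this diff; a plain IVar's `value(timeout)` simply returns nil on timeout):

    require 'concurrent'

    requests = {}

    # requester side: register an IVar keyed by peer, then wait for the reply
    deferred = Concurrent::IVar.new
    requests[:peer1] = deferred

    Thread.new do
      sleep 0.1
      # responder side, as in #receive_blockhashes: fulfil the pending request
      requests[:peer1]&.set(%w(hash1 hash2))
    end

    result = deferred.value(5)   # blocks up to 5s; nil if nothing arrives
    puts result.inspect          # => ["hash1", "hash2"]
    requests.delete(:peer1)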