scale_rb 0.1.14 → 0.2.0
This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/Gemfile.lock +1 -1
- data/lib/address.rb +85 -89
- data/lib/client/abstract_ws_client.rb +85 -81
- data/lib/client/http_client.rb +2 -170
- data/lib/client/http_client_metadata.rb +10 -8
- data/lib/client/http_client_storage.rb +180 -0
- data/lib/client/rpc_request_builder.rb +44 -42
- data/lib/codec.rb +28 -29
- data/lib/hasher.rb +39 -37
- data/lib/metadata/metadata.rb +130 -122
- data/lib/metadata/metadata_v10.rb +72 -69
- data/lib/metadata/metadata_v11.rb +78 -75
- data/lib/metadata/metadata_v12.rb +42 -39
- data/lib/metadata/metadata_v13.rb +71 -68
- data/lib/metadata/metadata_v14.rb +185 -181
- data/lib/metadata/metadata_v9.rb +92 -89
- data/lib/monkey_patching.rb +22 -22
- data/lib/portable_codec.rb +236 -232
- data/lib/scale_rb/version.rb +1 -1
- data/lib/scale_rb.rb +0 -1
- data/lib/storage_helper.rb +52 -50
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9857ea16c7327b90a598f40ce8afe919ee3db3a1fada67ba9e1a5ee768c50fdf
+  data.tar.gz: 7bd319b972c1c4aa13f4f24c6af525294bcbd8b6361423db66ac6038e44ad990
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: db6452725e87b88cd0bdffe419588c8b9b8cfe892bdac01df226e9008bf02e45d56c9c25447a932d8ddf43bdfecf6ade0095710c9e056ee14ef2813f7b500b75
+  data.tar.gz: 2c6d01fd6726cc7ff5ed7c303b5e3c7ffe48347267a7b6989a6cda032741c47cfe339fa4c51871166b78f794a999cf8b0e6104cf44d013912bf5dbac0599cb45
data/Gemfile.lock
CHANGED
data/lib/address.rb
CHANGED
@@ -1,105 +1,101 @@
 require 'base58'
 
 # Warning: Just for test
-      # raise "Invalid decoded address checksum" unless is_valid_checksum && ignore_checksum
-      decoded[1...size].unpack1('H*')
-    end
-    def encode(pubkey, addr_type = 42)
-      pubkey = pubkey[2..-1] if pubkey =~ /^0x/i
-      key = [pubkey].pack('H*')
+module ScaleRb
+  class Address
+    SS58_PREFIX = 'SS58PRE'
+
+    TYPES = [
+      # Polkadot Live (SS58, AccountId)
+      0, 1,
+      # Polkadot Canary (SS58, AccountId)
+      2, 3,
+      # Kulupu (SS58, Reserved)
+      16, 17,
+      # Darwinia Live
+      18,
+      # Dothereum (SS58, AccountId)
+      20, 21,
+      # Generic Substrate wildcard (SS58, AccountId)
+      42, 43,
+
+      # Schnorr/Ristretto 25519 ("S/R 25519") key
+      48,
+      # Edwards Ed25519 key
+      49,
+      # ECDSA SECP256k1 key
+      50,
+
+      # Reserved for future address format extensions.
+      *64..255
+    ]
+
+    class << self
+      def decode(address, addr_type = 42, _ignore_checksum = true)
+        decoded = Base58.base58_to_binary(address, :bitcoin)
+        is_pubkey = decoded.size == 35
+
+        size = decoded.size - (is_pubkey ? 2 : 1)
+
+        prefix = decoded[0, 1].unpack1('C*')
+
+        raise 'Invalid address type' unless TYPES.include?(addr_type)
+
+        hash_bytes = make_hash(decoded[0, size])
+        is_valid_checksum =
+          if is_pubkey
+            decoded[-2].unpack1('C*') == hash_bytes[0] && decoded[-1].unpack1('C*') == hash_bytes[1]
+          else
+            decoded[-1].unpack1('C*') == hash_bytes[0]
+          end
+
+        # raise "Invalid decoded address checksum" unless is_valid_checksum && ignore_checksum
+
+        decoded[1...size].unpack1('H*')
+      end
 
+      def encode(pubkey, addr_type = 42)
+        pubkey = pubkey[2..-1] if pubkey =~ /^0x/i
+        key = [pubkey].pack('H*')
 
-      when 32, 33
-        2
-      when 1, 2, 4, 8
-        1
-      else
-        raise 'Invalid pubkey length'
-      end
+        pubkey_bytes = key.bytes
 
+        checksum_length = case pubkey_bytes.length
+                          when 32, 33
+                            2
+                          when 1, 2, 4, 8
+                            1
                           else
-        ((ss58_format & 0b0000_0000_1111_1100) >> 2) | 0b0100_0000,
-        (ss58_format >> 8) | ((ss58_format & 0b0000_0000_0000_0011) << 6)
-      ].pack('C*')
+                            raise 'Invalid pubkey length'
                           end
 
+        ss58_format_bytes = if addr_type < 64
+                              [addr_type].pack('C*')
+                            else
+                              [
+                                ((ss58_format & 0b0000_0000_1111_1100) >> 2) | 0b0100_0000,
+                                (ss58_format >> 8) | ((ss58_format & 0b0000_0000_0000_0011) << 6)
+                              ].pack('C*')
+                            end
 
+        input_bytes = ss58_format_bytes.bytes + pubkey_bytes
+        checksum = Blake2b.hex(SS58_PREFIX.bytes + input_bytes, 64)._to_bytes
 
-    end
+        Base58.binary_to_base58((input_bytes + checksum[0...checksum_length]).pack('C*'), :bitcoin)
+      end
 
+      def make_hash(body)
+        Blake2b.hex("#{SS58_PREFIX}#{body}".bytes, 64)
+      end
+
+      def is_ss58_address?(address)
+        begin
+          decode(address)
+        rescue StandardError
+          return false
+        end
+        true
       end
-      true
     end
   end
 end
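In 0.2.0 the SS58 helpers become class methods on ScaleRb::Address instead of free-standing functions. A minimal usage sketch against the new namespace; the address below is the well-known Alice dev account used purely as an illustrative input, and the gem's top-level require is assumed to load address.rb:

require 'scale_rb'

alice = '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY' # illustrative SS58 address

pubkey = ScaleRb::Address.decode(alice)              # hex public key, no 0x prefix
again  = ScaleRb::Address.encode("0x#{pubkey}", 42)  # re-encode with the generic Substrate prefix (42)
ScaleRb::Address.is_ss58_address?(alice)             # => true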
data/lib/client/abstract_ws_client.rb
CHANGED
@@ -1,100 +1,104 @@
 # frozen_string_literal: true
 
+require_relative './rpc_request_builder'
+
+module ScaleRb
+  class AbstractWsClient
+    extend RpcRequestBuilder
+    attr_accessor :metadata, :registry
+
+    def initialize
+      @id = 0
+      @metadata = nil
+      @registry = nil
+      @callbacks = {}
+      @subscription_callbacks = {}
+    end
 
+    def send_json_rpc(_body)
+      raise 'WsClient is a abstract base class for websocket client, please use its sub-class'
+    end
 
+    # changes: [
+    #   [
+    #     "0x26aa394eea5630e07c48ae0c9558cef780d41e5e16056765bc8461851072c9d7", # storage key
+    #     "0x0400000000000000d887690900000000020000" # change
+    #   ]
+    # ]
+    def process(resp)
+      # handle id
+      @callbacks[resp['id']]&.call(resp['id'], resp) if resp['id']
+
+      # handle storage subscription
+      return unless resp['params'] && resp['params']['subscription']
+      return unless @metadata && @registry
+
+      subscription = resp['params']['subscription']
+      changes = resp['params']['result']['changes']
+      block = resp['params']['result']['block']
+      p "block: #{block}"
+
+      return unless @subscription_callbacks[subscription]
+
+      pallet_name, item_name, subscription_callback = @subscription_callbacks[subscription]
+      storage_item = Metadata.get_storage_item(pallet_name, item_name, @metadata)
+      storages = decode_storages(changes.map(&:last), storage_item, registry)
+      subscription_callback.call(storages)
+    end
 
+    def get_metadata(callback = nil)
+      if callback.nil?
+        callback = lambda do |id, resp|
+          return unless resp['id'] && resp['result']
+          return if resp['id'] != id
 
+          metadata_hex = resp['result']
+          metadata = Metadata.decode_metadata(metadata_hex.strip._to_bytes)
+          return unless metadata
 
+          @metadata = metadata
+          @registry = Metadata.build_registry(@metadata)
+        end
       end
-    end
 
+      id = bind_id_to(callback)
+      body = state_getMetadata(id)
+      send_json_rpc(body)
+    end
 
+    def subscribe_storage(pallet_name, item_name, subscription_callback, key = nil, registry = nil)
+      callback = create_callback_for_subscribe_storage(pallet_name, item_name, subscription_callback)
+      id = bind_id_to(callback)
+      body = derived_state_subscribe_storage(id, pallet_name, item_name, key, registry)
+      send_json_rpc(body)
+    end
 
+    private
 
+    def bind_id_to(callback)
+      @callbacks[@id] = callback
+      old = @id
+      @id += 1
+      old
+    end
 
+    def decode_storages(datas, storage_item, registry)
+      datas.map do |data|
+        StorageHelper.decode_storage2(data, storage_item, registry)
+      end
     end
-  end
 
+    def create_callback_for_subscribe_storage(pallet_name, item_name, subscription_callback)
+      lambda do |id, resp|
+        return unless resp['id'] && resp['result']
+        return if resp['id'] != id
 
+        @subscription_callbacks[resp['result']] = [
+          pallet_name,
+          item_name,
+          subscription_callback
+        ]
+      end
     end
   end
 end
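AbstractWsClient deliberately leaves the transport to subclasses: send_json_rpc raises in the base class, and process expects to be fed each parsed JSON-RPC message. A rough sketch of the expected subclass shape; the websocket object and the to_json call are assumptions for illustration, not part of this diff:

# Hypothetical concrete client; only the send_json_rpc override is dictated by the base class.
class MyWsClient < ScaleRb::AbstractWsClient
  def initialize(ws)
    super()
    @ws = ws # anything that responds to #send(String)
  end

  def send_json_rpc(body)
    @ws.send(body.to_json) # assuming the request builder returns a Hash
  end
end

# In the socket's on-message handler, hand every parsed payload to the client:
#   client.process(JSON.parse(message))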
data/lib/client/http_client.rb
CHANGED
@@ -3,7 +3,9 @@
 require 'uri'
 require 'net/http'
 require 'json'
+require_relative './rpc_request_builder'
 require_relative './http_client_metadata'
+require_relative './http_client_storage'
 
 # TODO: method_name = cmd.gsub(/([a-z\d])([A-Z])/, '\1_\2').downcase
 module ScaleRb
@@ -60,176 +62,6 @@ module ScaleRb
         result = json_rpc_call(url, 'rpc_methods', [])
         result['methods']
       end
-
-      def get_metadata(url, at = nil)
-        hex = state_getMetadata(url, at)
-        Metadata.decode_metadata(hex.strip.to_bytes)
-      end
-
-      def query_storage_at(url, storage_keys, type_id, default, registry, at = nil)
-        result = state_queryStorageAt(url, storage_keys, at)
-        result.map do |item|
-          item['changes'].map do |change|
-            storage_key = change[0]
-            data = change[1] || default
-            storage = data.nil? ? nil : PortableCodec.decode(type_id, data.to_bytes, registry)[0]
-            { storage_key: storage_key, storage: storage }
-          end
-        end.flatten
-      end
-
-      def get_storage_keys_by_partial_key(url, partial_storage_key, start_key = nil, at = nil)
-        storage_keys = state_getKeysPaged(url, partial_storage_key, 1000, start_key, at)
-        if storage_keys.length == 1000
-          storage_keys + get_storage_keys_by_partial_key(url, partial_storage_key, storage_keys.last, at)
-        else
-          storage_keys
-        end
-      end
-
-      def get_storages_by_partial_key(url, partial_storage_key, type_id_of_value, default, registry, at = nil)
-        storage_keys = get_storage_keys_by_partial_key(url, partial_storage_key, partial_storage_key, at)
-        storage_keys.each_slice(250).map do |slice|
-          query_storage_at(
-            url,
-            slice,
-            type_id_of_value,
-            default,
-            registry,
-            at
-          )
-        end.flatten
-      end
-
-      # 1. Plain
-      #    key: nil
-      #    value: { type: 3, modifier: 'Default', callback: '' }
-      #
-      # 2. Map
-      #    key: { value: value, type: 0, hashers: ['Blake2128Concat'] }
-      #    value: { type: 3, modifier: 'Default', callback: '' }
-      #
-      # 3. Map, but key.value is nil
-      #    key: { value: nil, type: 0, hashers: ['Blake2128Concat'] }
-      #    value: { type: 3, modifier: 'Default', callback: '' }
-      #
-      # example:
-      #   'System',
-      #   'Account',
-      #   key = {
-      #     value: [['0x724d50824542b56f422588421643c4a162b90b5416ef063f2266a1eae6651641'.to_bytes]], # [AccountId]
-      #     type: 0,
-      #     hashers: ['Blake2128Concat']
-      #   },
-      #   value = {
-      #     type: 3,
-      #     modifier: 'Default',
-      #     callback: '0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
-      #   },
-      #   ..
-      #
-      # TODO: part of the key is provided, but not all
-      def get_storage(url, pallet_name, item_name, key, value, registry, at = nil)
-        if key
-          if key[:value].nil? || key[:value].empty?
-            # map, but no key's value provided. get all storages under the partial storage key
-            partial_storage_key = StorageHelper.encode_storage_key(pallet_name, item_name).to_hex
-            get_storages_by_partial_key(
-              url,
-              partial_storage_key,
-              value[:type],
-              value[:modifier] == 'Default' ? value[:fallback] : nil,
-              registry,
-              at
-            )
-          elsif key[:value].length != key[:hashers].length
-            # map with multi part, but only provide part value
-            partial_storage_key = StorageHelper.encode_storage_key(pallet_name, item_name, key, registry).to_hex
-            get_storages_by_partial_key(
-              url,
-              partial_storage_key,
-              value[:type],
-              value[:modifier] == 'Default' ? value[:fallback] : nil,
-              registry,
-              at
-            )
-          end
-        else
-          storage_key = StorageHelper.encode_storage_key(pallet_name, item_name, key, registry).to_hex
-          data = state_getStorage(url, storage_key, at)
-          StorageHelper.decode_storage(data, value[:type], value[:modifier] == 'Optional', value[:fallback], registry)
-        end
-      end
-
-      def get_storage2(url, pallet_name, item_name, value_of_key, metadata, at = nil)
-        raise 'Metadata should not be nil' if metadata.nil?
-
-        registry = Metadata.build_registry(metadata)
-        item = Metadata.get_storage_item(pallet_name, item_name, metadata)
-        raise "No such storage item: `#{pallet_name}`.`#{item_name}`" if item.nil?
-
-        modifier = item._get(:modifier) # Default | Optional
-        fallback = item._get(:fallback)
-        type = item._get(:type)
-
-        plain = type._get(:plain)
-        map = type._get(:map)
-        # debug
-
-        key, value =
-          if plain
-            [
-              nil,
-              { type: plain, modifier: modifier, fallback: fallback }
-            ]
-          elsif map
-            [
-              { value: value_of_key, type: map._get(:key), hashers: map._get(:hashers) },
-              { type: map._get(:value), modifier: modifier, fallback: fallback }
-            ]
-          else
-            raise 'NoSuchStorageType'
-          end
-        get_storage(url, pallet_name, item_name, key, value, registry, at)
-      end
-
-      # get_storage3 is a more ruby style function
-      #
-      # pallet_name and storage_name is pascal style like 'darwinia_staking'
-      def get_storage3(url, metadata, pallet_name, storage_name, key_part1: nil, key_part2: nil, at: nil)
-        pallet_name = to_pascal pallet_name
-        storage_name = to_pascal storage_name
-        ScaleRb.logger.debug "#{pallet_name}.#{storage_name}(#{[key_part1, key_part2].compact.join(', ')})"
-
-        key = [key_part1, key_part2].compact.map { |part_of_key| c(part_of_key) }
-        ScaleRb.logger.debug "converted key: #{key}"
-
-        get_storage2(
-          url,
-          pallet_name,
-          storage_name,
-          key,
-          metadata,
-          at
-        )
-      end
-
-      private
-
-      def to_pascal(str)
-        str.split('_').collect(&:capitalize).join
-      end
-
-      # convert key to byte array
-      def c(key)
-        if key.start_with?('0x')
-          key.to_bytes
-        elsif key.to_i.to_s == key # check if key is a number
-          key.to_i
-        else
-          key
-        end
-      end
     end
   end
 end
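The storage helpers deleted above are not dropped from the gem: http_client.rb now requires the new http_client_storage.rb (added in this release with 180 lines), so they should remain reachable through ScaleRb::HttpClient. A hedged sketch, assuming get_storage3 keeps the keyword interface shown in the removed code; the endpoint URL is a placeholder:

require 'scale_rb'

url = 'https://rpc.example.com' # placeholder endpoint
metadata = ScaleRb::HttpClient.get_metadata(url)

# Same call shape as the get_storage3 being moved out of this file:
account = ScaleRb::HttpClient.get_storage3(
  url, metadata, 'system', 'account',
  key_part1: '0x724d50824542b56f422588421643c4a162b90b5416ef063f2266a1eae6651641'
)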
data/lib/client/http_client_metadata.rb
CHANGED
@@ -4,14 +4,16 @@ require 'fileutils'
 module ScaleRb
   module HttpClient
     class << self
+      def get_metadata(url, at = nil)
+        hex = state_getMetadata(url, at)
+        Metadata.decode_metadata(hex.strip._to_bytes)
+      end
+
       # cached version of get_metadata
       # get metadata from cache first
-      def get_metadata_cached(url, at: nil, dir:
-
-
-      # else
-      #   at = ScaleRb::HttpClient.chain_getFinalizedHead(url)
-      # end
+      def get_metadata_cached(url, at: nil, dir: nil)
+        dir = ENV['SCALE_RB_METADATA_DIR'] || File.join(Dir.pwd, 'metadata') if dir.nil?
+
         at = ScaleRb::HttpClient.chain_getFinalizedHead(url) if at.nil?
         spec_name, spec_version = get_spec(url, at)
 
@@ -27,7 +29,7 @@ module ScaleRb
         metadata = ScaleRb::HttpClient.get_metadata(url, at)
 
         # cache it
-
+        ScaleRb.logger.debug "caching metadata `#{spec_name}_#{spec_version}.json`"
         save_metadata_to_file(
           spec_name: spec_name,
           spec_version: spec_version,
@@ -54,7 +56,7 @@ module ScaleRb
        file_path = File.join(dir, "#{spec_name}_#{spec_version}.json")
        return unless File.exist?(file_path)
 
-
+       ScaleRb.logger.debug "found metadata `#{spec_name}_#{spec_version}.json` in cache"
        JSON.parse(File.read(file_path))
      end
 
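The cache directory for get_metadata_cached is now resolved at call time: an explicit dir: argument wins, then the SCALE_RB_METADATA_DIR environment variable, then ./metadata under the current working directory, with cache hits and writes logged at debug level. A small sketch of both paths (the RPC URL is a placeholder):

# Explicit cache directory:
metadata = ScaleRb::HttpClient.get_metadata_cached('https://rpc.example.com', dir: '/tmp/scale_rb_metadata')

# Or rely on the environment variable / ./metadata fallback:
ENV['SCALE_RB_METADATA_DIR'] = File.expand_path('~/.cache/scale_rb')
metadata = ScaleRb::HttpClient.get_metadata_cached('https://rpc.example.com')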