xrpl-ruby 0.0.1 → 0.2.4
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/address-codec/address_codec.rb +118 -0
- data/lib/address-codec/codec.rb +98 -0
- data/lib/address-codec/xrp_codec.rb +79 -0
- data/lib/binary-codec/binary_codec.rb +38 -0
- data/lib/binary-codec/enums/constants.rb +8 -0
- data/lib/binary-codec/enums/definitions.json +3183 -0
- data/lib/binary-codec/enums/definitions.rb +78 -0
- data/lib/binary-codec/enums/fields.rb +102 -0
- data/lib/binary-codec/serdes/binary_parser.rb +145 -0
- data/lib/binary-codec/serdes/binary_serializer.rb +82 -0
- data/lib/binary-codec/serdes/bytes_list.rb +36 -0
- data/lib/binary-codec/types/account_id.rb +79 -0
- data/lib/binary-codec/types/amount.rb +284 -0
- data/lib/binary-codec/types/blob.rb +32 -0
- data/lib/binary-codec/types/currency.rb +105 -0
- data/lib/binary-codec/types/hash.rb +105 -0
- data/lib/binary-codec/types/serialized_type.rb +133 -0
- data/lib/binary-codec/types/st_object.rb +60 -0
- data/lib/binary-codec/types/uint.rb +53 -0
- data/lib/binary-codec/utilities.rb +80 -0
- data/lib/core/base_58_xrp.rb +12 -0
- data/lib/core/base_x.rb +1 -0
- data/lib/core/core.rb +71 -1
- data/lib/xrpl-ruby.rb +12 -1
- metadata +24 -3
@@ -0,0 +1,78 @@
# frozen_string_literal: true
require 'json'

module BinaryCodec

  class Definitions

    @@instance = nil

    def initialize
      file_path = File.join(__dir__, 'definitions.json')
      contents = File.read(file_path)
      @definitions = JSON.parse(contents)

      @type_ordinals = @definitions['TYPES']
      @ledger_entry_types = @definitions['LEDGER_ENTRY_TYPES']
      @transaction_results = @definitions['TRANSACTION_RESULTS']
      @transaction_types = @definitions['TRANSACTION_TYPES']

      @field_info_map = {}
      @field_id_name_map = {}
      @field_header_map = {}

      @definitions['FIELDS'].each do |field|
        field_name = field[0]
        field_info = FieldInfo.new(
          nth: field[1]['nth'],
          is_vl_encoded: field[1]['isVLEncoded'],
          is_serialized: field[1]['isSerialized'],
          is_signing_field: field[1]['isSigningField'],
          type: field[1]['type']
        )
        field_header = FieldHeader.new(type: @type_ordinals[field_info.type], nth: field_info.nth)

        @field_info_map[field_name] = field_info
        @field_id_name_map[Digest::MD5.hexdigest(Marshal.dump(field_header))] = field_name
        @field_header_map[field_name] = field_header
      end

    rescue Errno::ENOENT
      raise "Error: The file '#{file_path}' was not found. Please ensure the file exists."
    rescue JSON::ParserError => e
      raise "Error: The file '#{file_path}' contains invalid JSON: #{e.message}"

    end

    def self.instance
      @@instance ||= new
    end

    def get_field_header_from_name(field_name)
      @field_header_map[field_name]
    end

    def get_field_name_from_header(field_header)
      @field_id_name_map[Digest::MD5.hexdigest(Marshal.dump(field_header))]
    end

    def get_field_instance(field_name)
      field_info = @field_info_map[field_name]
      field_header = get_field_header_from_name(field_name)

      FieldInstance.new(
        nth: field_info.nth,
        is_variable_length_encoded: field_info.is_vl_encoded,
        is_serialized: field_info.is_serialized,
        is_signing_field: field_info.is_signing_field,
        type: field_info.type,
        ordinal: (field_header.type << 16) | field_info.nth, # @type_ordinals[field_info.type],
        name: field_name,
        header: field_header,
        associated_type: field_info.type # SerializedType::getTypeByName($this->type)::class;
      )
    end

  end

end
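The hunk above is the new Definitions singleton (the +78 lines of enums/definitions.rb). A minimal lookup sketch, assuming the gem's entry file (data/lib/xrpl-ruby.rb) requires these files and that the 'Account' field is present in definitions.json, as it is in the standard XRPL field definitions; note the class also relies on Digest::MD5 being loaded elsewhere in the gem:

    require 'xrpl-ruby'  # assumed entry point, per data/lib/xrpl-ruby.rb

    defs   = BinaryCodec::Definitions.instance           # parses definitions.json once
    header = defs.get_field_header_from_name('Account')  # type/field codes for 'Account'
    field  = defs.get_field_instance('Account')          # full FieldInstance

    # The ordinal packs the same codes the parser reads back from the wire:
    field.ordinal == ((header.type << 16) | header.nth)  # => true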
@@ -0,0 +1,102 @@
# frozen_string_literal: true

module BinaryCodec

  class FieldHeader

    attr_reader :type, :nth

    def initialize(type:, nth:)
      @type = type
      @nth = nth
    end

    def to_bytes
      header = []
      if type < 16
        if nth < 16
          header.push((type << 4) | nth)
        else
          header.push(type << 4, nth)
        end
      elsif nth < 16
        header.push(nth, type)
      else
        header.push(0, type, nth)
      end

      header
    end

  end
  class FieldInfo

    attr_reader :nth, :is_vl_encoded, :is_serialized, :is_signing_field, :type

    def initialize(nth:, is_vl_encoded:, is_serialized:, is_signing_field:, type:)
      @nth = nth
      @is_vl_encoded = is_vl_encoded
      @is_serialized = is_serialized
      @is_signing_field = is_signing_field
      @type = type
    end

  end

  class FieldInstance

    attr_reader :nth, :is_variable_length_encoded, :is_serialized, :is_signing_field, :type, :ordinal, :name, :header, :associated_type

    def initialize(nth:, is_variable_length_encoded:, is_serialized:, is_signing_field:, type:, ordinal:, name:, header:, associated_type:)
      @nth = nth
      @is_variable_length_encoded = is_variable_length_encoded
      @is_serialized = is_serialized
      @is_signing_field = is_signing_field
      @type = type
      @ordinal = ordinal
      @name = name
      @header = header
      @associated_type = associated_type
    end

  end

  # TODO: See if this makes sense or if Ruby hashes are just fine
  class FieldLookup
    def initialize(fields:, types:)
      @fields_hash = {}

      fields.each do |name, field_info|
        type_ordinal = types[field_info.type]
        field = build_field([name, field_info], type_ordinal) # Store the built field
        @fields_hash[name] = field # Map field by name
        @fields_hash[field.ordinal.to_s] = field # Map field by ordinal
      end
    end

    # Method to retrieve a FieldInstance by its string key
    def from_string(value)
      @fields_hash[value]
    end

    private

    # Dummy build_field method (must be implemented elsewhere)
    def build_field(field, type_ordinal)
      field_header = FieldHeader.new(type: field[1].type, nth: field[1].nth)
      FieldInstance.new(
        nth: field[1].nth,
        is_variable_length_encoded: field[1].is_vl_encoded,
        is_serialized: field[1].is_serialized,
        is_signing_field: field[1].is_signing_field,
        type: field[1].type,
        ordinal: type_ordinal,
        name: field[0],
        header: field_header,
        associated_type: SerializedType
        #associated_type: SerializedType.get_type_by_name(field[1].type)
      )
    end
  end

end
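Worked through FieldHeader#to_bytes above, the three header layouts come out as follows (a sketch that uses only this file, fields.rb, per the +102 count):

    BinaryCodec::FieldHeader.new(type: 1,  nth: 2).to_bytes   # => [18]         (0x12: type < 16, nth < 16)
    BinaryCodec::FieldHeader.new(type: 1,  nth: 20).to_bytes  # => [16, 20]     (type < 16, nth >= 16)
    BinaryCodec::FieldHeader.new(type: 16, nth: 2).to_bytes   # => [2, 16]      (type >= 16, nth < 16)
    BinaryCodec::FieldHeader.new(type: 16, nth: 20).to_bytes  # => [0, 16, 20]  (both >= 16)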
@@ -0,0 +1,145 @@
# frozen_string_literal: true

module BinaryCodec

  class BinaryParser

    attr_reader :definitions

    def initialize(hex_bytes = '')
      @bytes = hex_to_bytes(hex_bytes)
      @definitions = Definitions.instance
    end

    def peek
      if @bytes.empty?
        raise StandardError.new
      end
      @bytes[0]
    end

    def skip(n)
      if n > @bytes.length
        raise StandardError.new
      end
      @bytes = @bytes[n..-1]
    end

    def read(n)
      if n > @bytes.length
        raise StandardError.new('End of byte stream reached')
      end

      slice = @bytes[0, n]
      skip(n)
      slice
    end

    def read_uint_n(n)
      if n <= 0 || n > 4
        raise StandardError.new('invalid n')
      end
      read(n).reduce(0) { |a, b| (a << 8) | b }
    end

    def read_uint8
      read_uint_n(1)
    end

    def read_uint16
      read_uint_n(2)
    end

    def read_uint32
      read_uint_n(4)
    end

    def size
      @bytes.length
    end

    def end?(custom_end = nil)
      length = @bytes.length
      length == 0 || (!custom_end.nil? && length <= custom_end)
    end

    def read_variable_length
      read(read_variable_length_length)
    end

    def read_variable_length_length
      b1 = read_uint8
      if b1 <= 192
        b1
      elsif b1 <= 240
        b2 = read_uint8
        193 + (b1 - 193) * 256 + b2
      elsif b1 <= 254
        b2 = read_uint8
        b3 = read_uint8
        12481 + (b1 - 241) * 65536 + b2 * 256 + b3
      else
        raise StandardError.new('Invalid variable length indicator')
      end
    end

    def read_field_header
      type = read_uint8
      nth = type & 15
      type >>= 4

      if type == 0
        type = read_uint8
        if type == 0 || type < 16
          raise StandardError.new("Cannot read FieldOrdinal, type_code #{type} out of range")
        end
      end

      if nth == 0
        nth = read_uint8
        if nth == 0 || nth < 16
          raise StandardError.new("Cannot read FieldOrdinal, field_code #{nth} out of range")
        end
      end

      FieldHeader.new(type: type, nth: nth) # (type << 16) | nth for read_field_ordinal
    end

    def read_field
      field_header = read_field_header
      field_name = @definitions.get_field_name_from_header(field_header)

      @definitions.get_field_instance(field_name)
    end

    def read_type(type)
      type.from_parser(self)
    end

    def type_for_field(field)
      field.associated_type
    end

    def read_field_value(field)
      type = SerializedType.get_type_by_name(field.type)

      if type.nil?
        raise StandardError.new("unsupported: (#{field.name}, #{field.type.name})")
      end

      size_hint = field.is_variable_length_encoded ? read_variable_length_length : nil
      value = type.from_parser(self, size_hint)

      if value.nil?
        raise StandardError.new("from_parser for (#{field.name}, #{field.type.name}) -> nil")
      end

      value
    end

    # get_size
    # read_field_and_value

  end

end
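read_variable_length_length above implements the XRPL length-prefix scheme: one byte for lengths up to 192, two bytes up to 12480, three bytes up to 918744. Worked for a two-byte prefix 0xC1 0xFF:

    b1 = 0xC1                    # 193..240 selects the two-byte form
    b2 = 0xFF
    193 + (b1 - 193) * 256 + b2  # => 448, the number of payload bytes that follow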
@@ -0,0 +1,82 @@
# frozen_string_literal: true

module BinaryCodec
  class BinarySerializer

    def initialize(sink)
      @sink = sink || BytesList.new
    end

    def write(value)
      value.to_byte_sink(@sink)
    end

    def put(bytes)
      @sink.put(bytes)
    end

    def write_type(type, value)
      write(type.from(value))
    end

    def write_bytes_list(bytes_list)
      bytes_list.to_byte_sink(@sink)
    end

    def write_field_and_value(field, value, is_unl_modify_workaround = false)
      field_header = FieldHeader.new(type: field.header.type, nth: field.nth)
      type_class = SerializedType.get_type_by_name(field.type)
      associated_value = type_class.from(value)

      if !associated_value.respond_to?(:to_byte_sink) || field.name.nil?
        raise 'Error'
      end

      @sink.put(field_header.to_bytes)

      if field.is_variable_length_encoded
        write_length_encoded(associated_value, is_unl_modify_workaround)
      else
        associated_value.to_byte_sink(@sink)
      end
    end

    def write_length_encoded(value, is_unl_modify_workaround = false)
      bytes = BytesList.new

      unless is_unl_modify_workaround
        # This part doesn't happen for the Account field in a UNLModify transaction
        value.to_byte_sink(bytes)
      end

      self.put(encode_variable_length(bytes.get_length))
      write_bytes_list(bytes)
    end

    private

    def encode_variable_length(length)
      len_bytes = [0, 0, 0] # Create an array to hold 3 bytes (default 0)

      if length <= 192
        len_bytes[0] = length
        return len_bytes[0, 1] # Equivalent to slice(0, 1)
      elsif length <= 12480
        length -= 193
        len_bytes[0] = 193 + (length >> 8) # Equivalent to length >>> 8 in TypeScript
        len_bytes[1] = length & 0xff
        return len_bytes[0, 2] # Equivalent to slice(0, 2)
      elsif length <= 918744
        length -= 12481
        len_bytes[0] = 241 + (length >> 16)
        len_bytes[1] = (length >> 8) & 0xff
        len_bytes[2] = length & 0xff
        return len_bytes[0, 3] # Equivalent to slice(0, 3)
      end

      raise 'Overflow error'
    end

  end

end
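encode_variable_length above is the serializer-side inverse of the parser's read_variable_length_length; for example, a 448-byte value falls in the two-byte range and produces the 0xC1 0xFF prefix decoded earlier:

    length = 448 - 193                    # => 255
    [193 + (length >> 8), length & 0xff]  # => [193, 255], i.e. 0xC1 0xFF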
@@ -0,0 +1,36 @@
# frozen_string_literal: true

module BinaryCodec
  class BytesList

    attr_reader :bytes_array

    def initialize
      @bytes_array = []
    end

    def get_length
      @bytes_array.inject(0) { |sum, arr| sum + arr.length }
    end

    def put(bytes_arg)
      bytes = bytes_arg.dup
      @bytes_array << bytes
      self # Allow chaining
    end

    def to_byte_sink(list)
      list.put(to_bytes)
    end

    def to_bytes
      @bytes_array.flatten # TODO: Uses concat in xrpl.js, maybe implement that instead
    end

    def to_hex
      bytes_to_hex(to_bytes)
    end

  end

end
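A small sketch of the BytesList sink above; put returns self so writes chain, and to_hex additionally needs the bytes_to_hex helper from utilities.rb:

    list = BinaryCodec::BytesList.new
    list.put([0x12]).put([0x00, 0xFF])
    list.get_length  # => 3
    list.to_bytes    # => [18, 0, 255]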
@@ -0,0 +1,79 @@
require 'address-codec/address_codec'
require 'address-codec/xrp_codec'

module BinaryCodec
  class AccountId < Hash160

    # Create a single instance of AddressCodec for use in static functions
    @address_codec = AddressCodec::AddressCodec.new

    attr_reader :bytes

    @width = 20

    def initialize(bytes = nil)
      super(bytes || Array.new(20, 0))
    end

    #def self.address_codec
    # @address_codec ||= AddressCodec::AddressCodec.new
    #end

    # Defines how to construct an AccountID
    #
    # @param value [AccountID, String] Either an existing AccountID, a hex string, or a base58 r-Address
    # @return [AccountID] An AccountID object
    def self.from(value)
      if value.is_a?(AccountId)
        return value
      end

      if value.is_a?(String)
        return AccountId.new if value.empty?

        if valid_hex?(value)
          return AccountId.new(hex_to_bytes(value))
        else
          return from_base58(value)
        end
      end

      raise 'Cannot construct AccountID from the value provided'
    end

    # Defines how to build an AccountID from a base58 r-Address
    #
    # @param value [String] A base58 r-Address
    # @return [AccountID] An AccountID object
    def self.from_base58(value)
      if @address_codec.valid_x_address?(value)
        classic = @address_codec.x_address_to_classic_address(value)

        if classic[:tag] != false
          raise 'Only allowed to have tag on Account or Destination'
        end

        value = classic[:classic_address]
      end

      AccountId.new(@address_codec.decode_account_id(value))
    end

    # Overload of to_json
    #
    # @return [String] The base58 string for this AccountID
    def to_json
      to_base58
    end

    # Defines how to encode AccountID into a base58 address
    #
    # @return [String] The base58 string defined by this.bytes
    def to_base58
      address_codec = AddressCodec::AddressCodec.new
      address_codec.encode_account_id(@bytes)
    end

  end

end
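A usage sketch for AccountId.from above, assuming the address-codec files and the hex helpers from utilities.rb are loaded; the hex string is the well-known AccountID of the XRPL genesis address and is used here purely as an illustration:

    acct = BinaryCodec::AccountId.from('rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh')
    acct.to_base58  # => "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh"

    # A 40-character hex string takes the valid_hex? branch instead and
    # should decode to the same 20 bytes:
    BinaryCodec::AccountId.from('B5F762798A53D543A014CAF8B297CFF8F2F937E8').to_base58
    # => "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh"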