oydid 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/AUTHORS +1 -0
- data/LICENSE +21 -0
- data/README.md +5 -0
- data/VERSION +1 -0
- data/lib/oydid/basic.rb +212 -0
- data/lib/oydid/log.rb +391 -0
- data/lib/oydid.rb +661 -0
- data/spec/input/basic/arrays.json +8 -0
- data/spec/input/basic/french.json +6 -0
- data/spec/input/basic/sample2_get_location.doc +1 -0
- data/spec/input/basic/sample2_retrieve_document.doc +1 -0
- data/spec/input/basic/sample3_retrieve_document.doc +1 -0
- data/spec/input/basic/sample4_retrieve_document.doc +1 -0
- data/spec/input/basic/sample5_retrieve_document.doc +1 -0
- data/spec/input/basic/sample_dec.doc +1 -0
- data/spec/input/basic/sample_enc.doc +1 -0
- data/spec/input/basic/sample_get_location.doc +1 -0
- data/spec/input/basic/sample_hash.doc +1 -0
- data/spec/input/basic/sample_invalid2_readkey.doc +1 -0
- data/spec/input/basic/sample_invalid2_verify.doc +1 -0
- data/spec/input/basic/sample_invalid3_readkey.doc +1 -0
- data/spec/input/basic/sample_invalid3_verify.doc +1 -0
- data/spec/input/basic/sample_invalid_privkey.doc +1 -0
- data/spec/input/basic/sample_invalid_readkey.doc +1 -0
- data/spec/input/basic/sample_invalid_sign.doc +1 -0
- data/spec/input/basic/sample_invalid_verify.doc +1 -0
- data/spec/input/basic/sample_key.doc +1 -0
- data/spec/input/basic/sample_readkey.doc +1 -0
- data/spec/input/basic/sample_retrieve_document.doc +1 -0
- data/spec/input/basic/sample_sign.doc +1 -0
- data/spec/input/basic/sample_valid_privkey.doc +1 -0
- data/spec/input/basic/sample_verify.doc +1 -0
- data/spec/input/basic/structures.json +8 -0
- data/spec/input/basic/unicode.json +3 -0
- data/spec/input/basic/values.json +5 -0
- data/spec/input/basic/wierd.json +11 -0
- data/spec/input/basic/zQmaBZTghn.doc +1 -0
- data/spec/input/log/sample0_dag2array.doc +1 -0
- data/spec/input/log/sample0_dag_did.doc +1 -0
- data/spec/input/log/sample1_dag_did.doc +1 -0
- data/spec/input/log/sample1_dag_update.doc +1 -0
- data/spec/input/log/sample2_dag_did.doc +1 -0
- data/spec/input/log/sample2_dag_update.doc +1 -0
- data/spec/input/log/sample2_retrieve_log.doc +1 -0
- data/spec/input/log/sample3_dag_did.doc +1 -0
- data/spec/input/log/sample3_dag_update.doc +1 -0
- data/spec/input/log/sample3_retrieve_log.doc +1 -0
- data/spec/input/log/sample4_dag_did.doc +1 -0
- data/spec/input/log/sample4_dag_update.doc +1 -0
- data/spec/input/log/sample4_retrieve_log.doc +1 -0
- data/spec/input/log/sample5_dag_update.doc +1 -0
- data/spec/input/log/sample5_retrieve_log.doc +1 -0
- data/spec/input/log/sample6_dag_update.doc +1 -0
- data/spec/input/log/sample6_retrieve_log.doc +1 -0
- data/spec/input/log/sample7_dag_update.doc +1 -0
- data/spec/input/log/sample7_retrieve_log.doc +1 -0
- data/spec/input/log/sample8_dag_update.doc +1 -0
- data/spec/input/log/sample_addhash.doc +1 -0
- data/spec/input/log/sample_dag_update.doc +1 -0
- data/spec/input/log/sample_match_log.doc +1 -0
- data/spec/input/log/sample_op1_addhash.doc +1 -0
- data/spec/input/log/sample_retrieve_log.doc +1 -0
- data/spec/input/main/sample0_read.doc +1 -0
- data/spec/output/basic/arrays.json +1 -0
- data/spec/output/basic/french.json +1 -0
- data/spec/output/basic/sample2_get_location.doc +1 -0
- data/spec/output/basic/sample2_retrieve_document.doc +1 -0
- data/spec/output/basic/sample3_retrieve_document.doc +1 -0
- data/spec/output/basic/sample4_retrieve_document.doc +1 -0
- data/spec/output/basic/sample5_retrieve_document.doc +1 -0
- data/spec/output/basic/sample_dec.doc +1 -0
- data/spec/output/basic/sample_enc.doc +1 -0
- data/spec/output/basic/sample_get_location.doc +1 -0
- data/spec/output/basic/sample_hash.doc +1 -0
- data/spec/output/basic/sample_invalid2_readkey.doc +1 -0
- data/spec/output/basic/sample_invalid2_verify.doc +1 -0
- data/spec/output/basic/sample_invalid3_readkey.doc +1 -0
- data/spec/output/basic/sample_invalid3_verify.doc +1 -0
- data/spec/output/basic/sample_invalid_privkey.doc +1 -0
- data/spec/output/basic/sample_invalid_readkey.doc +1 -0
- data/spec/output/basic/sample_invalid_sign.doc +1 -0
- data/spec/output/basic/sample_invalid_verify.doc +1 -0
- data/spec/output/basic/sample_key.doc +1 -0
- data/spec/output/basic/sample_readkey.doc +1 -0
- data/spec/output/basic/sample_retrieve_document.doc +1 -0
- data/spec/output/basic/sample_sign.doc +1 -0
- data/spec/output/basic/sample_valid_privkey.doc +1 -0
- data/spec/output/basic/sample_verify.doc +1 -0
- data/spec/output/basic/structures.json +1 -0
- data/spec/output/basic/unicode.json +1 -0
- data/spec/output/basic/values.json +1 -0
- data/spec/output/basic/wierd.json +1 -0
- data/spec/output/log/sample0_dag2array.doc +1 -0
- data/spec/output/log/sample0_dag_did.doc +1 -0
- data/spec/output/log/sample1_dag_did.doc +1 -0
- data/spec/output/log/sample1_dag_update.doc +1 -0
- data/spec/output/log/sample2_dag_did.doc +1 -0
- data/spec/output/log/sample2_dag_update.doc +1 -0
- data/spec/output/log/sample2_retrieve_log.doc +1 -0
- data/spec/output/log/sample3_dag_did.doc +1 -0
- data/spec/output/log/sample3_dag_update.doc +1 -0
- data/spec/output/log/sample3_retrieve_log.doc +1 -0
- data/spec/output/log/sample4_dag_did.doc +1 -0
- data/spec/output/log/sample4_dag_update.doc +1 -0
- data/spec/output/log/sample4_retrieve_log.doc +1 -0
- data/spec/output/log/sample5_dag_update.doc +1 -0
- data/spec/output/log/sample5_retrieve_log.doc +1 -0
- data/spec/output/log/sample6_dag_update.doc +1 -0
- data/spec/output/log/sample6_retrieve_log.doc +1 -0
- data/spec/output/log/sample7_dag_update.doc +1 -0
- data/spec/output/log/sample7_retrieve_log.doc +1 -0
- data/spec/output/log/sample8_dag_update.doc +1 -0
- data/spec/output/log/sample_addhash.doc +1 -0
- data/spec/output/log/sample_dag_update.doc +1 -0
- data/spec/output/log/sample_match_log.doc +1 -0
- data/spec/output/log/sample_op1_addhash.doc +1 -0
- data/spec/output/log/sample_retrieve_log.doc +1 -0
- data/spec/output/main/sample0_read.doc +1 -0
- data/spec/oydid_spec.rb +170 -0
- data/spec/spec_helper.rb +31 -0
- metadata +405 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA256:
+  metadata.gz: 84460bc149df8e10a4f5094879894399bd3d6670ff5ef1fc8c500811af5fb5e2
+  data.tar.gz: 3515b7f22cf286b3abda13299fd15a6a713f39a2048c8f1d3553f36832577e24
+SHA512:
+  metadata.gz: 1eadd2c6dfba9ef8788e72514b3c12561e8628a20c6fc14bb21172d3777227bde42accb6dfe5cda14ccdeb027cb53415c179193cbd70bf1fa637b1bfe1208078
+  data.tar.gz: 9e0f47ca1316e508faf65af727afb8a61de3a89e023d789d21ce69ca9328da46f4d1c9a197ee7486aba3c460476112bebf919803c26b7f0c06896f911b1ecfa6
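These are the standard RubyGems checksums of the two archives packaged inside the .gem file. A minimal sketch for recomputing the SHA256 values locally (illustrative only, not part of the package; assumes a downloaded copy named oydid-0.2.0.gem, e.g. via `gem fetch oydid -v 0.2.0`):

```ruby
# Recompute the SHA256 digests listed in checksums.yaml from a local .gem file.
require 'rubygems/package'
require 'digest'

File.open('oydid-0.2.0.gem', 'rb') do |gem_file|
  # a .gem file is a plain tar archive containing metadata.gz and data.tar.gz
  Gem::Package::TarReader.new(gem_file).each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
    puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
  end
end
```
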
data/AUTHORS
ADDED
@@ -0,0 +1 @@
+* Christoph Fabianek <christoph@ownyourdata.eu>

data/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 OwnYourData
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

data/README.md
ADDED
@@ -0,0 +1,5 @@
+# OYDID Gem
+
+[](http://badge.fury.io/rb/oydid)
+[](https://github.com/ownyourdata/oydid/actions?query=workflow%3ACI)
+[](https://coveralls.io/github/OwnYourData/oydid?branch=main)

data/VERSION
ADDED
@@ -0,0 +1 @@
+0.2.0

data/lib/oydid/basic.rb
ADDED
@@ -0,0 +1,212 @@
+# -*- encoding: utf-8 -*-
+# frozen_string_literal: true
+
+class Oydid
+
+    # basic functions ---------------------------
+    def self.encode(message, method = "base58btc")
+        Multibases.pack(method, message).to_s
+    end
+
+    def self.decode(message)
+        Multibases.unpack(message).decode.to_s('ASCII-8BIT')
+    end
+
+    def self.hash(message)
+        encode(Multihashes.encode(RbNaCl::Hash.sha256(message), "sha2-256").unpack('C*'))
+    end
+
+    def self.canonical(message)
+        if message.is_a? String
+            message = JSON.parse(message) rescue message
+        else
+            message = JSON.parse(message.to_json) rescue message
+        end
+        message.to_json_c14n
+    end
+
+    # key management ----------------------------
+    def self.generate_private_key(input, method = "ed25519-priv")
+        begin
+            omc = Multicodecs[method].code
+        rescue
+            return [nil, "unknown key codec"]
+        end
+
+        case Multicodecs[method].name
+        when 'ed25519-priv'
+            if input != ""
+                raw_key = Ed25519::SigningKey.new(RbNaCl::Hash.sha256(input)).to_bytes
+            else
+                raw_key = Ed25519::SigningKey.generate.to_bytes
+            end
+        else
+            return [nil, "unsupported key codec"]
+        end
+        length = raw_key.bytesize
+        return [encode([omc, length, raw_key].pack("SCa#{length}")), ""]
+    end
+
+    def self.public_key(private_key)
+        code, length, digest = decode(private_key).unpack('SCa*')
+        case Multicodecs[code].name
+        when 'ed25519-priv'
+            public_key = Ed25519::SigningKey.new(digest).verify_key
+            length = public_key.to_bytes.bytesize
+            return [encode([Multicodecs['ed25519-pub'].code, length, public_key].pack("CCa#{length}")), ""]
+        else
+            return [nil, "unsupported key codec"]
+        end
+    end
+
+    def self.sign(message, private_key)
+        code, length, digest = decode(private_key).unpack('SCa*')
+        case Multicodecs[code].name
+        when 'ed25519-priv'
+            return [encode(Ed25519::SigningKey.new(digest).sign(message)), ""]
+        else
+            return [nil, "unsupported key codec"]
+        end
+    end
+
+    def self.verify(message, signature, public_key)
+        begin
+            code, length, digest = decode(public_key).unpack('CCa*')
+            case Multicodecs[code].name
+            when 'ed25519-pub'
+                verify_key = Ed25519::VerifyKey.new(digest)
+                signature_verification = false
+                begin
+                    verify_key.verify(decode(signature), message)
+                    signature_verification = true
+                rescue Ed25519::VerifyError
+                    signature_verification = false
+                end
+                return [signature_verification, ""]
+            else
+                return [nil, "unsupported key codec"]
+            end
+        rescue
+            return [nil, "unknown key codec"]
+        end
+    end
+
+    def self.read_private_key(filename)
+        begin
+            f = File.open(filename)
+            key_encoded = f.read
+            f.close
+        rescue
+            return [nil, "cannot read file"]
+        end
+        decode_private_key(key_encoded)
+    end
+
+    def self.decode_private_key(key_encoded)
+        begin
+            code, length, digest = decode(key_encoded).unpack('SCa*')
+            case Multicodecs[code].name
+            when 'ed25519-priv'
+                private_key = Ed25519::SigningKey.new(digest).to_bytes
+            else
+                return [nil, "unsupported key codec"]
+            end
+            length = private_key.bytesize
+            return [Oydid.encode([code, length, private_key].pack("SCa#{length}")), ""]
+        rescue
+            return [nil, "invalid key"]
+        end
+    end
+
+    # storage functions -----------------------------
+    def self.write_private_storage(payload, filename)
+        File.open(filename, 'w') {|f| f.write(payload)}
+    end
+
+    def self.read_private_storage(filename)
+        File.open(filename, 'r') { |f| f.read }
+    end
+
+    def self.get_location(id)
+        if id.include?(LOCATION_PREFIX)
+            id_split = id.split(LOCATION_PREFIX)
+            return id_split[1]
+        else
+            if id.include?(CGI.escape(LOCATION_PREFIX))
+                id_split = id.split(CGI.escape(LOCATION_PREFIX))
+                return id_split[1]
+            else
+                return DEFAULT_LOCATION
+            end
+        end
+    end
+
+    def self.retrieve_document(doc_hash, doc_file, doc_location, options)
+        if doc_location == ""
+            doc_location = DEFAULT_LOCATION
+        end
+        if !(doc_location == "" || doc_location == "local")
+            if !doc_location.start_with?("http")
+                doc_location = "https://" + doc_location
+            end
+        end
+
+        case doc_location
+        when /^http/
+            retVal = HTTParty.get(doc_location + "/doc/" + doc_hash)
+            if retVal.code != 200
+                msg = retVal.parsed_response("error").to_s rescue "invalid response from " + doc_location.to_s + "/doc/" + doc_hash.to_s
+                return [nil, msg]
+            end
+            if options.transform_keys(&:to_s)["trace"]
+                if options[:silent].nil? || !options[:silent]
+                    puts "GET " + doc_hash + " from " + doc_location
+                end
+            end
+            return [retVal.parsed_response, ""]
+        when "", "local"
+            doc = JSON.parse(read_private_storage(doc_file)) rescue {}
+            if doc == {}
+                return [nil, "cannot read file"]
+            else
+                return [doc, ""]
+            end
+        end
+    end
+
+    def self.retrieve_document_raw(doc_hash, doc_file, doc_location, options)
+        if doc_location == ""
+            doc_location = DEFAULT_LOCATION
+        end
+        if !(doc_location == "" || doc_location == "local")
+            if !doc_location.start_with?("http")
+                doc_location = "https://" + doc_location
+            end
+        end
+
+        case doc_location
+        when /^http/
+            retVal = HTTParty.get(doc_location + "/doc_raw/" + doc_hash)
+            if retVal.code != 200
+                msg = retVal.parsed_response("error").to_s rescue "invalid response from " + doc_location.to_s + "/doc/" + doc_hash.to_s
+                return [nil, msg]
+            end
+            if options.transform_keys(&:to_s)["trace"]
+                if options[:silent].nil? || !options[:silent]
+                    puts "GET " + doc_hash + " from " + doc_location
+                end
+            end
+            return [retVal.parsed_response, ""]
+        when "", "local"
+            doc = JSON.parse(read_private_storage(doc_file)) rescue {}
+            log = JSON.parse(read_private_storage(doc_file.sub(".doc", ".log"))) rescue {}
+            if doc == {}
+                return [nil, "cannot read file"]
+            else
+                obj = {"doc" => doc, "log" => log}
+                return [obj, ""]
+            end
+        end
+    end
+
+end
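For orientation, a short usage sketch of the helpers defined in basic.rb (illustrative only, not part of the package contents; assumes the gem and its dependencies such as ed25519, rbnacl, multibases, multicodecs, multihashes and json-canonicalization are installed; the passphrase and document are made up). Each helper returns a `[value, error_message]` pair:

```ruby
require 'oydid'

# derive a deterministic Ed25519 private key from a passphrase
# (an empty string would generate a random key instead)
private_key, err = Oydid.generate_private_key("my secret passphrase")
public_key, err  = Oydid.public_key(private_key)

# canonicalize a document (JSON canonicalization), then sign and verify it
doc = Oydid.canonical({"hello" => "world"})
signature, err = Oydid.sign(doc, private_key)
valid, err     = Oydid.verify(doc, signature, public_key)

puts "hash:     #{Oydid.hash(doc)}"   # base58btc-encoded sha2-256 multihash
puts "verified: #{valid}"
```
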
data/lib/oydid/log.rb
ADDED
@@ -0,0 +1,391 @@
+# -*- encoding: utf-8 -*-
+# frozen_string_literal: true
+
+class Oydid
+    # log functions -----------------------------
+    def self.add_hash(log)
+        log.map do |item|
+            i = item.dup
+            i.delete("previous")
+            item["entry-hash"] = hash(canonical(item))
+            if item.transform_keys(&:to_s)["op"] == 1
+                item["sub-entry-hash"] = hash(canonical(i))
+            end
+            item
+        end
+    end
+
+    # check if signature matches current document
+    # check if signature in log is correct
+    def self.match_log_did?(log, doc)
+        message = log["doc"].to_s
+        signature = log["sig"].to_s
+        public_keys = doc["key"].to_s
+        public_key = public_keys.split(":")[0] rescue ""
+        return verify(message, signature, public_key).first
+    end
+
+    def self.retrieve_log(did_hash, log_file, log_location, options)
+        if log_location == ""
+            log_location = DEFAULT_LOCATION
+        end
+        if !(log_location == "" || log_location == "local")
+            if !log_location.start_with?("http")
+                log_location = "https://" + log_location
+            end
+        end
+
+        case log_location
+        when /^http/
+            retVal = HTTParty.get(log_location + "/log/" + did_hash)
+            if retVal.code != 200
+                msg = retVal.parsed_response("error").to_s rescue
+                    "invalid response from " + log_location.to_s + "/log/" + did_hash.to_s
+
+                return [nil, msg]
+            end
+            if options.transform_keys(&:to_s)["trace"]
+                if options[:silent].nil? || !options[:silent]
+                    puts "GET log for " + did_hash + " from " + log_location
+                end
+            end
+            retVal = JSON.parse(retVal.to_s) rescue nil
+            return [retVal, ""]
+        when "", "local"
+            doc = JSON.parse(read_private_storage(log_file)) rescue {}
+            if doc == {}
+                return [nil, "cannot read file '" + log_file + "'"]
+            end
+            return [doc, ""]
+        end
+    end
+
+    def self.dag_did(logs, options)
+        dag = DAG.new
+        dag_log = []
+        log_hash = []
+
+        # calculate hash values for each entry and build vertices
+        i = 0
+        create_entries = 0
+        create_index = nil
+        terminate_indices = []
+        logs.each do |el|
+            if el["op"].to_i == 2
+                create_entries += 1
+                create_index = i
+            end
+            if el["op"].to_i == 0
+                terminate_indices << i
+            end
+            log_hash << Oydid.hash(Oydid.canonical(el))
+            dag_log << dag.add_vertex(id: i)
+            i += 1
+        end unless logs.nil?
+
+        if create_entries != 1
+            return [nil, nil, nil, "wrong number of CREATE entries (" + create_entries.to_s + ") in log" ]
+        end
+        if terminate_indices.length == 0
+            return [nil, nil, nil, "missing TERMINATE entries" ]
+        end
+
+        # create edges between vertices
+        i = 0
+        logs.each do |el|
+            el["previous"].each do |p|
+                position = log_hash.find_index(p)
+                if !position.nil?
+                    dag.add_edge from: dag_log[position], to: dag_log[i]
+                end
+            end unless el["previous"] == []
+            i += 1
+        end unless logs.nil?
+
+        # identify tangling TERMINATE entry
+        i = 0
+        terminate_entries = 0
+        terminate_overall = 0
+        terminate_index = nil
+        logs.each do |el|
+            if el["op"].to_i == 0
+                if dag.vertices[i].successors.length == 0
+                    terminate_entries += 1
+                    terminate_index = i
+                end
+                terminate_overall += 1
+            end
+            i += 1
+        end unless logs.nil?
+
+        if terminate_entries != 1 && !options[:log_complete]
+            if options[:silent].nil? || !options[:silent]
+                return [nil, nil, nil, "cannot resolve DID" ]
+            end
+        end
+        return [dag, create_index, terminate_index, ""]
+    end
+
+    def self.dag2array(dag, log_array, index, result, options)
+        if options.transform_keys(&:to_s)["trace"]
+            if options[:silent].nil? || !options[:silent]
+                puts " vertex " + index.to_s + " at " + log_array[index]["ts"].to_s + " op: " + log_array[index]["op"].to_s + " doc: " + log_array[index]["doc"].to_s
+            end
+        end
+        result << log_array[index]
+        dag.vertices[index].successors.each do |s|
+            # check if successor has predecessor that is not self (i.e. REVOKE with TERMINATE)
+            s.predecessors.each do |p|
+                if p[:id] != index
+                    if options.transform_keys(&:to_s)["trace"]
+                        if options[:silent].nil? || !options[:silent]
+                            puts " vertex " + p[:id].to_s + " at " + log_array[p[:id]]["ts"].to_s + " op: " + log_array[p[:id]]["op"].to_s + " doc: " + log_array[p[:id]]["doc"].to_s
+                        end
+                    end
+                    result << log_array[p[:id]]
+                end
+            end unless s.predecessors.length < 2
+            dag2array(dag, log_array, s[:id], result, options)
+        end unless dag.vertices[index].successors.count == 0
+        result
+    end
+
+    def self.dag_update(currentDID, options)
+        i = 0
+        initial_did = currentDID["did"].to_s
+        initial_did = initial_did.delete_prefix("did:oyd:")
+        initial_did = initial_did.split("@").first
+        current_public_doc_key = ""
+        verification_output = false
+        currentDID["log"].each do |el|
+            case el["op"]
+            when 2,3 # CREATE, UPDATE
+                currentDID["doc_log_id"] = i
+
+                doc_did = el["doc"]
+                doc_location = get_location(doc_did)
+                did_hash = doc_did.delete_prefix("did:oyd:")
+                did_hash = did_hash.split("@").first
+                did10 = did_hash[0,10]
+                doc = retrieve_document_raw(doc_did, did10 + ".doc", doc_location, {})
+                if doc.first.nil?
+                    currentDID["error"] = 2
+                    msg = doc.last.to_s
+                    if msg == ""
+                        msg = "cannot retrieve " + doc_did.to_s
+                    end
+                    currentDID["message"] = msg
+                    return currentDID
+                end
+                doc = doc.first["doc"]
+                if el["op"] == 2 # CREATE
+                    if !match_log_did?(el, doc)
+                        currentDID["error"] = 1
+                        currentDID["message"] = "Signatures in log don't match"
+                        return currentDID
+                    end
+                end
+                currentDID["did"] = doc_did
+                currentDID["doc"] = doc
+                # since hash is guaranteed during retrieve_document this check is not necessary
+                # if hash(canonical(doc)) != did_hash
+                #     currentDID["error"] = 1
+                #     currentDID["message"] = "DID identifier and DID document don't match"
+                #     if did_hash == initial_did
+                #         verification_output = true
+                #     end
+                #     if verification_output
+                #         currentDID["verification"] += "identifier: " + did_hash.to_s + "\n"
+                #         currentDID["verification"] += "⛔ does not match DID Document:" + "\n"
+                #         currentDID["verification"] += JSON.pretty_generate(doc) + "\n"
+                #         currentDID["verification"] += "(Details: https://ownyourdata.github.io/oydid/#calculate_hash)" + "\n\n"
+                #     end
+                #     return currentDID
+                # end
+                if did_hash == initial_did
+                    verification_output = true
+                end
+                if verification_output
+                    currentDID["verification"] += "identifier: " + did_hash.to_s + "\n"
+                    currentDID["verification"] += "✅ is hash of DID Document:" + "\n"
+                    currentDID["verification"] += JSON.pretty_generate(doc) + "\n"
+                    currentDID["verification"] += "(Details: https://ownyourdata.github.io/oydid/#calculate_hash)" + "\n\n"
+                end
+                current_public_doc_key = currentDID["doc"]["key"].split(":").first rescue ""
+
+            when 0 # TERMINATE
+                currentDID["termination_log_id"] = i
+
+                doc_did = currentDID["did"]
+                doc_location = get_location(doc_did)
+                did_hash = doc_did.delete_prefix("did:oyd:")
+                did_hash = did_hash.split("@").first
+                did10 = did_hash[0,10]
+                doc = retrieve_document_raw(doc_did, did10 + ".doc", doc_location, {})
+                # since it retrieves a DID that previously existed, this test is not necessary
+                # if doc.first.nil?
+                #     currentDID["error"] = 2
+                #     currentDID["message"] = doc.last.to_s
+                #     return currentDID
+                # end
+                doc = doc.first["doc"]
+                term = doc["log"]
+                log_location = term.split("@")[1] rescue ""
+                if log_location.to_s == ""
+                    log_location = DEFAULT_LOCATION
+                end
+                term = term.split("@").first
+                if hash(canonical(el)) != term
+                    currentDID["error"] = 1
+                    currentDID["message"] = "Log reference and record don't match"
+                    if verification_output
+                        currentDID["verification"] += "'log' reference in DID Document: " + term.to_s + "\n"
+                        currentDID["verification"] += "⛔ does not match TERMINATE log record:" + "\n"
+                        currentDID["verification"] += JSON.pretty_generate(el) + "\n"
+                        currentDID["verification"] += "(Details: https://ownyourdata.github.io/oydid/#calculate_hash)" + "\n\n"
+                    end
+                    return currentDID
+                end
+                if verification_output
+                    currentDID["verification"] += "'log' reference in DID Document: " + term.to_s + "\n"
+                    currentDID["verification"] += "✅ is hash of TERMINATE log record:" + "\n"
+                    currentDID["verification"] += JSON.pretty_generate(el) + "\n"
+                    currentDID["verification"] += "(Details: https://ownyourdata.github.io/oydid/#calculate_hash)" + "\n\n"
+                end
+
+                # check if there is a revocation entry
+                revocation_record = {}
+                revoc_term = el["doc"]
+                revoc_term = revoc_term.split("@").first
+                revoc_term_found = false
+                log_array, msg = retrieve_log(did_hash, did10 + ".log", log_location, options)
+                log_array.each do |log_el|
+                    log_el_structure = log_el.dup
+                    if log_el["op"].to_i == 1 # TERMINATE
+                        log_el_structure.delete("previous")
+                    end
+                    if hash(canonical(log_el_structure)) == revoc_term
+                        revoc_term_found = true
+                        revocation_record = log_el.dup
+                        if verification_output
+                            currentDID["verification"] += "'doc' reference in TERMINATE log record: " + revoc_term.to_s + "\n"
+                            currentDID["verification"] += "✅ is hash of REVOCATION log record (without 'previous' attribute):" + "\n"
+                            currentDID["verification"] += JSON.pretty_generate(log_el) + "\n"
+                            currentDID["verification"] += "(Details: https://ownyourdata.github.io/oydid/#calculate_hash)" + "\n\n"
+                        end
+                        break
+                    end
+                end unless log_array.nil?
+                # this should actually be covered by retrieve_log in the block above
+                # (actually I wasn't able to craft a test case covering this part...)
+                # if !options.transform_keys(&:to_s)["log_location"].nil?
+                #     log_array, msg = retrieve_log(revoc_term, did10 + ".log", options.transform_keys(&:to_s)["log_location"], options)
+                #     log_array.each do |log_el|
+                #         if log_el["op"] == 1 # TERMINATE
+                #             log_el_structure = log_el.delete("previous")
+                #         else
+                #             log_el_structure = log_el
+                #         end
+                #         if hash(canonical(log_el_structure)) == revoc_term
+                #             revoc_term_found = true
+                #             revocation_record = log_el.dup
+                #             if verification_output
+                #                 currentDID["verification"] += "'doc' reference in TERMINATE log record: " + revoc_term.to_s + "\n"
+                #                 currentDID["verification"] += "✅ is hash of REVOCATION log record (without 'previous' attribute):" + "\n"
+                #                 currentDID["verification"] += JSON.pretty_generate(log_el) + "\n"
+                #                 currentDID["verification"] += "(Details: https://ownyourdata.github.io/oydid/#calculate_hash)" + "\n\n"
+                #             end
+                #             break
+                #         end
+                #     end
+                # end
+
+                if revoc_term_found
+                    update_term_found = false
+                    log_array.each do |log_el|
+                        if log_el["op"].to_i == 3
+                            if log_el["previous"].include?(hash(canonical(revocation_record)))
+                                update_term_found = true
+                                message = log_el["doc"].to_s
+
+                                signature = log_el["sig"]
+                                public_key = current_public_doc_key.to_s
+                                signature_verification = verify(message, signature, public_key).first
+                                if signature_verification
+                                    if verification_output
+                                        currentDID["verification"] += "found UPDATE log record:" + "\n"
+                                        currentDID["verification"] += JSON.pretty_generate(log_el) + "\n"
+                                        currentDID["verification"] += "✅ public key from last DID Document: " + current_public_doc_key.to_s + "\n"
+                                        currentDID["verification"] += "verifies 'doc' reference of new DID Document: " + log_el["doc"].to_s + "\n"
+                                        currentDID["verification"] += log_el["sig"].to_s + "\n"
+                                        currentDID["verification"] += "of next DID Document (Details: https://ownyourdata.github.io/oydid/#verify_signature)" + "\n"
+
+                                        next_doc_did = log_el["doc"].to_s
+                                        next_doc_location = get_location(next_doc_did)
+                                        next_did_hash = next_doc_did.delete_prefix("did:oyd:")
+                                        next_did_hash = next_did_hash.split("@").first
+                                        next_did10 = next_did_hash[0,10]
+                                        next_doc = retrieve_document_raw(next_doc_did, next_did10 + ".doc", next_doc_location, {})
+                                        if next_doc.first.nil?
+                                            currentDID["error"] = 2
+                                            currentDID["message"] = next_doc.last
+                                            return currentDID
+                                        end
+                                        next_doc = next_doc.first["doc"]
+                                        if public_key == next_doc["key"].split(":").first
+                                            currentDID["verification"] += "⚠️ no key rotation in updated DID Document" + "\n"
+                                        end
+                                        currentDID["verification"] += "\n"
+                                    end
+                                else
+                                    currentDID["error"] = 1
+                                    currentDID["message"] = "Signature does not match"
+                                    if verification_output
+                                        new_doc_did = log_el["doc"].to_s
+                                        new_doc_location = get_location(new_doc_did)
+                                        new_did_hash = new_doc_did.delete_prefix("did:oyd:")
+                                        new_did_hash = new_did_hash.split("@").first
+                                        new_did10 = new_did_hash[0,10]
+                                        new_doc = retrieve_document(new_doc_did, new_did10 + ".doc", new_doc_location, {}).first
+                                        currentDID["verification"] += "found UPDATE log record:" + "\n"
+                                        currentDID["verification"] += JSON.pretty_generate(log_el) + "\n"
+                                        currentDID["verification"] += "⛔ public key from last DID Document: " + current_public_doc_key.to_s + "\n"
+                                        currentDID["verification"] += "does not verify 'doc' reference of new DID Document: " + log_el["doc"].to_s + "\n"
+                                        currentDID["verification"] += log_el["sig"].to_s + "\n"
+                                        currentDID["verification"] += "next DID Document (Details: https://ownyourdata.github.io/oydid/#verify_signature)" + "\n"
+                                        currentDID["verification"] += JSON.pretty_generate(new_doc) + "\n\n"
+                                    end
+                                    return currentDID
+                                end
+                                break
+                            end
+                        end
+                    end
+
+                else
+                    if verification_output
+                        currentDID["verification"] += "Revocation reference in log record: " + revoc_term.to_s + "\n"
+                        currentDID["verification"] += "✅ cannot find revocation record searching at" + "\n"
+                        currentDID["verification"] += "- " + log_location + "\n"
+                        if !options.transform_keys(&:to_s)["log_location"].nil?
+                            currentDID["verification"] += "- " + options.transform_keys(&:to_s)["log_location"].to_s + "\n"
+                        end
+                        currentDID["verification"] += "(Details: https://ownyourdata.github.io/oydid/#retrieve_log)" + "\n\n"
+                    end
+                    break
+                end
+            when 1 # revocation log entry
+                # do nothing
+            else
+                currentDID["error"] = 2
+                currentDID["message"] = "FATAL ERROR: op code '" + el["op"].to_s + "' not implemented"
+                return currentDID
+
+            end
+            i += 1
+        end unless currentDID["log"].nil?
+
+        return currentDID
+    end
+
+end
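As rough orientation for the log records handled above: per the case branches in dag_update, the op codes map to 0 = TERMINATE, 1 = revocation, 2 = CREATE and 3 = UPDATE, and add_hash annotates each record with the multihash that other records reference via their "previous" arrays. A minimal, illustrative call (not part of the package; all field values are fabricated placeholders):

```ruby
require 'oydid'

# two fabricated log records: a CREATE (op 2) and a TERMINATE (op 0)
log = [
  { "ts" => 1650000000, "op" => 2, "doc" => "did:oyd:zQm...", "sig" => "...", "previous" => [] },
  { "ts" => 1650000001, "op" => 0, "doc" => "zQm...",         "sig" => "...", "previous" => [] }
]

# annotate each record with its entry-hash (base58btc-encoded sha2-256 multihash)
Oydid.add_hash(log).each do |entry|
  puts "op #{entry['op']} -> entry-hash #{entry['entry-hash']}"
end
```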