oydid 0.5.4 → 0.5.5
- checksums.yaml +4 -4
- data/VERSION +1 -1
- data/lib/oydid/basic.rb +80 -4
- data/lib/oydid/didcomm.rb +2 -2
- data/lib/oydid/log.rb +144 -39
- data/lib/oydid/vc.rb +4 -2
- data/lib/oydid.rb +417 -179
- metadata +5 -5
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 4a02a41e3567cca1fb47d9f5577892707d4a3841d9505138d1164aa5e84ced98
+  data.tar.gz: 519f74a4640ebe9b8936ab1b9bc60fcd84601c6b9ee750538f28c8e1fd2b3990
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 54a1637fd933c8f077eead73351ebca631a8d33992d72870e8e08d6776d034b54ca9bf0350c630b85b0b69afcb6e3840ce01096f4806beabc7a3019897c193d3
+  data.tar.gz: 2b4d266c2bd558aea5be65c8579567e135ce05ff4277c3ae0ef0727ee65214c86149f7e2a3ec31288b783543327416817764ccda6364f42f270470482f5c6aad
data/VERSION CHANGED
@@ -1 +1 @@
-0.5.4
+0.5.5
data/lib/oydid/basic.rb CHANGED
@@ -54,7 +54,11 @@ class Oydid
     end
 
     def self.get_digest(message)
-
+        decoded_message, error = Oydid.multi_decode(message)
+        if decoded_message.nil?
+            return [nil, error]
+        end
+        retVal = Multihashes.decode decoded_message
         if retVal[:hash_function].to_s != ""
             return [retVal[:hash_function].to_s, ""]
         end
@@ -144,6 +148,29 @@ class Oydid
         end
     end
 
+    def self.getPrivateKey(enc, pwd, dsk, dfl, options)
+        if enc.to_s == "" # usually read from options[:doc_enc]
+            if pwd.to_s == "" # usually read from options[:doc_pwd]
+                if dsk.to_s == "" # usually read from options[:doc_key]
+                    if dfl.to_s == "" # default file name for key
+                        return [nil, "no reference"]
+                    else
+                        privateKey, msg = read_private_key(dfl.to_s, options)
+                    end
+                else
+                    privateKey, msg = read_private_key(dsk.to_s, options)
+                end
+            else
+                privateKey, msg = generate_private_key(pwd, 'ed25519-priv', options)
+            end
+        else
+            privateKey, msg = decode_private_key(enc.to_s, options)
+        end
+        return [privateKey, msg]
+    end
+
+    # if the identifier is already the public key there is no validation if it is a valid key
+    # (this is a privacy-preserving feature)
     def self.getPubKeyFromDID(did)
         identifier = did.split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first rescue did
         identifier = identifier.delete_prefix("did:oyd:")
@@ -166,6 +193,52 @@ class Oydid
         end
     end
 
+    # available key_types
+    # * doc - document key
+    # * rev - revocation key
+    def self.getDelegatedPubKeysFromDID(did, key_type = "doc")
+        # retrieve DID
+        did_document, msg = read(did, {})
+        keys, msg = getDelegatedPubKeysFromFullDidDocument(did_document, key_type)
+        if keys.nil?
+            return [nil, msg]
+        else
+            return [keys, ""]
+        end
+    end
+
+    def self.getDelegatedPubKeysFromFullDidDocument(did_document, key_type = "doc")
+        # get current public key
+        case key_type
+        when "doc"
+            keys = [did_document["doc"]["key"].split(":").first] rescue nil
+        when "rev"
+            keys = [did_document["doc"]["key"].split(":").last] rescue nil
+        else
+            return [nil, "invalid key type: " + key_type]
+        end
+        if keys.nil?
+            return [nil, "cannot retrieve current key"]
+        end
+
+        # travers through log and get active delegation public keys
+        log = did_document["log"]
+        log.each do |item|
+            if item["op"] == 5 # DELEGATE
+                # !!!OPEN: check if log entry is confirmed / referenced in a termination entry
+                item_keys = item["doc"]
+                if key_type == "doc" && item_keys[0..3] == "doc:"
+                    keys << item_keys[4-item_keys.length..]
+                elsif key_type == "rev" && item_keys[0..3] == "rev:"
+                    keys << item_keys[4-item_keys.length..]
+                end
+            end
+        end unless log.nil?
+
+        # return array
+        return [keys.uniq, ""]
+    end
+
     def self.sign(message, private_key, options)
         code, length, digest = multi_decode(private_key).first.unpack('SCa*')
         case Multicodecs[code].name
@@ -203,7 +276,7 @@ class Oydid
         end
     end
 
-    def self.encrypt(message, public_key, options)
+    def self.encrypt(message, public_key, options = {})
         begin
             code, length, digest = multi_decode(public_key).first.unpack('CCa*')
             case Multicodecs[code].name
@@ -229,7 +302,7 @@ class Oydid
         end
     end
 
-    def self.decrypt(message, private_key, options)
+    def self.decrypt(message, private_key, options = {})
         begin
             cipher = [JSON.parse(message)["value"]].pack('H*')
             nonce = [JSON.parse(message)["nonce"]].pack('H*')
@@ -333,7 +406,10 @@ class Oydid
         doc_location = doc_location.sub("%3A%2F%2F","://").sub("%3A", ":")
         retVal = HTTParty.get(doc_location + "/doc/" + doc_identifier)
         if retVal.code != 200
-            msg = retVal.parsed_response
+            msg = retVal.parsed_response["error"].to_s rescue ""
+            if msg.to_s == ""
+                msg = "invalid response from " + doc_location.to_s + "/doc/" + doc_identifier.to_s
+            end
             return [nil, msg]
         end
         if options.transform_keys(&:to_s)["trace"]
data/lib/oydid/didcomm.rb CHANGED
@@ -102,12 +102,12 @@ class Oydid
     # DID Auth for data container with challenge ---
     def self.token_from_challenge(host, pwd, options = {})
         sid = SecureRandom.hex(20).to_s
+        public_key = public_key(generate_private_key(pwd, options).first, options).first
         retVal = HTTParty.post(host + "/oydid/init",
             headers: { 'Content-Type' => 'application/json' },
-            body: { "session_id": sid }.to_json )
+            body: { "session_id": sid, "public_key": public_key }.to_json )
         challenge = retVal.parsed_response["challenge"]
         signed_challenge = sign(challenge, Oydid.generate_private_key(pwd, options).first, options).first
-        public_key = public_key(generate_private_key(pwd, options).first, options).first
         retVal = HTTParty.post(host + "/oydid/token",
             headers: { 'Content-Type' => 'application/json' },
             body: {
data/lib/oydid/log.rb CHANGED
@@ -5,11 +5,9 @@ class Oydid
     # log functions -----------------------------
     def self.add_hash(log)
         log.map do |item|
-
-
-
-            if item.transform_keys(&:to_s)["op"] == 1
-                item["sub-entry-hash"] = multi_hash(canonical(i), LOG_HASH_OPTIONS).first
+            item["entry-hash"] = multi_hash(canonical(item.slice("ts","op","doc","sig","previous")), LOG_HASH_OPTIONS).first
+            if item.transform_keys(&:to_s)["op"] == 1 # REVOKE
+                item["sub-entry-hash"] = multi_hash(canonical(item.slice("ts","op","doc","sig")), LOG_HASH_OPTIONS).first
             end
             item
         end
@@ -37,7 +35,8 @@ class Oydid
 
         case log_location
         when /^http/
-            log_location = log_location.
+            log_location = log_location.gsub("%3A",":")
+            log_location = log_location.gsub("%2F%2F","//")
             retVal = HTTParty.get(log_location + "/log/" + did_hash)
             if retVal.code != 200
                 msg = retVal.parsed_response("error").to_s rescue
@@ -61,29 +60,55 @@ class Oydid
         end
     end
 
+    def self.retrieve_log_item(log_hash, log_location, options)
+        if log_location.to_s == ""
+            log_location = DEFAULT_LOCATION
+        end
+        if !log_location.start_with?("http")
+            log_location = "https://" + log_location
+        end
+
+        case log_location
+        when /^http/
+            log_location = log_location.gsub("%3A",":")
+            log_location = log_location.gsub("%2F%2F","//")
+            retVal = HTTParty.get(log_location + "/log/" + log_hash + "/item")
+            if retVal.code != 200
+                msg = retVal.parsed_response("error").to_s rescue
+                    "invalid response from " + log_location.to_s + "/log/" + log_hash.to_s + "/item"
+                return [nil, msg]
+            end
+            if options.transform_keys(&:to_s)["trace"]
+                if options[:silent].nil? || !options[:silent]
+                    puts "GET log entry for " + log_hash + " from " + log_location
+                end
+            end
+            retVal = JSON.parse(retVal.to_s) rescue nil
+            return [retVal, ""]
+        else
+            return [nil, "cannot read from " + log_location]
+        end
+    end
+
     def self.dag_did(logs, options)
         dag = DAG.new
         dag_log = []
         log_hash = []
-
+
         # calculate hash values for each entry and build vertices
         i = 0
         create_entries = 0
         create_index = nil
         terminate_indices = []
         logs.each do |el|
-
+            case el["op"].to_i
+            when 0 # TERMINATE
+                terminate_indices << i
+            when 2 # CREATE
                 create_entries += 1
                 create_index = i
             end
-
-            terminate_indices << i
-            end
-            log_options = options.dup
-            el_hash = el["doc"].split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
-            log_options[:digest] = Oydid.get_digest(el_hash).first
-            log_options[:encode] = Oydid.get_encoding(el_hash).first
-            log_hash << Oydid.multi_hash(Oydid.canonical(el), LOG_HASH_OPTIONS).first
+            log_hash << Oydid.multi_hash(Oydid.canonical(el.slice("ts","op","doc","sig","previous")), LOG_HASH_OPTIONS).first
             dag_log << dag.add_vertex(id: i)
             i += 1
         end unless logs.nil?
@@ -94,7 +119,7 @@ class Oydid
             return [nil, nil, nil, "missing TERMINATE entries" ]
         end
 
-        # create edges between vertices
+        # create provisional edges between vertices
        i = 0
        logs.each do |el|
            el["previous"].each do |p|
@@ -127,6 +152,47 @@ class Oydid
                return [nil, nil, nil, "cannot resolve DID" ]
            end
        end
+
+        # create actual edges between vertices (but only use last terminate index for delegates)
+        dag = DAG.new
+        dag_log = []
+        log_hash = []
+
+        # calculate hash values for each entry and build vertices
+        i = 0
+        create_entries = 0
+        create_index = nil
+        terminate_indices = []
+        logs.each do |el|
+            case el["op"].to_i
+            when 0 # TERMINATE
+                terminate_indices << i
+            when 2 # CREATE
+                create_entries += 1
+                create_index = i
+            end
+            log_hash << Oydid.multi_hash(Oydid.canonical(el.slice("ts","op","doc","sig","previous")), LOG_HASH_OPTIONS).first
+            dag_log << dag.add_vertex(id: i)
+            i += 1
+        end unless logs.nil?
+        i = 0
+        logs.each do |el|
+            el["previous"].each do |p|
+                position = log_hash.find_index(p)
+                if !position.nil?
+                    if logs[position]["op"].to_i == 5 # DELEGATE
+                        if i == terminate_index
+                            # only delegates in the last terminate index are relevant
+                            dag.add_edge from: dag_log[position], to: dag_log[i]
+                        end
+                    else
+                        dag.add_edge from: dag_log[position], to: dag_log[i]
+                    end
+                end
+            end unless el["previous"] == []
+            i += 1
+        end unless logs.nil?
+
         return [dag, create_index, terminate_index, ""]
     end
 
@@ -154,9 +220,29 @@ class Oydid
         result
     end
 
+    def self.dag2array_terminate(dag, log_array, index, result, options)
+        if options.transform_keys(&:to_s)["trace"]
+            if options[:silent].nil? || !options[:silent]
+                puts " vertex " + index.to_s + " at " + log_array[index]["ts"].to_s + " op: " + log_array[index]["op"].to_s + " doc: " + log_array[index]["doc"].to_s
+            end
+        end
+        dag.vertices[index].predecessors.each do |p|
+            if p[:id] != index
+                if options.transform_keys(&:to_s)["trace"]
+                    if options[:silent].nil? || !options[:silent]
+                        puts " vertex " + p[:id].to_s + " at " + log_array[p[:id]]["ts"].to_s + " op: " + log_array[p[:id]]["op"].to_s + " doc: " + log_array[p[:id]]["doc"].to_s
+                    end
+                end
+                result << log_array[p[:id]]
+            end
+        end unless dag.vertices[index].nil?
+        result << log_array[index]
+        result
+    end
+
     def self.dag_update(currentDID, options)
         i = 0
-        doc_location =
+        doc_location = options[:doc_location].to_s
         initial_did = currentDID["did"].to_s.dup
         initial_did = initial_did.delete_prefix("did:oyd:")
         if initial_did.include?(LOCATION_PREFIX)
@@ -164,6 +250,13 @@ class Oydid
            initial_did = tmp[0]
            doc_location = tmp[1]
        end
+        if initial_did.include?(CGI.escape LOCATION_PREFIX)
+            tmp = initial_did.split(CGI.escape LOCATION_PREFIX)
+            initial_did = tmp[0]
+            doc_location = tmp[1]
+        end
+        doc_location = doc_location.gsub("%3A",":")
+        doc_location = doc_location.gsub("%2F%2F","//")
         current_public_doc_key = ""
         verification_output = false
         currentDID["log"].each do |el|
@@ -172,8 +265,9 @@ class Oydid
                currentDID["doc_log_id"] = i
                doc_did = el["doc"]
                did_hash = doc_did.delete_prefix("did:oyd:")
-                did_hash = did_hash.split(LOCATION_PREFIX).first
+                did_hash = did_hash.split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
                did10 = did_hash[0,10]
+
                doc = retrieve_document_raw(doc_did, did10 + ".doc", doc_location, {})
                if doc.first.nil?
                    currentDID["error"] = 2
@@ -225,7 +319,7 @@ class Oydid
 
                doc_did = currentDID["did"]
                did_hash = doc_did.delete_prefix("did:oyd:")
-                did_hash = did_hash.split(LOCATION_PREFIX).first
+                did_hash = did_hash.split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
                did10 = did_hash[0,10]
                doc = retrieve_document_raw(doc_did, did10 + ".doc", doc_location, {})
                # since it retrieves a DID that previously existed, this test is not necessary
@@ -236,16 +330,16 @@ class Oydid
                # end
                doc = doc.first["doc"]
                term = doc["log"]
-                log_location = term.split(LOCATION_PREFIX)
-                if log_location.to_s == ""
+                log_location = term.split(LOCATION_PREFIX).last.split(CGI.escape LOCATION_PREFIX).last rescue ""
+                if log_location.to_s == "" || log_location == term
                    log_location = DEFAULT_LOCATION
                end
-                term = term.split(LOCATION_PREFIX).first
+                term = term.split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
                log_options = options.dup
                el_hash = el["doc"].split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
-                log_options[:digest] =
-                log_options[:encode] =
-                if multi_hash(canonical(el), log_options).first != term
+                log_options[:digest] = get_digest(el_hash).first
+                log_options[:encode] = get_encoding(el_hash).first
+                if multi_hash(canonical(el.slice("ts","op","doc","sig","previous")), log_options).first != term
                    currentDID["error"] = 1
                    currentDID["message"] = "Log reference and record don't match"
                    if verification_output
@@ -266,7 +360,7 @@ class Oydid
                # check if there is a revocation entry
                revocation_record = {}
                revoc_term = el["doc"]
-                revoc_term = revoc_term.split(LOCATION_PREFIX).first
+                revoc_term = revoc_term.split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
                revoc_term_found = false
                log_array, msg = retrieve_log(did_hash, did10 + ".log", log_location, options)
                log_array.each do |log_el|
@@ -274,7 +368,7 @@ class Oydid
                    if log_el["op"].to_i == 1 # TERMINATE
                        log_el_structure.delete("previous")
                    end
-                    if multi_hash(canonical(log_el_structure), log_options).first == revoc_term
+                    if multi_hash(canonical(log_el_structure.slice("ts","op","doc","sig","previous")), log_options).first == revoc_term
                        revoc_term_found = true
                        revocation_record = log_el.dup
                        if verification_output
@@ -291,7 +385,7 @@ class Oydid
                # if !options.transform_keys(&:to_s)["log_location"].nil?
                #     log_array, msg = retrieve_log(revoc_term, did10 + ".log", options.transform_keys(&:to_s)["log_location"], options)
                #     log_array.each do |log_el|
-                #         if log_el["op"] == 1 #
+                #         if log_el["op"] == 1 # REVOKE
                #             log_el_structure = log_el.delete("previous")
                #         else
                #             log_el_structure = log_el
@@ -317,15 +411,26 @@ class Oydid
                    if log_el["previous"].include?(multi_hash(canonical(revocation_record), LOG_HASH_OPTIONS).first)
                        update_term_found = true
                        message = log_el["doc"].to_s
-
                        signature = log_el["sig"]
-                        public_key = current_public_doc_key.to_s
-
+                        # public_key = current_public_doc_key.to_s
+                        extend_currentDID = currentDID.dup
+                        extend_currentDID["log"] = extend_currentDID["full_log"]
+                        # !!!TODO: check for delegates only at certain point in time
+                        pubKeys, msg = Oydid.getDelegatedPubKeysFromFullDidDocument(extend_currentDID, "doc")
+                        signature_verification = false
+                        used_pubkey = ""
+                        pubKeys.each do |key|
+                            if Oydid.verify(message, signature, key).first
+                                signature_verification = true
+                                used_pubkey = key
+                                break
+                            end
+                        end
                        if signature_verification
                            if verification_output
                                currentDID["verification"] += "found UPDATE log record:" + "\n"
                                currentDID["verification"] += JSON.pretty_generate(log_el) + "\n"
-                                currentDID["verification"] += "✅ public key
+                                currentDID["verification"] += "✅ public key: " + used_pubkey.to_s + "\n"
                                currentDID["verification"] += "verifies 'doc' reference of new DID Document: " + log_el["doc"].to_s + "\n"
                                currentDID["verification"] += log_el["sig"].to_s + "\n"
                                currentDID["verification"] += "of next DID Document (Details: https://ownyourdata.github.io/oydid/#verify_signature)" + "\n"
@@ -333,7 +438,7 @@ class Oydid
                                next_doc_did = log_el["doc"].to_s
                                next_doc_location = doc_location
                                next_did_hash = next_doc_did.delete_prefix("did:oyd:")
-                                next_did_hash = next_did_hash.split(LOCATION_PREFIX).first
+                                next_did_hash = next_did_hash.split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
                                next_did10 = next_did_hash[0,10]
                                next_doc = retrieve_document_raw(next_doc_did, next_did10 + ".doc", next_doc_location, {})
                                if next_doc.first.nil?
@@ -342,7 +447,7 @@ class Oydid
                                    return currentDID
                                end
                                next_doc = next_doc.first["doc"]
-                                if
+                                if pubKeys.include?(next_doc["key"].split(":").first)
                                    currentDID["verification"] += "⚠️ no key rotation in updated DID Document" + "\n"
                                end
                                currentDID["verification"] += "\n"
@@ -354,12 +459,12 @@ class Oydid
                            new_doc_did = log_el["doc"].to_s
                            new_doc_location = doc_location
                            new_did_hash = new_doc_did.delete_prefix("did:oyd:")
-                            new_did_hash = new_did_hash.split(LOCATION_PREFIX).first
+                            new_did_hash = new_did_hash.split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
                            new_did10 = new_did_hash[0,10]
                            new_doc = retrieve_document(new_doc_did, new_did10 + ".doc", new_doc_location, {}).first
                            currentDID["verification"] += "found UPDATE log record:" + "\n"
                            currentDID["verification"] += JSON.pretty_generate(log_el) + "\n"
-                            currentDID["verification"] += "⛔
+                            currentDID["verification"] += "⛔ none of available public keys (" + pubKeys.join(", ") + ")\n"
                            currentDID["verification"] += "does not verify 'doc' reference of new DID Document: " + log_el["doc"].to_s + "\n"
                            currentDID["verification"] += log_el["sig"].to_s + "\n"
                            currentDID["verification"] += "next DID Document (Details: https://ownyourdata.github.io/oydid/#verify_signature)" + "\n"
@@ -386,6 +491,8 @@ class Oydid
                end
            when 1 # revocation log entry
                # do nothing
+            when 5 # DELEGATE
+                # do nothing
            else
                currentDID["error"] = 2
                currentDID["message"] = "FATAL ERROR: op code '" + el["op"].to_s + "' not implemented"
@@ -394,8 +501,6 @@ class Oydid
            end
            i += 1
        end unless currentDID["log"].nil?
-
        return currentDID
    end
-
 end
data/lib/oydid/vc.rb CHANGED
@@ -52,7 +52,6 @@ class Oydid
             return [nil, msg]
             exit
         end
-
         retVal = HTTParty.get(vc_url,
             headers: {'Authorization' => 'Bearer ' + access_token})
         if retVal.code != 200
@@ -111,7 +110,7 @@ class Oydid
             proof["type"] = "Ed25519Signature2020"
             proof["verificationMethod"] = options[:issuer].to_s
             proof["proofPurpose"] = "assertionMethod"
-            proof["proofValue"] = sign(vercred["credentialSubject"].to_json_c14n, options[:issuer_privateKey], []).first
+            proof["proofValue"] = sign(vercred["credentialSubject"].transform_keys(&:to_s).to_json_c14n, options[:issuer_privateKey], []).first
             vercred["proof"] = proof
         else
             vercred["proof"] = content["proof"]
@@ -126,6 +125,9 @@ class Oydid
     end
 
     def self.create_vc_proof(content, options)
+        if content["id"].nil?
+            content["id"] = options[:holder]
+        end
         proof = {}
         proof["type"] = "Ed25519Signature2020"
         proof["verificationMethod"] = options[:issuer].to_s