oydid 0.5.4 → 0.5.6
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/VERSION +1 -1
- data/lib/oydid/basic.rb +255 -12
- data/lib/oydid/didcomm.rb +2 -2
- data/lib/oydid/log.rb +194 -42
- data/lib/oydid/vc.rb +4 -2
- data/lib/oydid.rb +499 -184
- metadata +102 -102
data/lib/oydid/log.rb
CHANGED
@@ -5,11 +5,9 @@ class Oydid
     # log functions -----------------------------
     def self.add_hash(log)
         log.map do |item|
-
-
-
-            if item.transform_keys(&:to_s)["op"] == 1
-                item["sub-entry-hash"] = multi_hash(canonical(i), LOG_HASH_OPTIONS).first
+            item["entry-hash"] = multi_hash(canonical(item.slice("ts","op","doc","sig","previous")), LOG_HASH_OPTIONS).first
+            if item.transform_keys(&:to_s)["op"] == 1 # REVOKE
+                item["sub-entry-hash"] = multi_hash(canonical(item.slice("ts","op","doc","sig")), LOG_HASH_OPTIONS).first
             end
             item
         end
@@ -37,7 +35,8 @@ class Oydid

         case log_location
         when /^http/
-            log_location = log_location.
+            log_location = log_location.gsub("%3A",":")
+            log_location = log_location.gsub("%2F%2F","//")
             retVal = HTTParty.get(log_location + "/log/" + did_hash)
             if retVal.code != 200
                 msg = retVal.parsed_response("error").to_s rescue
@@ -61,29 +60,55 @@ class Oydid
         end
     end

+    def self.retrieve_log_item(log_hash, log_location, options)
+        if log_location.to_s == ""
+            log_location = DEFAULT_LOCATION
+        end
+        if !log_location.start_with?("http")
+            log_location = "https://" + log_location
+        end
+
+        case log_location
+        when /^http/
+            log_location = log_location.gsub("%3A",":")
+            log_location = log_location.gsub("%2F%2F","//")
+            retVal = HTTParty.get(log_location + "/log/" + log_hash + "/item")
+            if retVal.code != 200
+                msg = retVal.parsed_response("error").to_s rescue
+                    "invalid response from " + log_location.to_s + "/log/" + log_hash.to_s + "/item"
+                return [nil, msg]
+            end
+            if options.transform_keys(&:to_s)["trace"]
+                if options[:silent].nil? || !options[:silent]
+                    puts "GET log entry for " + log_hash + " from " + log_location
+                end
+            end
+            retVal = JSON.parse(retVal.to_s) rescue nil
+            return [retVal, ""]
+        else
+            return [nil, "cannot read from " + log_location]
+        end
+    end
+
     def self.dag_did(logs, options)
         dag = DAG.new
         dag_log = []
         log_hash = []
-
+
         # calculate hash values for each entry and build vertices
         i = 0
         create_entries = 0
         create_index = nil
         terminate_indices = []
         logs.each do |el|
-
+            case el["op"].to_i
+            when 0 # TERMINATE
+                terminate_indices << i
+            when 2 # CREATE
                 create_entries += 1
                 create_index = i
             end
-
-            terminate_indices << i
-            end
-            log_options = options.dup
-            el_hash = el["doc"].split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
-            log_options[:digest] = Oydid.get_digest(el_hash).first
-            log_options[:encode] = Oydid.get_encoding(el_hash).first
-            log_hash << Oydid.multi_hash(Oydid.canonical(el), LOG_HASH_OPTIONS).first
+            log_hash << Oydid.multi_hash(Oydid.canonical(el.slice("ts","op","doc","sig","previous")), LOG_HASH_OPTIONS).first
             dag_log << dag.add_vertex(id: i)
             i += 1
         end unless logs.nil?
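The new retrieve_log_item helper fetches a single log entry as JSON from the repository's /log/{log_hash}/item endpoint and returns the gem's usual [result, message] pair. A hypothetical call; the host and hash below are placeholders, not values from the gem:

    require 'oydid'

    # placeholder hash and repository host; any reachable OYDID repository works the same way
    entry, msg = Oydid.retrieve_log_item("zQmExampleLogEntryHash", "https://oydid.ownyourdata.eu", {})
    if entry.nil?
      warn "lookup failed: " + msg
    else
      puts entry["op"]   # numeric operation code of the retrieved log entry
    end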
@@ -94,7 +119,7 @@ class Oydid
             return [nil, nil, nil, "missing TERMINATE entries" ]
         end

-        # create edges between vertices
+        # create provisional edges between vertices
         i = 0
         logs.each do |el|
             el["previous"].each do |p|
@@ -112,21 +137,71 @@ class Oydid
         terminate_overall = 0
         terminate_index = nil
         logs.each do |el|
-            if el["op"].to_i == 0
+            if el["op"].to_i == 0 # TERMINATE
                 if dag.vertices[i].successors.length == 0
                     terminate_entries += 1
                     terminate_index = i
                 end
                 terminate_overall += 1
+            elsif el["op"].to_i == 1 # REVOKE
+                # get terminate_index for revoked DIDs
+                if dag.vertices[i].successors.length == 0
+                    dag.vertices[i].predecessors.each do |l|
+                        if logs[l[:id]]["op"].to_i == 0 # TERMINATE
+                            terminate_index = l[:id]
+                        end
+                    end
+                end
             end
             i += 1
         end unless logs.nil?

-        if terminate_entries != 1 && !options[:log_complete]
+        if terminate_entries != 1 && !options[:log_complete] && !options[:followAlsoKnownAs]
             if options[:silent].nil? || !options[:silent]
                 return [nil, nil, nil, "cannot resolve DID" ]
             end
         end
+
+        # create actual edges between vertices (but only use last terminate index for delegates)
+        dag = DAG.new
+        dag_log = []
+        log_hash = []
+
+        # calculate hash values for each entry and build vertices
+        i = 0
+        create_entries = 0
+        create_index = nil
+        terminate_indices = []
+        logs.each do |el|
+            case el["op"].to_i
+            when 0 # TERMINATE
+                terminate_indices << i
+            when 2 # CREATE
+                create_entries += 1
+                create_index = i
+            end
+            log_hash << Oydid.multi_hash(Oydid.canonical(el.slice("ts","op","doc","sig","previous")), LOG_HASH_OPTIONS).first
+            dag_log << dag.add_vertex(id: i)
+            i += 1
+        end unless logs.nil?
+        i = 0
+        logs.each do |el|
+            el["previous"].each do |p|
+                position = log_hash.find_index(p)
+                if !position.nil?
+                    if logs[position]["op"].to_i == 5 # DELEGATE
+                        if i == terminate_index
+                            # only delegates in the last terminate index are relevant
+                            dag.add_edge from: dag_log[position], to: dag_log[i]
+                        end
+                    else
+                        dag.add_edge from: dag_log[position], to: dag_log[i]
+                    end
+                end
+            end unless el["previous"] == []
+            i += 1
+        end unless logs.nil?
+
         return [dag, create_index, terminate_index, ""]
     end

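The second pass rebuilds the DAG and applies one additional rule while wiring edges: an edge originating from a DELEGATE entry (op 5) is only added when its target is the last TERMINATE index. The same rule restated as a small standalone predicate, purely for illustration (not part of the gem's API):

    # op codes as used in the hunk above: 0 = TERMINATE, 5 = DELEGATE
    def keep_edge?(predecessor_op, target_index, terminate_index)
      return target_index == terminate_index if predecessor_op == 5 # DELEGATE
      true  # edges from all other operations are always kept
    end

    keep_edge?(5, 3, 7)  # => false  delegate pointing at an outdated TERMINATE entry
    keep_edge?(5, 7, 7)  # => true   delegate attached to the current TERMINATE entry
    keep_edge?(0, 3, 7)  # => true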
@@ -151,12 +226,32 @@ class Oydid
             end unless s.predecessors.length < 2
             dag2array(dag, log_array, s[:id], result, options)
         end unless dag.vertices[index].successors.count == 0
-        result
+        result.uniq
+    end
+
+    def self.dag2array_terminate(dag, log_array, index, result, options)
+        if options.transform_keys(&:to_s)["trace"]
+            if options[:silent].nil? || !options[:silent]
+                puts " vertex " + index.to_s + " at " + log_array[index]["ts"].to_s + " op: " + log_array[index]["op"].to_s + " doc: " + log_array[index]["doc"].to_s
+            end
+        end
+        dag.vertices[index].predecessors.each do |p|
+            if p[:id] != index
+                if options.transform_keys(&:to_s)["trace"]
+                    if options[:silent].nil? || !options[:silent]
+                        puts " vertex " + p[:id].to_s + " at " + log_array[p[:id]]["ts"].to_s + " op: " + log_array[p[:id]]["op"].to_s + " doc: " + log_array[p[:id]]["doc"].to_s
+                    end
+                end
+                result << log_array[p[:id]]
+            end
+        end unless dag.vertices[index].nil?
+        result << log_array[index]
+        result.uniq
     end

     def self.dag_update(currentDID, options)
         i = 0
-        doc_location =
+        doc_location = options[:doc_location].to_s
         initial_did = currentDID["did"].to_s.dup
         initial_did = initial_did.delete_prefix("did:oyd:")
         if initial_did.include?(LOCATION_PREFIX)
@@ -164,6 +259,13 @@ class Oydid
             initial_did = tmp[0]
             doc_location = tmp[1]
         end
+        if initial_did.include?(CGI.escape LOCATION_PREFIX)
+            tmp = initial_did.split(CGI.escape LOCATION_PREFIX)
+            initial_did = tmp[0]
+            doc_location = tmp[1]
+        end
+        doc_location = doc_location.gsub("%3A",":")
+        doc_location = doc_location.gsub("%2F%2F","//")
         current_public_doc_key = ""
         verification_output = false
         currentDID["log"].each do |el|
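dag_update now also understands DIDs whose location suffix is percent-encoded: it additionally splits on the escaped separator and then undoes the %3A/%2F%2F encoding of the location. A minimal sketch with an invented DID; LOCATION_PREFIX is assumed to be "@" for the example, while the gem defines its own constant:

    require 'cgi'

    LOCATION_PREFIX = "@"   # assumption for this example

    did = "did:oyd:zQmExampleHash" + CGI.escape(LOCATION_PREFIX) + "https%3A%2F%2Frepo.example.com"
    initial_did, doc_location = did.delete_prefix("did:oyd:").split(CGI.escape LOCATION_PREFIX)
    doc_location = doc_location.gsub("%3A", ":").gsub("%2F%2F", "//")

    initial_did   # => "zQmExampleHash"
    doc_location  # => "https://repo.example.com"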
@@ -172,8 +274,9 @@ class Oydid
                 currentDID["doc_log_id"] = i
                 doc_did = el["doc"]
                 did_hash = doc_did.delete_prefix("did:oyd:")
-                did_hash = did_hash.split(LOCATION_PREFIX).first
+                did_hash = did_hash.split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
                 did10 = did_hash[0,10]
+
                 doc = retrieve_document_raw(doc_did, did10 + ".doc", doc_location, {})
                 if doc.first.nil?
                     currentDID["error"] = 2
@@ -225,7 +328,7 @@ class Oydid

                 doc_did = currentDID["did"]
                 did_hash = doc_did.delete_prefix("did:oyd:")
-                did_hash = did_hash.split(LOCATION_PREFIX).first
+                did_hash = did_hash.split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
                 did10 = did_hash[0,10]
                 doc = retrieve_document_raw(doc_did, did10 + ".doc", doc_location, {})
                 # since it retrieves a DID that previously existed, this test is not necessary
@@ -236,16 +339,16 @@ class Oydid
                 # end
                 doc = doc.first["doc"]
                 term = doc["log"]
-                log_location = term.split(LOCATION_PREFIX)
-                if log_location.to_s == ""
+                log_location = term.split(LOCATION_PREFIX).last.split(CGI.escape LOCATION_PREFIX).last rescue ""
+                if log_location.to_s == "" || log_location == term
                     log_location = DEFAULT_LOCATION
                 end
-                term = term.split(LOCATION_PREFIX).first
+                term = term.split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
                 log_options = options.dup
                 el_hash = el["doc"].split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
-                log_options[:digest] =
-                log_options[:encode] =
-                if multi_hash(canonical(el), log_options).first != term
+                log_options[:digest] = get_digest(el_hash).first
+                log_options[:encode] = get_encoding(el_hash).first
+                if multi_hash(canonical(el.slice("ts","op","doc","sig","previous")), log_options).first != term
                     currentDID["error"] = 1
                     currentDID["message"] = "Log reference and record don't match"
                     if verification_output
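The added `|| log_location == term` guard matters because String#split returns the whole string as its only element when the separator is absent, so a log reference without a location suffix would otherwise be treated as a host. Illustration with an invented reference; LOCATION_PREFIX and DEFAULT_LOCATION below are placeholders for the gem's constants:

    LOCATION_PREFIX  = "@"                            # assumption for this example
    DEFAULT_LOCATION = "https://oydid.ownyourdata.eu" # placeholder for the gem's default host

    term = "zQmLogReferenceWithoutLocation"
    log_location = term.split(LOCATION_PREFIX).last   # => "zQmLogReferenceWithoutLocation" (no suffix present)
    if log_location.to_s == "" || log_location == term
      log_location = DEFAULT_LOCATION                 # fall back instead of treating the hash as a host
    end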
@@ -266,7 +369,7 @@ class Oydid
                 # check if there is a revocation entry
                 revocation_record = {}
                 revoc_term = el["doc"]
-                revoc_term = revoc_term.split(LOCATION_PREFIX).first
+                revoc_term = revoc_term.split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
                 revoc_term_found = false
                 log_array, msg = retrieve_log(did_hash, did10 + ".log", log_location, options)
                 log_array.each do |log_el|
@@ -274,7 +377,7 @@ class Oydid
                     if log_el["op"].to_i == 1 # TERMINATE
                         log_el_structure.delete("previous")
                     end
-                    if multi_hash(canonical(log_el_structure), log_options).first == revoc_term
+                    if multi_hash(canonical(log_el_structure.slice("ts","op","doc","sig","previous")), log_options).first == revoc_term
                         revoc_term_found = true
                         revocation_record = log_el.dup
                         if verification_output
@@ -291,7 +394,7 @@ class Oydid
                 # if !options.transform_keys(&:to_s)["log_location"].nil?
                 #     log_array, msg = retrieve_log(revoc_term, did10 + ".log", options.transform_keys(&:to_s)["log_location"], options)
                 #     log_array.each do |log_el|
-                #         if log_el["op"] == 1 #
+                #         if log_el["op"] == 1 # REVOKE
                 #             log_el_structure = log_el.delete("previous")
                 #         else
                 #             log_el_structure = log_el
@@ -317,15 +420,26 @@ class Oydid
                     if log_el["previous"].include?(multi_hash(canonical(revocation_record), LOG_HASH_OPTIONS).first)
                         update_term_found = true
                         message = log_el["doc"].to_s
-
                         signature = log_el["sig"]
-                        public_key = current_public_doc_key.to_s
-
+                        # public_key = current_public_doc_key.to_s
+                        extend_currentDID = currentDID.dup
+                        extend_currentDID["log"] = extend_currentDID["full_log"]
+                        # !!!TODO: check for delegates only at certain point in time
+                        pubKeys, msg = Oydid.getDelegatedPubKeysFromFullDidDocument(extend_currentDID, "doc")
+                        signature_verification = false
+                        used_pubkey = ""
+                        pubKeys.each do |key|
+                            if Oydid.verify(message, signature, key).first
+                                signature_verification = true
+                                used_pubkey = key
+                                break
+                            end
+                        end
                         if signature_verification
                             if verification_output
                                 currentDID["verification"] += "found UPDATE log record:" + "\n"
                                 currentDID["verification"] += JSON.pretty_generate(log_el) + "\n"
-                                currentDID["verification"] += "✅ public key
+                                currentDID["verification"] += "✅ public key: " + used_pubkey.to_s + "\n"
                                 currentDID["verification"] += "verifies 'doc' reference of new DID Document: " + log_el["doc"].to_s + "\n"
                                 currentDID["verification"] += log_el["sig"].to_s + "\n"
                                 currentDID["verification"] += "of next DID Document (Details: https://ownyourdata.github.io/oydid/#verify_signature)" + "\n"
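Instead of verifying the UPDATE record against a single document key, the resolver now collects the delegated public keys (getDelegatedPubKeysFromFullDidDocument) and accepts the record if any of them verifies the signature. The same accept-if-any rule in isolation, with a dummy verifier standing in for Oydid.verify, which returns a [success, message] pair as used above:

    def first_verifying_key(message, signature, pub_keys, verifier)
      # returns the first key whose verification succeeds, or nil
      pub_keys.find { |key| verifier.call(message, signature, key).first }
    end

    dummy_verify = ->(_msg, _sig, key) { [key == "zKeyB", ""] }  # stands in for Oydid.verify
    used_pubkey = first_verifying_key("doc-ref", "sig-value", ["zKeyA", "zKeyB"], dummy_verify)
    signature_verification = !used_pubkey.nil?   # => true, used_pubkey == "zKeyB"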
@@ -333,7 +447,7 @@ class Oydid
                             next_doc_did = log_el["doc"].to_s
                             next_doc_location = doc_location
                             next_did_hash = next_doc_did.delete_prefix("did:oyd:")
-                            next_did_hash = next_did_hash.split(LOCATION_PREFIX).first
+                            next_did_hash = next_did_hash.split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
                             next_did10 = next_did_hash[0,10]
                             next_doc = retrieve_document_raw(next_doc_did, next_did10 + ".doc", next_doc_location, {})
                             if next_doc.first.nil?
@@ -342,7 +456,7 @@ class Oydid
                                 return currentDID
                             end
                             next_doc = next_doc.first["doc"]
-                            if
+                            if pubKeys.include?(next_doc["key"].split(":").first)
                                 currentDID["verification"] += "⚠️ no key rotation in updated DID Document" + "\n"
                             end
                             currentDID["verification"] += "\n"
@@ -354,12 +468,12 @@ class Oydid
                             new_doc_did = log_el["doc"].to_s
                             new_doc_location = doc_location
                             new_did_hash = new_doc_did.delete_prefix("did:oyd:")
-                            new_did_hash = new_did_hash.split(LOCATION_PREFIX).first
+                            new_did_hash = new_did_hash.split(LOCATION_PREFIX).first.split(CGI.escape LOCATION_PREFIX).first
                             new_did10 = new_did_hash[0,10]
                             new_doc = retrieve_document(new_doc_did, new_did10 + ".doc", new_doc_location, {}).first
                             currentDID["verification"] += "found UPDATE log record:" + "\n"
                             currentDID["verification"] += JSON.pretty_generate(log_el) + "\n"
-                            currentDID["verification"] += "⛔
+                            currentDID["verification"] += "⛔ none of available public keys (" + pubKeys.join(", ") + ")\n"
                             currentDID["verification"] += "does not verify 'doc' reference of new DID Document: " + log_el["doc"].to_s + "\n"
                             currentDID["verification"] += log_el["sig"].to_s + "\n"
                             currentDID["verification"] += "next DID Document (Details: https://ownyourdata.github.io/oydid/#verify_signature)" + "\n"
@@ -385,6 +499,46 @@ class Oydid
                     break
                 end
             when 1 # revocation log entry
+                # handle DID Rotation
+                if (i == (currentDID["log"].length-1))
+                    if options[:followAlsoKnownAs]
+                        current_doc = currentDID["doc"]
+                        if current_doc["doc"].transform_keys(&:to_s).has_key?("alsoKnownAs")
+                            rotate_DID = current_doc["doc"].transform_keys(&:to_s)["alsoKnownAs"]
+                            if rotate_DID.start_with?("did:")
+                                rotate_DID_method = rotate_DID.split(":").take(2).join(":")
+                                did_orig = currentDID["did"]
+                                if !did_orig.start_with?("did:oyd")
+                                    did_orig = "did:oyd:" + did_orig
+                                end
+                                case rotate_DID_method
+                                when "did:ebsi"
+                                    public_resolver = DEFAULT_PUBLIC_RESOLVER
+                                    rotate_DID_Document = HTTParty.get(public_resolver + rotate_DID)
+                                    rotate_ddoc = JSON.parse(rotate_DID_Document.parsed_response)
+                                    rotate_ddoc = rotate_ddoc.except("didDocumentMetadata", "didResolutionMetadata")
+
+                                    # checks
+                                    # 1) is original DID revoked -> fulfilled, otherwise we would not be in this branch
+                                    # 2) das new DID reference back original DID
+                                    currentDID["did"] = rotate_DID
+                                    currentDID["doc"]["doc"] = rotate_ddoc
+                                    if verification_output
+                                        currentDID["verification"] += "DID rotation to: " + rotate_DID.to_s + "\n"
+                                        currentDID["verification"] += "✅ original DID (" + did_orig + ") revoked and referenced in alsoKnownAs\n"
+                                        currentDID["verification"] += "(Details: https://ownyourdata.github.io/oydid/#did_rotation)" + "\n\n"
+                                    end
+                                when "did:oyd"
+                                    puts "try to resolve did:oyd with our own resolver"
+                                    puts "add verification text"
+                                else
+                                    # do nothing: DID Rotation is not supported for this DID method yet
+                                end
+                            end
+                        end
+                    end
+                end
+            when 5 # DELEGATE
                 # do nothing
             else
                 currentDID["error"] = 2
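DID rotation is only attempted when the revocation entry is the last entry of the log and the caller resolved with followAlsoKnownAs; the method of the alsoKnownAs target then decides how the replacement document is obtained, and the resolver metadata is stripped before it replaces the OYDID document. A reduced sketch of that selection and stripping with invented values (only did:ebsi is handled in this release; Hash#except requires Ruby 3.0+):

    rotate_did = "did:ebsi:zExampleTarget"                      # invented alsoKnownAs value
    rotate_did_method = rotate_did.split(":").take(2).join(":") # => "did:ebsi"

    # invented resolver answer; a public resolver wraps the document in resolution metadata
    resolver_answer = {
      "didDocument"           => { "id" => rotate_did },
      "didDocumentMetadata"   => {},
      "didResolutionMetadata" => {}
    }
    rotate_ddoc = resolver_answer.except("didDocumentMetadata", "didResolutionMetadata")
    # => {"didDocument"=>{"id"=>"did:ebsi:zExampleTarget"}}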
@@ -394,8 +548,6 @@ class Oydid
             end
             i += 1
         end unless currentDID["log"].nil?
-
         return currentDID
     end
-
 end
data/lib/oydid/vc.rb
CHANGED
@@ -52,7 +52,6 @@ class Oydid
             return [nil, msg]
             exit
         end
-
         retVal = HTTParty.get(vc_url,
             headers: {'Authorization' => 'Bearer ' + access_token})
         if retVal.code != 200
@@ -111,7 +110,7 @@ class Oydid
             proof["type"] = "Ed25519Signature2020"
             proof["verificationMethod"] = options[:issuer].to_s
             proof["proofPurpose"] = "assertionMethod"
-            proof["proofValue"] = sign(vercred["credentialSubject"].to_json_c14n, options[:issuer_privateKey], []).first
+            proof["proofValue"] = sign(vercred["credentialSubject"].transform_keys(&:to_s).to_json_c14n, options[:issuer_privateKey], []).first
             vercred["proof"] = proof
         else
             vercred["proof"] = content["proof"]
@@ -126,6 +125,9 @@ class Oydid
     end

     def self.create_vc_proof(content, options)
+        if content["id"].nil?
+            content["id"] = options[:holder]
+        end
         proof = {}
         proof["type"] = "Ed25519Signature2020"
         proof["verificationMethod"] = options[:issuer].to_s