dkg 8.0.0a2__py3-none-any.whl → 8.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. dkg/__init__.py +1 -1
  2. dkg/assertion.py +2 -2
  3. dkg/clients/__init__.py +4 -0
  4. dkg/clients/async_dkg.py +109 -0
  5. dkg/{main.py → clients/dkg.py} +42 -21
  6. dkg/constants.py +117 -6
  7. dkg/data/interfaces/AskStorage.json +366 -0
  8. dkg/data/interfaces/Chronos.json +202 -0
  9. dkg/data/interfaces/Hub.json +294 -2
  10. dkg/data/interfaces/IdentityStorage.json +58 -0
  11. dkg/data/interfaces/{ContentAsset.json → KnowledgeCollection.json} +256 -343
  12. dkg/data/interfaces/KnowledgeCollectionStorage.json +2312 -0
  13. dkg/data/interfaces/Paranet.json +30 -214
  14. dkg/data/interfaces/ParanetIncentivesPoolFactory.json +18 -2
  15. dkg/data/interfaces/ParanetKnowledgeMinersRegistry.json +20 -4
  16. dkg/data/interfaces/{ParanetNeurowebIncentivesPool.json → ParanetNeuroIncentivesPool.json} +7 -7
  17. dkg/data/interfaces/ParanetsRegistry.json +102 -32
  18. dkg/data/interfaces/Token.json +146 -17
  19. dkg/managers/__init__.py +0 -0
  20. dkg/managers/async_manager.py +69 -0
  21. dkg/{manager.py → managers/manager.py} +5 -3
  22. dkg/method.py +5 -2
  23. dkg/modules/__init__.py +0 -0
  24. dkg/modules/asset/__init__.py +0 -0
  25. dkg/modules/asset/asset.py +739 -0
  26. dkg/modules/asset/async_asset.py +751 -0
  27. dkg/modules/async_module.py +66 -0
  28. dkg/modules/graph/__init__.py +0 -0
  29. dkg/modules/graph/async_graph.py +118 -0
  30. dkg/modules/graph/graph.py +94 -0
  31. dkg/{module.py → modules/module.py} +1 -1
  32. dkg/modules/network/__init__.py +0 -0
  33. dkg/{network.py → modules/network/network.py} +4 -4
  34. dkg/modules/node/__init__.py +0 -0
  35. dkg/modules/node/async_node.py +39 -0
  36. dkg/{node.py → modules/node/node.py} +2 -2
  37. dkg/modules/paranet/__init__.py +0 -0
  38. dkg/{paranet.py → modules/paranet/paranet.py} +2 -2
  39. dkg/providers/__init__.py +9 -2
  40. dkg/providers/blockchain/__init__.py +4 -0
  41. dkg/providers/blockchain/async_blockchain.py +245 -0
  42. dkg/providers/blockchain/base_blockchain.py +102 -0
  43. dkg/providers/{blockchain.py → blockchain/blockchain.py} +15 -96
  44. dkg/providers/node/__init__.py +4 -0
  45. dkg/providers/node/async_node_http.py +72 -0
  46. dkg/providers/node/base_node_http.py +25 -0
  47. dkg/providers/{node_http.py → node/node_http.py} +12 -10
  48. dkg/services/__init__.py +0 -0
  49. dkg/services/blockchain_services/__init__.py +0 -0
  50. dkg/services/blockchain_services/async_blockchain_service.py +180 -0
  51. dkg/services/blockchain_services/blockchain_service.py +174 -0
  52. dkg/services/input_service.py +183 -0
  53. dkg/services/node_services/__init__.py +0 -0
  54. dkg/services/node_services/async_node_service.py +184 -0
  55. dkg/services/node_services/node_service.py +167 -0
  56. dkg/types/__init__.py +11 -11
  57. dkg/utils/blockchain_request.py +68 -42
  58. dkg/utils/knowledge_asset_tools.py +5 -0
  59. dkg/utils/knowledge_collection_tools.py +248 -0
  60. dkg/utils/node_request.py +60 -13
  61. dkg/utils/rdf.py +9 -3
  62. {dkg-8.0.0a2.dist-info → dkg-8.0.1.dist-info}/METADATA +28 -19
  63. dkg-8.0.1.dist-info/RECORD +82 -0
  64. {dkg-8.0.0a2.dist-info → dkg-8.0.1.dist-info}/WHEEL +1 -1
  65. dkg/asset.py +0 -912
  66. dkg/data/interfaces/AssertionStorage.json +0 -229
  67. dkg/data/interfaces/ContentAssetStorage.json +0 -706
  68. dkg/data/interfaces/ServiceAgreementStorageProxy.json +0 -1314
  69. dkg/graph.py +0 -63
  70. dkg-8.0.0a2.dist-info/RECORD +0 -52
  71. {dkg-8.0.0a2.dist-info → dkg-8.0.1.dist-info}/LICENSE +0 -0
  72. {dkg-8.0.0a2.dist-info → dkg-8.0.1.dist-info}/NOTICE +0 -0
@@ -0,0 +1,739 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+
18
+ import json
19
+ import hashlib
20
+ from typing import Literal
21
+ from pyld import jsonld
22
+ from web3 import Web3
23
+ from web3.constants import ADDRESS_ZERO
24
+ from web3.types import TxReceipt
25
+ from itertools import chain
26
+ from eth_abi.packed import encode_packed
27
+ from eth_account.messages import encode_defunct
28
+ from eth_account import Account
29
+ from hexbytes import HexBytes
30
+
31
+ from dkg.constants import (
32
+ PRIVATE_ASSERTION_PREDICATE,
33
+ PRIVATE_RESOURCE_PREDICATE,
34
+ PRIVATE_HASH_SUBJECT_PREFIX,
35
+ CHUNK_BYTE_SIZE,
36
+ MAX_FILE_SIZE,
37
+ DEFAULT_RDF_FORMAT,
38
+ Operations,
39
+ OutputTypes,
40
+ )
41
+ from dkg.dataclasses import (
42
+ NodeResponseDict,
43
+ )
44
+ from dkg.managers.manager import DefaultRequestManager
45
+ from dkg.method import Method
46
+ from dkg.modules.module import Module
47
+ from dkg.types import JSONLD, UAL, Address, HexStr
48
+ from dkg.utils.blockchain_request import (
49
+ BlockchainRequest,
50
+ )
51
+ from dkg.utils.node_request import (
52
+ NodeRequest,
53
+ OperationStatus,
54
+ get_operation_status_object,
55
+ )
56
+ from dkg.utils.ual import format_ual, parse_ual
57
+ import dkg.utils.knowledge_collection_tools as kc_tools
58
+ import dkg.utils.knowledge_asset_tools as ka_tools
59
+ from dkg.services.input_service import InputService
60
+ from dkg.services.node_services.node_service import NodeService
61
+ from dkg.services.blockchain_services.blockchain_service import BlockchainService
62
+
63
+
64
class KnowledgeAsset(Module):
    """Knowledge Asset operations: create, get, transfer, burn, paranet submit."""

    def __init__(
        self,
        manager: DefaultRequestManager,
        input_service: InputService,
        node_service: NodeService,
        blockchain_service: BlockchainService,
    ):
        # Request manager plus the service layer this module delegates to.
        self.manager = manager
        self.input_service = input_service
        self.node_service = node_service
        self.blockchain_service = blockchain_service
77
+ def is_valid_ual(self, ual: UAL) -> bool:
78
+ if not ual or not isinstance(ual, str):
79
+ raise ValueError("UAL must be a non-empty string.")
80
+
81
+ parts = ual.split("/")
82
+ if len(parts) != 3:
83
+ raise ValueError("UAL format is incorrect.")
84
+
85
+ prefixes = parts[0].split(":")
86
+ prefixes_number = len(prefixes)
87
+ if prefixes_number != 3 and prefixes_number != 4:
88
+ raise ValueError("Prefix format in UAL is incorrect.")
89
+
90
+ if prefixes[0] != "did":
91
+ raise ValueError(
92
+ f"Invalid DID prefix. Expected: 'did'. Received: '{prefixes[0]}'."
93
+ )
94
+
95
+ if prefixes[1] != "dkg":
96
+ raise ValueError(
97
+ f"Invalid DKG prefix. Expected: 'dkg'. Received: '{prefixes[1]}'."
98
+ )
99
+
100
+ if prefixes[2] != (
101
+ blockchain_name := (
102
+ self.manager.blockchain_provider.blockchain_id.split(":")[0]
103
+ )
104
+ ):
105
+ raise ValueError(
106
+ "Invalid blockchain name in the UAL prefix. "
107
+ f"Expected: '{blockchain_name}'. Received: '${prefixes[2]}'."
108
+ )
109
+
110
+ if prefixes_number == 4:
111
+ chain_id = self.manager.blockchain_provider.blockchain_id.split(":")[1]
112
+
113
+ if int(prefixes[3]) != int(chain_id):
114
+ raise ValueError(
115
+ "Chain ID in UAL does not match the blockchain. "
116
+ f"Expected: '${chain_id}'. Received: '${prefixes[3]}'."
117
+ )
118
+
119
+ contract_address = self.manager.blockchain_provider.contracts[
120
+ "ContentAssetStorage"
121
+ ].address
122
+
123
+ if parts[1].lower() != contract_address.lower():
124
+ raise ValueError(
125
+ "Contract address in UAL does not match. "
126
+ f"Expected: '${contract_address.lower()}'. "
127
+ f"Received: '${parts[1].lower()}'."
128
+ )
129
+
130
+ try:
131
+ owner = self.blockchain_service.get_owner(int(parts[2]))
132
+
133
+ if not owner or owner == ADDRESS_ZERO:
134
+ raise ValueError("Token does not exist or has no owner.")
135
+
136
+ return True
137
+ except Exception as err:
138
+ raise ValueError(f"Error fetching asset owner: {err}")
139
+
140
+ def process_content(self, content: str) -> list:
141
+ return [line.strip() for line in content.split("\n") if line.strip() != ""]
142
+
143
+ def solidity_packed_sha256(self, types: list[str], values: list) -> str:
144
+ # Encode the values using eth_abi's encode_packed
145
+ packed_data = encode_packed(types, values)
146
+
147
+ # Calculate SHA256
148
+ sha256_hash = hashlib.sha256(packed_data).hexdigest()
149
+
150
+ return f"0x{sha256_hash}"
151
+
152
+ def insert_triple_sorted(self, triples_list: list, new_triple: str) -> int:
153
+ # Assuming triples_list is already sorted
154
+ left = 0
155
+ right = len(triples_list)
156
+
157
+ while left < right:
158
+ mid = (left + right) // 2
159
+ if triples_list[mid] < new_triple:
160
+ left = mid + 1
161
+ else:
162
+ right = mid
163
+
164
+ # Insert the new triple at the correct position
165
+ triples_list.insert(left, new_triple)
166
+ return left
167
+
168
+ def get_operation_status_dict(self, operation_result, operation_id):
169
+ # Check if data exists and has errorType
170
+ operation_data = (
171
+ {"status": operation_result.get("status"), **operation_result.get("data")}
172
+ if operation_result.get("data")
173
+ and operation_result.get("data", {}).get("errorType")
174
+ else {"status": operation_result.get("status")}
175
+ )
176
+
177
+ return {"operationId": operation_id, **operation_data}
178
+
179
+ def get_message_signer_address(self, dataset_root: str, signature: dict):
180
+ message = encode_defunct(HexBytes(dataset_root))
181
+ r, s, v = signature.get("r"), signature.get("s"), signature.get("v")
182
+ r = r[2:] if r.startswith("0x") else r
183
+ s = s[2:] if s.startswith("0x") else s
184
+
185
+ sig = "0x" + r + s + hex(v)[2:].zfill(2)
186
+
187
+ return Account.recover_message(message, signature=sig)
188
+
189
+ def create(
190
+ self,
191
+ content: dict[Literal["public", "private"], JSONLD],
192
+ options: dict = {},
193
+ ) -> dict[str, UAL | HexStr | dict[str, dict[str, str] | TxReceipt]]:
194
+ arguments = self.input_service.get_asset_create_arguments(options)
195
+
196
+ max_number_of_retries = arguments.get("max_number_of_retries")
197
+ frequency = arguments.get("frequency")
198
+ epochs_num = arguments.get("epochs_num")
199
+ hash_function_id = arguments.get("hash_function_id")
200
+ immutable = arguments.get("immutable")
201
+ token_amount = arguments.get("token_amount")
202
+ payer = arguments.get("payer")
203
+ minimum_number_of_finalization_confirmations = arguments.get(
204
+ "minimum_number_of_finalization_confirmations"
205
+ )
206
+ minimum_number_of_node_replications = arguments.get(
207
+ "minimum_number_of_node_replications"
208
+ )
209
+ blockchain_id = self.manager.blockchain_provider.blockchain_id
210
+
211
+ dataset = {}
212
+ public_content = dataset.get("public")
213
+ private_content = dataset.get("private")
214
+ if isinstance(content, str):
215
+ dataset["public"] = self.process_content(content)
216
+ elif isinstance(public_content, str) or (
217
+ not public_content and private_content and isinstance(private_content, str)
218
+ ):
219
+ if public_content:
220
+ dataset["public"] = self.process_content(public_content)
221
+ else:
222
+ dataset["public"] = []
223
+
224
+ if private_content and isinstance(private_content, str):
225
+ dataset["private"] = self.process_content(private_content)
226
+ else:
227
+ dataset = kc_tools.format_dataset(content)
228
+
229
+ public_triples_grouped = []
230
+
231
+ dataset["public"] = kc_tools.generate_missing_ids_for_blank_nodes(
232
+ dataset.get("public")
233
+ )
234
+
235
+ if dataset.get("private") and len(dataset.get("private")):
236
+ dataset["private"] = kc_tools.generate_missing_ids_for_blank_nodes(
237
+ dataset.get("private")
238
+ )
239
+
240
+ # Group private triples by subject and flatten
241
+ private_triples_grouped = kc_tools.group_nquads_by_subject(
242
+ dataset.get("private"), True
243
+ )
244
+
245
+ dataset["private"] = list(chain.from_iterable(private_triples_grouped))
246
+
247
+ # Compute private root and add to public
248
+ private_root = kc_tools.calculate_merkle_root(dataset.get("private"))
249
+ dataset["public"].append(
250
+ f'<{ka_tools.generate_named_node()}> <{PRIVATE_ASSERTION_PREDICATE}> "{private_root}" .'
251
+ )
252
+
253
+ # Compute private root and add to public
254
+ public_triples_grouped = kc_tools.group_nquads_by_subject(
255
+ dataset.get("public"), True
256
+ )
257
+
258
+ # Create a dictionary for public subject -> index for quick lookup
259
+ public_subject_dict = {}
260
+ for i in range(len(public_triples_grouped)):
261
+ public_subject = public_triples_grouped[i][0].split(" ")[0]
262
+ public_subject_dict[public_subject] = i
263
+
264
+ private_triple_subject_hashes_grouped_without_public_pair = []
265
+
266
+ # Integrate private subjects into public or store separately if no match to be appended later
267
+ for private_triples in private_triples_grouped:
268
+ private_subject = private_triples[0].split(" ")[
269
+ 0
270
+ ] # Extract the private subject
271
+
272
+ private_subject_hash = self.solidity_packed_sha256(
273
+ types=["string"],
274
+ values=[private_subject[1:-1]],
275
+ )
276
+
277
+ if (
278
+ private_subject in public_subject_dict
279
+ ): # Check if there's a public pair
280
+ # If there's a public pair, insert a representation in that group
281
+ public_index = public_subject_dict.get(private_subject)
282
+ self.insert_triple_sorted(
283
+ public_triples_grouped[public_index],
284
+ f"{private_subject} <{PRIVATE_RESOURCE_PREDICATE}> <{ka_tools.generate_named_node()}> .",
285
+ )
286
+ else:
287
+ # If no public pair, maintain separate list, inserting sorted by hash
288
+ self.insert_triple_sorted(
289
+ private_triple_subject_hashes_grouped_without_public_pair,
290
+ f"<{PRIVATE_HASH_SUBJECT_PREFIX}{private_subject_hash}> <{PRIVATE_RESOURCE_PREDICATE}> <{ka_tools.generate_named_node()}> .",
291
+ )
292
+
293
+ for triple in private_triple_subject_hashes_grouped_without_public_pair:
294
+ public_triples_grouped.append([triple])
295
+
296
+ dataset["public"] = list(chain.from_iterable(public_triples_grouped))
297
+ else:
298
+ # No private triples, just group and flatten public
299
+ public_triples_grouped = kc_tools.group_nquads_by_subject(
300
+ dataset.get("public"), True
301
+ )
302
+ dataset["public"] = list(chain.from_iterable(public_triples_grouped))
303
+
304
+ # Calculate the number of chunks
305
+ number_of_chunks = kc_tools.calculate_number_of_chunks(
306
+ dataset.get("public"), CHUNK_BYTE_SIZE
307
+ )
308
+ dataset_size = number_of_chunks * CHUNK_BYTE_SIZE
309
+
310
+ # Validate the assertion size in bytes
311
+ if dataset_size > MAX_FILE_SIZE:
312
+ raise ValueError(f"File size limit is {MAX_FILE_SIZE / (1024 * 1024)}MB.")
313
+
314
+ # Calculate the Merkle root
315
+ dataset_root = kc_tools.calculate_merkle_root(dataset.get("public"))
316
+
317
+ # Get the contract address for KnowledgeCollectionStorage
318
+ content_asset_storage_address = (
319
+ self.blockchain_service.get_asset_storage_address(
320
+ "KnowledgeCollectionStorage"
321
+ )
322
+ )
323
+
324
+ publish_operation_id = self.node_service.publish(
325
+ dataset_root,
326
+ dataset,
327
+ blockchain_id,
328
+ hash_function_id,
329
+ minimum_number_of_node_replications,
330
+ )["operationId"]
331
+ publish_operation_result = self.node_service.get_operation_result(
332
+ publish_operation_id,
333
+ Operations.PUBLISH.value,
334
+ max_number_of_retries,
335
+ frequency,
336
+ )
337
+
338
+ if publish_operation_result.get(
339
+ "status"
340
+ ) != OperationStatus.COMPLETED and not publish_operation_result.get(
341
+ "data", {}
342
+ ).get("minAcksReached"):
343
+ return {
344
+ "datasetRoot": dataset_root,
345
+ "operation": {
346
+ "publish": self.get_operation_status_dict(
347
+ publish_operation_result, publish_operation_id
348
+ )
349
+ },
350
+ }
351
+
352
+ data = publish_operation_result.get("data", {})
353
+ signatures = data.get("signatures")
354
+
355
+ publisher_node_signature = data.get("publisherNodeSignature", {})
356
+ publisher_node_identity_id = publisher_node_signature.get("identityId")
357
+ publisher_node_r = publisher_node_signature.get("r")
358
+ publisher_node_vs = publisher_node_signature.get("vs")
359
+
360
+ identity_ids, r, vs = [], [], []
361
+
362
+ for signature in signatures:
363
+ try:
364
+ signer_address = self.get_message_signer_address(
365
+ dataset_root, signature
366
+ )
367
+
368
+ key_is_operational_wallet = (
369
+ self.blockchain_service.key_is_operational_wallet(
370
+ signature.get("identityId"),
371
+ Web3.solidity_keccak(["address"], [signer_address]),
372
+ 2, # IdentityLib.OPERATIONAL_KEY
373
+ )
374
+ )
375
+
376
+ # If valid, append the signature components
377
+ if key_is_operational_wallet:
378
+ identity_ids.append(signature.get("identityId"))
379
+ r.append(signature.get("r"))
380
+ vs.append(signature.get("vs"))
381
+
382
+ except Exception:
383
+ continue
384
+
385
+ if token_amount:
386
+ estimated_publishing_cost = token_amount
387
+ else:
388
+ time_until_next_epoch = self.blockchain_service.time_until_next_epoch()
389
+ epoch_length = self.blockchain_service.epoch_length()
390
+ stake_weighted_average_ask = (
391
+ self.blockchain_service.get_stake_weighted_average_ask()
392
+ )
393
+
394
+ # Convert to integers and perform calculation
395
+ estimated_publishing_cost = (
396
+ (
397
+ int(stake_weighted_average_ask)
398
+ * (
399
+ int(epochs_num) * int(1e18)
400
+ + (int(time_until_next_epoch) * int(1e18)) // int(epoch_length)
401
+ )
402
+ * int(dataset_size)
403
+ )
404
+ // 1024
405
+ // int(1e18)
406
+ )
407
+
408
+ knowledge_collection_id = None
409
+ mint_knowledge_asset_receipt = None
410
+
411
+ knowledge_collection_result = (
412
+ self.blockchain_service.create_knowledge_collection(
413
+ {
414
+ "publishOperationId": publish_operation_id,
415
+ "merkleRoot": dataset_root,
416
+ "knowledgeAssetsAmount": kc_tools.count_distinct_subjects(
417
+ dataset.get("public")
418
+ ),
419
+ "byteSize": dataset_size,
420
+ "epochs": epochs_num,
421
+ "tokenAmount": estimated_publishing_cost,
422
+ "isImmutable": immutable,
423
+ "paymaster": payer,
424
+ "publisherNodeIdentityId": publisher_node_identity_id,
425
+ "publisherNodeR": publisher_node_r,
426
+ "publisherNodeVS": publisher_node_vs,
427
+ "identityIds": identity_ids,
428
+ "r": r,
429
+ "vs": vs,
430
+ },
431
+ None,
432
+ None,
433
+ )
434
+ )
435
+ knowledge_collection_id = knowledge_collection_result.knowledge_collection_id
436
+ mint_knowledge_asset_receipt = knowledge_collection_result.receipt
437
+
438
+ ual = format_ual(
439
+ blockchain_id, content_asset_storage_address, knowledge_collection_id
440
+ )
441
+
442
+ finality_status_result = 0
443
+ if minimum_number_of_finalization_confirmations > 0:
444
+ finality_status_result = self.node_service.finality_status(
445
+ ual,
446
+ minimum_number_of_finalization_confirmations,
447
+ max_number_of_retries,
448
+ frequency,
449
+ )
450
+
451
+ return json.loads(
452
+ Web3.to_json(
453
+ {
454
+ "UAL": ual,
455
+ "datasetRoot": dataset_root,
456
+ "signatures": publish_operation_result.get("data", {}).get(
457
+ "signatures"
458
+ ),
459
+ "operation": {
460
+ "mintKnowledgeAsset": mint_knowledge_asset_receipt,
461
+ "publish": get_operation_status_object(
462
+ publish_operation_result, publish_operation_id
463
+ ),
464
+ "finality": {
465
+ "status": (
466
+ "FINALIZED"
467
+ if finality_status_result
468
+ >= minimum_number_of_finalization_confirmations
469
+ else "NOT FINALIZED"
470
+ )
471
+ },
472
+ "numberOfConfirmations": finality_status_result,
473
+ "requiredConfirmations": minimum_number_of_finalization_confirmations,
474
+ },
475
+ }
476
+ )
477
+ )
478
+
479
+ _submit_knowledge_asset = Method(BlockchainRequest.submit_knowledge_asset)
480
+
481
+ def submit_to_paranet(
482
+ self, ual: UAL, paranet_ual: UAL
483
+ ) -> dict[str, UAL | Address | TxReceipt]:
484
+ parsed_ual = parse_ual(ual)
485
+ knowledge_asset_storage, knowledge_asset_token_id = (
486
+ parsed_ual["contract_address"],
487
+ parsed_ual["token_id"],
488
+ )
489
+
490
+ parsed_paranet_ual = parse_ual(paranet_ual)
491
+ paranet_knowledge_asset_storage, paranet_knowledge_asset_token_id = (
492
+ parsed_paranet_ual["contract_address"],
493
+ parsed_paranet_ual["token_id"],
494
+ )
495
+
496
+ receipt: TxReceipt = self._submit_knowledge_asset(
497
+ paranet_knowledge_asset_storage,
498
+ paranet_knowledge_asset_token_id,
499
+ knowledge_asset_storage,
500
+ knowledge_asset_token_id,
501
+ )
502
+
503
+ return {
504
+ "UAL": ual,
505
+ "paranetUAL": paranet_ual,
506
+ "paranetId": Web3.to_hex(
507
+ Web3.solidity_keccak(
508
+ ["address", "uint256"],
509
+ [knowledge_asset_storage, knowledge_asset_token_id],
510
+ )
511
+ ),
512
+ "operation": json.loads(Web3.to_json(receipt)),
513
+ }
514
+
515
+ _transfer = Method(BlockchainRequest.transfer_asset)
516
+
517
+ def transfer(
518
+ self,
519
+ ual: UAL,
520
+ new_owner: Address,
521
+ ) -> dict[str, UAL | Address | TxReceipt]:
522
+ token_id = parse_ual(ual)["token_id"]
523
+
524
+ receipt: TxReceipt = self._transfer(
525
+ self.manager.blockchain_provider.account,
526
+ new_owner,
527
+ token_id,
528
+ )
529
+
530
+ return {
531
+ "UAL": ual,
532
+ "owner": new_owner,
533
+ "operation": json.loads(Web3.to_json(receipt)),
534
+ }
535
+
536
+ _burn_asset = Method(BlockchainRequest.burn_asset)
537
+
538
+ def burn(self, ual: UAL) -> dict[str, UAL | TxReceipt]:
539
+ token_id = parse_ual(ual)["token_id"]
540
+
541
+ receipt: TxReceipt = self._burn_asset(token_id)
542
+
543
+ return {"UAL": ual, "operation": json.loads(Web3.to_json(receipt))}
544
+
545
+ _get_latest_assertion_id = Method(BlockchainRequest.get_latest_assertion_id)
546
+
547
+ _get = Method(NodeRequest.get)
548
+ _query = Method(NodeRequest.query)
549
+
550
+ def get(self, ual: UAL, options: dict = {}) -> dict:
551
+ arguments = self.input_service.get_asset_get_arguments(options)
552
+
553
+ max_number_of_retries = arguments.get("max_number_of_retries")
554
+ frequency = arguments.get("frequency")
555
+ state = arguments.get("state")
556
+ include_metadata = arguments.get("include_metadata")
557
+ content_type = arguments.get("content_type")
558
+ validate = arguments.get("validate")
559
+ output_format = arguments.get("output_format")
560
+ hash_function_id = arguments.get("hash_function_id")
561
+ paranet_ual = arguments.get("paranet_ual")
562
+ subject_ual = arguments.get("subject_ual")
563
+
564
+ ual_with_state = f"{ual}:{state}" if state else ual
565
+ get_public_operation_id: NodeResponseDict = self.node_service.get(
566
+ ual_with_state,
567
+ content_type,
568
+ include_metadata,
569
+ hash_function_id,
570
+ paranet_ual,
571
+ subject_ual,
572
+ )["operationId"]
573
+
574
+ get_public_operation_result = self.node_service.get_operation_result(
575
+ get_public_operation_id,
576
+ Operations.GET.value,
577
+ max_number_of_retries,
578
+ frequency,
579
+ )
580
+
581
+ if subject_ual:
582
+ if get_public_operation_result.get("data"):
583
+ return {
584
+ "operation": {
585
+ "get": get_operation_status_object(
586
+ get_public_operation_result, get_public_operation_id
587
+ ),
588
+ },
589
+ "subject_ual_pairs": get_public_operation_result.get("data"),
590
+ }
591
+ if get_public_operation_result.get("status") != "FAILED":
592
+ get_public_operation_result["data"] = {
593
+ "errorType": "DKG_CLIENT_ERROR",
594
+ "errorMessage": "Unable to find assertion on the network!",
595
+ }
596
+ get_public_operation_result["status"] = "FAILED"
597
+
598
+ return {
599
+ "operation": {
600
+ "get": get_operation_status_object(
601
+ get_public_operation_result, get_public_operation_id
602
+ ),
603
+ },
604
+ }
605
+ metadata = get_public_operation_result.get("data")
606
+ assertion = get_public_operation_result.get("data", {}).get("assertion", None)
607
+
608
+ if not assertion:
609
+ if get_public_operation_result.get("status") != "FAILED":
610
+ get_public_operation_result["data"] = {
611
+ "errorType": "DKG_CLIENT_ERROR",
612
+ "errorMessage": "Unable to find assertion on the network!",
613
+ }
614
+ get_public_operation_result["status"] = "FAILED"
615
+
616
+ return {
617
+ "operation": {
618
+ "get": get_operation_status_object(
619
+ get_public_operation_result, get_public_operation_id
620
+ ),
621
+ },
622
+ }
623
+
624
+ if validate:
625
+ is_valid = True # #TODO: Implement assertion validation logic
626
+ if not is_valid:
627
+ get_public_operation_result["data"] = {
628
+ "error_type": "DKG_CLIENT_ERROR",
629
+ "error_message": "Calculated root hashes don't match!",
630
+ }
631
+
632
+ formatted_assertion = "\n".join(
633
+ assertion.get("public", [])
634
+ + (
635
+ assertion.get("private")
636
+ if isinstance(assertion.get("private"), list)
637
+ else []
638
+ )
639
+ )
640
+
641
+ formatted_metadata = None
642
+ if output_format == OutputTypes.JSONLD.value:
643
+ formatted_assertion = self.to_jsonld(formatted_assertion)
644
+
645
+ if include_metadata:
646
+ formatted_metadata = self.to_jsonld("\n".join(metadata))
647
+
648
+ if output_format == OutputTypes.NQUADS.value:
649
+ formatted_assertion = self.to_nquads(
650
+ formatted_assertion, DEFAULT_RDF_FORMAT
651
+ )
652
+ if include_metadata:
653
+ formatted_metadata = self.to_nquads(
654
+ "\n".join(metadata), DEFAULT_RDF_FORMAT
655
+ )
656
+
657
+ result = {
658
+ "assertion": formatted_assertion,
659
+ "operation": {
660
+ "get": get_operation_status_object(
661
+ get_public_operation_result, get_public_operation_id
662
+ ),
663
+ },
664
+ }
665
+
666
+ if include_metadata and metadata:
667
+ result["metadata"] = formatted_metadata
668
+
669
+ return result
670
+
671
+ # _extend_storing_period = Method(BlockchainRequest.extend_asset_storing_period)
672
+
673
+ # def extend_storing_period(
674
+ # self,
675
+ # ual: UAL,
676
+ # additional_epochs: int,
677
+ # token_amount: Wei | None = None,
678
+ # ) -> dict[str, UAL | TxReceipt]:
679
+ # parsed_ual = parse_ual(ual)
680
+ # blockchain_id, content_asset_storage_address, token_id = (
681
+ # parsed_ual["blockchain"],
682
+ # parsed_ual["contract_address"],
683
+ # parsed_ual["token_id"],
684
+ # )
685
+
686
+ # if token_amount is None:
687
+ # latest_finalized_state = self._get_latest_assertion_id(token_id)
688
+ # latest_finalized_state_size = self._get_assertion_size(
689
+ # latest_finalized_state
690
+ # )
691
+
692
+ # token_amount = int(
693
+ # self._get_bid_suggestion(
694
+ # blockchain_id,
695
+ # additional_epochs,
696
+ # latest_finalized_state_size,
697
+ # content_asset_storage_address,
698
+ # latest_finalized_state,
699
+ # DefaultParameters.HASH_FUNCTION_ID.value,
700
+ # token_amount or BidSuggestionRange.LOW,
701
+ # )["bidSuggestion"]
702
+ # )
703
+
704
+ # receipt: TxReceipt = self._extend_storing_period(
705
+ # token_id, additional_epochs, token_amount
706
+ # )
707
+
708
+ # return {
709
+ # "UAL": ual,
710
+ # "operation": json.loads(Web3.to_json(receipt)),
711
+ # }
712
+
713
+ _get_assertion_size = Method(BlockchainRequest.get_assertion_size)
714
+
715
+ def to_jsonld(self, nquads: str):
716
+ options = {
717
+ "algorithm": "URDNA2015",
718
+ "format": "application/n-quads",
719
+ }
720
+
721
+ return jsonld.from_rdf(nquads, options)
722
+
723
+ def to_nquads(self, content, input_format):
724
+ options = {
725
+ "algorithm": "URDNA2015",
726
+ "format": "application/n-quads",
727
+ }
728
+
729
+ if input_format:
730
+ options["inputFormat"] = input_format
731
+ try:
732
+ jsonld_data = jsonld.from_rdf(content, options)
733
+ canonized = jsonld.to_rdf(jsonld_data, options)
734
+
735
+ if isinstance(canonized, str):
736
+ return [line for line in canonized.split("\n") if line.strip()]
737
+
738
+ except Exception as e:
739
+ raise ValueError(f"Error processing content: {e}")