dkg 1.1.1-py3-none-any.whl → 8.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. dkg/asset.py +589 -683
  2. dkg/constants.py +106 -45
  3. dkg/data/interfaces/AskStorage.json +366 -0
  4. dkg/data/interfaces/Chronos.json +202 -0
  5. dkg/data/interfaces/Hub.json +294 -2
  6. dkg/data/interfaces/IdentityStorage.json +400 -0
  7. dkg/data/interfaces/KnowledgeCollection.json +610 -0
  8. dkg/data/interfaces/KnowledgeCollectionStorage.json +2312 -0
  9. dkg/data/interfaces/Paranet.json +656 -135
  10. dkg/data/interfaces/ParanetIncentivesPoolFactory.json +18 -2
  11. dkg/data/interfaces/ParanetKnowledgeMinersRegistry.json +20 -4
  12. dkg/data/interfaces/{ParanetNeurowebIncentivesPool.json → ParanetNeuroIncentivesPool.json} +7 -7
  13. dkg/data/interfaces/ParanetsRegistry.json +844 -36
  14. dkg/data/interfaces/Token.json +146 -17
  15. dkg/dataclasses.py +11 -1
  16. dkg/exceptions.py +1 -0
  17. dkg/graph.py +55 -24
  18. dkg/main.py +18 -2
  19. dkg/network.py +2 -2
  20. dkg/node.py +7 -0
  21. dkg/paranet.py +336 -33
  22. dkg/providers/blockchain.py +23 -24
  23. dkg/providers/node_http.py +11 -9
  24. dkg/services/input_service.py +183 -0
  25. dkg/services/node_service.py +164 -0
  26. dkg/types/__init__.py +15 -3
  27. dkg/types/general.py +15 -8
  28. dkg/utils/blockchain_request.py +189 -64
  29. dkg/utils/knowledge_asset_tools.py +5 -0
  30. dkg/utils/knowledge_collection_tools.py +236 -0
  31. dkg/utils/merkle.py +1 -1
  32. dkg/utils/node_request.py +33 -33
  33. dkg/utils/rdf.py +10 -6
  34. dkg/utils/string_transformations.py +1 -0
  35. {dkg-1.1.1.dist-info → dkg-8.0.0.dist-info}/METADATA +36 -26
  36. dkg-8.0.0.dist-info/RECORD +56 -0
  37. {dkg-1.1.1.dist-info → dkg-8.0.0.dist-info}/WHEEL +1 -1
  38. dkg/data/interfaces/AssertionStorage.json +0 -229
  39. dkg/data/interfaces/ContentAsset.json +0 -801
  40. dkg/data/interfaces/ContentAssetStorage.json +0 -782
  41. dkg/data/interfaces/ServiceAgreementStorageProxy.json +0 -1314
  42. dkg/data/interfaces/UnfinalizedStateStorage.json +0 -171
  43. dkg-1.1.1.dist-info/RECORD +0 -52
  44. {dkg-1.1.1.dist-info → dkg-8.0.0.dist-info}/LICENSE +0 -0
  45. {dkg-1.1.1.dist-info → dkg-8.0.0.dist-info}/NOTICE +0 -0
dkg/asset.py CHANGED
@@ -16,62 +16,57 @@
16
16
  # under the License.
17
17
 
18
18
  import json
19
- import math
20
- import re
21
- from typing import Literal, Type
22
-
19
+ import hashlib
20
+ from typing import Literal, Dict, Optional, Any
23
21
  from pyld import jsonld
24
22
  from web3 import Web3
25
- from web3.constants import ADDRESS_ZERO, HASH_ZERO
26
- from web3.exceptions import ContractLogicError
23
+ from web3.constants import ADDRESS_ZERO
27
24
  from web3.types import TxReceipt
25
+ from itertools import chain
26
+ from eth_abi.packed import encode_packed
27
+ from eth_account.messages import encode_defunct
28
+ from eth_account import Account
29
+ from hexbytes import HexBytes
28
30
 
29
31
  from dkg.constants import (
30
- DEFAULT_HASH_FUNCTION_ID,
31
- DEFAULT_PROXIMITY_SCORE_FUNCTIONS_PAIR_IDS,
32
32
  PRIVATE_ASSERTION_PREDICATE,
33
- PRIVATE_CURRENT_REPOSITORY,
34
- PRIVATE_HISTORICAL_REPOSITORY,
33
+ PRIVATE_RESOURCE_PREDICATE,
34
+ PRIVATE_HASH_SUBJECT_PREFIX,
35
+ CHUNK_BYTE_SIZE,
36
+ MAX_FILE_SIZE,
37
+ DEFAULT_RDF_FORMAT,
38
+ Operations,
39
+ OutputTypes,
40
+ DefaultParameters,
41
+ ZERO_ADDRESS,
35
42
  )
36
43
  from dkg.dataclasses import (
37
44
  BidSuggestionRange,
38
- KnowledgeAssetContentVisibility,
39
- KnowledgeAssetEnumStates,
40
45
  NodeResponseDict,
41
46
  )
42
- from dkg.exceptions import (
43
- DatasetOutputFormatNotSupported,
44
- InvalidKnowledgeAsset,
45
- InvalidStateOption,
46
- InvalidTokenAmount,
47
- MissingKnowledgeAssetState,
48
- OperationNotFinished,
49
- )
50
47
  from dkg.manager import DefaultRequestManager
51
48
  from dkg.method import Method
52
49
  from dkg.module import Module
53
- from dkg.types import JSONLD, UAL, Address, AgreementData, HexStr, Wei
54
- from dkg.utils.blockchain_request import BlockchainRequest
55
- from dkg.utils.decorators import retry
56
- from dkg.utils.merkle import MerkleTree, hash_assertion_with_indexes
57
- from dkg.utils.metadata import (
58
- generate_agreement_id,
59
- generate_assertion_metadata,
60
- generate_keyword,
50
+ from dkg.types import JSONLD, UAL, Address, HexStr, Wei
51
+ from dkg.utils.blockchain_request import (
52
+ BlockchainRequest,
53
+ KnowledgeCollectionResult,
54
+ AllowanceResult,
61
55
  )
62
56
  from dkg.utils.node_request import (
63
57
  NodeRequest,
64
58
  OperationStatus,
65
- StoreTypes,
66
- validate_operation_status,
67
59
  )
68
- from dkg.utils.rdf import format_content, normalize_dataset
69
60
  from dkg.utils.ual import format_ual, parse_ual
61
+ import dkg.utils.knowledge_collection_tools as kc_tools
62
+ import dkg.utils.knowledge_asset_tools as ka_tools
70
63
 
71
64
 
72
65
  class KnowledgeAsset(Module):
73
- def __init__(self, manager: DefaultRequestManager):
66
+ def __init__(self, manager: DefaultRequestManager, input_service, node_service):
74
67
  self.manager = manager
68
+ self.input_service = input_service
69
+ self.node_service = node_service
75
70
 
76
71
  _owner = Method(BlockchainRequest.owner_of)
77
72
 
@@ -140,226 +135,490 @@ class KnowledgeAsset(Module):
140
135
 
141
136
  _get_contract_address = Method(BlockchainRequest.get_contract_address)
142
137
  _get_current_allowance = Method(BlockchainRequest.allowance)
138
+ _increase_allowance = Method(BlockchainRequest.increase_allowance)
139
+ _decrease_allowance = Method(BlockchainRequest.decrease_allowance)
140
+ _chain_id = Method(BlockchainRequest.chain_id)
141
+ _get_asset_storage_address = Method(BlockchainRequest.get_asset_storage_address)
142
+ _mint_paranet_knowledge_asset = Method(BlockchainRequest.mint_knowledge_asset)
143
+ _key_is_operational_wallet = Method(BlockchainRequest.key_is_operational_wallet)
144
+ _time_until_next_epoch = Method(BlockchainRequest.time_until_next_epoch)
145
+ _epoch_length = Method(BlockchainRequest.epoch_length)
146
+ _get_stake_weighted_average_ask = Method(
147
+ BlockchainRequest.get_stake_weighted_average_ask
148
+ )
149
+ _get_bid_suggestion = Method(NodeRequest.bid_suggestion)
150
+ _create_knowledge_collection = Method(BlockchainRequest.create_knowledge_collection)
151
+ _mint_knowledge_asset = Method(BlockchainRequest.mint_knowledge_asset)
152
+
153
+ def get_operation_status_object(
154
+ self, operation_result: Dict[str, Any], operation_id: str
155
+ ) -> Dict[str, Any]:
156
+ """
157
+ Creates an operation status object from operation result and ID.
158
+
159
+ Args:
160
+ operation_result: Dictionary containing operation result data
161
+ operation_id: The ID of the operation
162
+
163
+ Returns:
164
+ Dictionary containing operation status information
165
+ """
166
+ # Check if error_type exists in operation_result.data
167
+ operation_data = (
168
+ {"status": operation_result.get("status"), **operation_result.get("data")}
169
+ if operation_result.get("data", {}).get("errorType")
170
+ else {"status": operation_result.get("status")}
171
+ )
172
+
173
+ return {"operationId": operation_id, **operation_data}
143
174
 
144
- def get_current_allowance(self, spender: Address | None = None) -> Wei:
145
- if spender is None:
146
- spender = self._get_contract_address("ServiceAgreementV1")
175
+ def decrease_knowledge_collection_allowance(
176
+ self,
177
+ allowance_gap: int,
178
+ ):
179
+ knowledge_collection_address = self._get_contract_address("KnowledgeCollection")
180
+ self._decrease_allowance(knowledge_collection_address, allowance_gap)
181
+
182
+ def increase_knowledge_collection_allowance(
183
+ self,
184
+ sender: str,
185
+ token_amount: str,
186
+ ) -> AllowanceResult:
187
+ """
188
+ Increases the allowance for knowledge collection if necessary.
189
+
190
+ Args:
191
+ sender: The address of the sender
192
+ token_amount: The amount of tokens to check/increase allowance for
193
+
194
+ Returns:
195
+ AllowanceResult containing whether allowance was increased and the gap
196
+ """
197
+ knowledge_collection_address = self._get_contract_address("KnowledgeCollection")
147
198
 
148
- return int(
149
- self._get_current_allowance(
150
- self.manager.blockchain_provider.account.address, spender
199
+ allowance = self._get_current_allowance(sender, knowledge_collection_address)
200
+ allowance_gap = int(token_amount) - int(allowance)
201
+
202
+ if allowance_gap > 0:
203
+ self._increase_allowance(knowledge_collection_address, allowance_gap)
204
+
205
+ return AllowanceResult(
206
+ allowance_increased=True, allowance_gap=allowance_gap
151
207
  )
152
- )
153
208
 
154
- _increase_allowance = Method(BlockchainRequest.increase_allowance)
155
- _decrease_allowance = Method(BlockchainRequest.decrease_allowance)
209
+ return AllowanceResult(allowance_increased=False, allowance_gap=allowance_gap)
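
The method above follows a plain allowance-gap pattern: read the current ERC-20 allowance granted to the KnowledgeCollection contract and top it up only by the shortfall. The arithmetic in isolation, as a sketch with no chain calls:

    def compute_allowance_gap(current_allowance: int, token_amount: int) -> int:
        # A positive gap means increase the allowance by exactly the shortfall;
        # zero or negative means the existing allowance already covers the spend.
        return int(token_amount) - int(current_allowance)

    assert compute_allowance_gap(40, 100) == 60    # needs a 60-token increase
    assert compute_allowance_gap(150, 100) == -50  # nothing to increase
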
156
210
 
157
- def set_allowance(self, token_amount: Wei, spender: Address | None = None) -> Wei:
158
- if spender is None:
159
- spender = self._get_contract_address("ServiceAgreementV1")
211
+ def create_knowledge_collection(
212
+ self,
213
+ request: dict,
214
+ paranet_ka_contract: Optional[Address] = None,
215
+ paranet_token_id: Optional[int] = None,
216
+ ) -> KnowledgeCollectionResult:
217
+ """
218
+ Creates a knowledge collection on the blockchain.
219
+
220
+ Args:
221
+ request: dict containing all collection parameters
222
+ paranet_ka_contract: Optional paranet contract address
223
+ paranet_token_id: Optional paranet token ID
224
+ blockchain: Blockchain configuration
225
+
226
+ Returns:
227
+ KnowledgeCollectionResult containing collection ID and transaction receipt
228
+
229
+ Raises:
230
+ BlockchainError: If the collection creation fails
231
+ """
232
+ sender = self.manager.blockchain_provider.account.address
233
+ allowance_increased = False
234
+ allowance_gap = 0
160
235
 
161
- current_allowance = self.get_current_allowance(spender)
236
+ try:
237
+ # Handle allowance
238
+ if request.get("paymaster") and request.get("paymaster") != ZERO_ADDRESS:
239
+ pass
240
+ else:
241
+ allowance_result = self.increase_knowledge_collection_allowance(
242
+ sender=sender,
243
+ token_amount=request.get("tokenAmount"),
244
+ )
245
+ allowance_increased = allowance_result.allowance_increased
246
+ allowance_gap = allowance_result.allowance_gap
247
+
248
+ if not paranet_ka_contract and not paranet_token_id:
249
+ receipt = self._create_knowledge_collection(
250
+ request.get("publishOperationId"),
251
+ Web3.to_bytes(hexstr=request.get("merkleRoot")),
252
+ request.get("knowledgeAssetsAmount"),
253
+ request.get("byteSize"),
254
+ request.get("epochs"),
255
+ request.get("tokenAmount"),
256
+ request.get("isImmutable"),
257
+ request.get("paymaster"),
258
+ request.get("publisherNodeIdentityId"),
259
+ Web3.to_bytes(hexstr=request.get("publisherNodeR")),
260
+ Web3.to_bytes(hexstr=request.get("publisherNodeVS")),
261
+ request.get("identityIds"),
262
+ [Web3.to_bytes(hexstr=x) for x in request.get("r")],
263
+ [Web3.to_bytes(hexstr=x) for x in request.get("vs")],
264
+ )
265
+ else:
266
+ receipt = self._mint_knowledge_asset(
267
+ paranet_ka_contract,
268
+ paranet_token_id,
269
+ list(request.values()),
270
+ )
162
271
 
163
- allowance_difference = token_amount - current_allowance
272
+ event_data = self.manager.blockchain_provider.decode_logs_event(
273
+ receipt=receipt,
274
+ contract_name="KnowledgeCollectionStorage",
275
+ event_name="KnowledgeCollectionCreated",
276
+ )
277
+ collection_id = (
278
+ int(getattr(event_data[0].get("args", {}), "id", None))
279
+ if event_data
280
+ else None
281
+ )
164
282
 
165
- if allowance_difference > 0:
166
- self._increase_allowance(spender, allowance_difference)
167
- elif allowance_difference < 0:
168
- self._decrease_allowance(spender, -allowance_difference)
283
+ return KnowledgeCollectionResult(
284
+ knowledge_collection_id=collection_id, receipt=receipt
285
+ )
169
286
 
170
- return allowance_difference
287
+ except Exception as e:
288
+ if allowance_increased:
289
+ self.decrease_knowledge_collection_allowance(allowance_gap)
290
+ raise e
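
The try/except above also rolls back the temporary allowance bump when collection creation fails. The control flow, reduced to a skeleton with the blockchain calls stubbed out (function names here are placeholders):

    def create_with_allowance_rollback(increase, create, decrease, gap: int):
        increased = False
        try:
            if gap > 0:
                increase(gap)
                increased = True
            return create()
        except Exception:
            if increased:
                decrease(gap)  # undo the temporary approval before re-raising
            raise
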
171
291
 
172
- def increase_allowance(
173
- self, token_amount: Wei, spender: Address | None = None
174
- ) -> Wei:
175
- if spender is None:
176
- spender = self._get_contract_address("ServiceAgreementV1")
292
+ def process_content(self, content: str) -> list:
293
+ return [line.strip() for line in content.split("\n") if line.strip() != ""]
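
process_content simply splits a raw N-Quads string into trimmed, non-empty lines:

    raw = '<urn:a> <urn:p> "x" .\n\n  <urn:b> <urn:p> "y" .  \n'
    lines = [line.strip() for line in raw.split("\n") if line.strip() != ""]
    # ['<urn:a> <urn:p> "x" .', '<urn:b> <urn:p> "y" .']
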
177
294
 
178
- self._increase_allowance(spender, token_amount)
295
+ def solidity_packed_sha256(self, types: list[str], values: list) -> str:
296
+ # Encode the values using eth_abi's encode_packed
297
+ packed_data = encode_packed(types, values)
179
298
 
180
- return token_amount
299
+ # Calculate SHA256
300
+ sha256_hash = hashlib.sha256(packed_data).hexdigest()
181
301
 
182
- def decrease_allowance(
183
- self, token_amount: Wei, spender: Address | None = None
184
- ) -> Wei:
185
- if spender is None:
186
- spender = self._get_contract_address("ServiceAgreementV1")
302
+ return f"0x{sha256_hash}"
187
303
 
188
- current_allowance = self.get_current_allowance(spender)
189
- subtracted_value = min(token_amount, current_allowance)
304
+ def insert_triple_sorted(self, triples_list: list, new_triple: str) -> int:
305
+ # Assuming triples_list is already sorted
306
+ left = 0
307
+ right = len(triples_list)
190
308
 
191
- self._decrease_allowance(spender, subtracted_value)
309
+ while left < right:
310
+ mid = (left + right) // 2
311
+ if triples_list[mid] < new_triple:
312
+ left = mid + 1
313
+ else:
314
+ right = mid
315
+
316
+ # Insert the new triple at the correct position
317
+ triples_list.insert(left, new_triple)
318
+ return left
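
insert_triple_sorted is a hand-rolled binary insertion; on an already-sorted list it lands at the same index as the standard-library bisect module:

    import bisect

    triples = ['<urn:a> <urn:p> "1" .', '<urn:c> <urn:p> "3" .']
    new = '<urn:b> <urn:p> "2" .'

    idx = bisect.bisect_left(triples, new)  # same position the loop above computes
    triples.insert(idx, new)
    # bisect.insort_left(triples, new) does both steps in one call
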
319
+
320
+ def get_operation_status_dict(self, operation_result, operation_id):
321
+ # Check if data exists and has errorType
322
+ operation_data = (
323
+ {"status": operation_result.get("status"), **operation_result.get("data")}
324
+ if operation_result.get("data")
325
+ and operation_result.get("data", {}).get("errorType")
326
+ else {"status": operation_result.get("status")}
327
+ )
192
328
 
193
- return subtracted_value
329
+ return {"operationId": operation_id, **operation_data}
194
330
 
195
- _chain_id = Method(BlockchainRequest.chain_id)
331
+ def get_message_signer_address(self, dataset_root: str, signature: dict):
332
+ message = encode_defunct(HexBytes(dataset_root))
333
+ r, s, v = signature.get("r"), signature.get("s"), signature.get("v")
334
+ r = r[2:] if r.startswith("0x") else r
335
+ s = s[2:] if s.startswith("0x") else s
196
336
 
197
- _get_asset_storage_address = Method(BlockchainRequest.get_asset_storage_address)
198
- _create = Method(BlockchainRequest.create_asset)
199
- _mint_paranet_knowledge_asset = Method(BlockchainRequest.mint_knowledge_asset)
337
+ sig = "0x" + r + s + hex(v)[2:].zfill(2)
200
338
 
201
- _get_bid_suggestion = Method(NodeRequest.bid_suggestion)
202
- _local_store = Method(NodeRequest.local_store)
203
- _publish = Method(NodeRequest.publish)
339
+ return Account.recover_message(message, signature=sig)
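
A round trip of the signature handling above using a throwaway key: sign the dataset root as an EIP-191 personal message, reassemble r, s and v into a 65-byte signature, and recover the signer (the root value is a placeholder):

    from eth_account import Account
    from eth_account.messages import encode_defunct

    acct = Account.create()
    dataset_root = "0x" + "11" * 32  # placeholder Merkle root

    signed = Account.sign_message(encode_defunct(hexstr=dataset_root), acct.key)
    r = hex(signed.r)[2:].rjust(64, "0")
    s = hex(signed.s)[2:].rjust(64, "0")
    sig = "0x" + r + s + hex(signed.v)[2:].zfill(2)

    assert Account.recover_message(
        encode_defunct(hexstr=dataset_root), signature=sig
    ) == acct.address
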
204
340
 
205
341
  def create(
206
342
  self,
207
343
  content: dict[Literal["public", "private"], JSONLD],
208
- epochs_number: int,
209
- token_amount: Wei | None = None,
210
- immutable: bool = False,
211
- content_type: Literal["JSON-LD", "N-Quads"] = "JSON-LD",
212
- paranet_ual: UAL | None = None,
344
+ options: dict = {},
213
345
  ) -> dict[str, UAL | HexStr | dict[str, dict[str, str] | TxReceipt]]:
346
+ arguments = self.input_service.get_asset_create_arguments(options)
347
+
348
+ max_number_of_retries = arguments.get("max_number_of_retries")
349
+ frequency = arguments.get("frequency")
350
+ epochs_num = arguments.get("epochs_num")
351
+ hash_function_id = arguments.get("hash_function_id")
352
+ immutable = arguments.get("immutable")
353
+ token_amount = arguments.get("token_amount")
354
+ payer = arguments.get("payer")
355
+ minimum_number_of_finalization_confirmations = arguments.get(
356
+ "minimum_number_of_finalization_confirmations"
357
+ )
358
+ minimum_number_of_node_replications = arguments.get(
359
+ "minimum_number_of_node_replications"
360
+ )
214
361
  blockchain_id = self.manager.blockchain_provider.blockchain_id
215
- assertions = format_content(content, content_type)
216
362
 
217
- public_assertion_id = MerkleTree(
218
- hash_assertion_with_indexes(assertions["public"]),
219
- sort_pairs=True,
220
- ).root
221
- public_assertion_metadata = generate_assertion_metadata(assertions["public"])
363
+ dataset = {}
364
+ public_content = dataset.get("public")
365
+ private_content = dataset.get("private")
366
+ if isinstance(content, str):
367
+ dataset["public"] = self.process_content(content)
368
+ elif isinstance(public_content, str) or (
369
+ not public_content and private_content and isinstance(private_content, str)
370
+ ):
371
+ if public_content:
372
+ dataset["public"] = self.process_content(public_content)
373
+ else:
374
+ dataset["public"] = []
222
375
 
223
- content_asset_storage_address = self._get_asset_storage_address(
224
- "ContentAssetStorage"
376
+ if private_content and isinstance(private_content, str):
377
+ dataset["private"] = self.process_content(private_content)
378
+ else:
379
+ dataset = kc_tools.format_dataset(content)
380
+
381
+ public_triples_grouped = []
382
+
383
+ dataset["public"] = kc_tools.generate_missing_ids_for_blank_nodes(
384
+ dataset.get("public")
225
385
  )
226
386
 
227
- if token_amount is None:
228
- token_amount = int(
229
- self._get_bid_suggestion(
230
- blockchain_id,
231
- epochs_number,
232
- public_assertion_metadata["size"],
233
- content_asset_storage_address,
234
- public_assertion_id,
235
- DEFAULT_HASH_FUNCTION_ID,
236
- token_amount or BidSuggestionRange.LOW,
237
- )["bidSuggestion"]
387
+ if dataset.get("private") and len(dataset.get("private")):
388
+ dataset["private"] = kc_tools.generate_missing_ids_for_blank_nodes(
389
+ dataset.get("private")
238
390
  )
239
391
 
240
- current_allowance = self.get_current_allowance()
241
- if is_allowance_increased := current_allowance < token_amount:
242
- self.increase_allowance(token_amount)
392
+ # Group private triples by subject and flatten
393
+ private_triples_grouped = kc_tools.group_nquads_by_subject(
394
+ dataset.get("private"), True
395
+ )
243
396
 
244
- result = {"publicAssertionId": public_assertion_id, "operation": {}}
397
+ dataset["private"] = list(chain.from_iterable(private_triples_grouped))
245
398
 
246
- try:
247
- if paranet_ual is None:
248
- receipt: TxReceipt = self._create(
249
- {
250
- "assertionId": Web3.to_bytes(hexstr=public_assertion_id),
251
- "size": public_assertion_metadata["size"],
252
- "triplesNumber": public_assertion_metadata["triples_number"],
253
- "chunksNumber": public_assertion_metadata["chunks_number"],
254
- "tokenAmount": token_amount,
255
- "epochsNumber": epochs_number,
256
- "scoreFunctionId": DEFAULT_PROXIMITY_SCORE_FUNCTIONS_PAIR_IDS[
257
- self.manager.blockchain_provider.environment
258
- ][blockchain_id],
259
- "immutable_": immutable,
260
- }
261
- )
262
- else:
263
- parsed_paranet_ual = parse_ual(paranet_ual)
264
- paranet_knowledge_asset_storage, paranet_knowledge_asset_token_id = (
265
- parsed_paranet_ual["contract_address"],
266
- parsed_paranet_ual["token_id"],
267
- )
399
+ # Compute private root and add to public
400
+ private_root = kc_tools.calculate_merkle_root(dataset.get("private"))
401
+ dataset["public"].append(
402
+ f'<{ka_tools.generate_named_node()}> <{PRIVATE_ASSERTION_PREDICATE}> "{private_root}" .'
403
+ )
268
404
 
269
- receipt: TxReceipt = self._mint_paranet_knowledge_asset(
270
- paranet_knowledge_asset_storage,
271
- paranet_knowledge_asset_token_id,
272
- {
273
- "assertionId": Web3.to_bytes(hexstr=public_assertion_id),
274
- "size": public_assertion_metadata["size"],
275
- "triplesNumber": public_assertion_metadata["triples_number"],
276
- "chunksNumber": public_assertion_metadata["chunks_number"],
277
- "tokenAmount": token_amount,
278
- "epochsNumber": epochs_number,
279
- "scoreFunctionId": DEFAULT_PROXIMITY_SCORE_FUNCTIONS_PAIR_IDS[
280
- self.manager.blockchain_provider.environment
281
- ][blockchain_id],
282
- "immutable_": immutable,
283
- },
405
+ # Compute private root and add to public
406
+ public_triples_grouped = kc_tools.group_nquads_by_subject(
407
+ dataset.get("public"), True
408
+ )
409
+
410
+ # Create a dictionary for public subject -> index for quick lookup
411
+ public_subject_dict = {}
412
+ for i in range(len(public_triples_grouped)):
413
+ public_subject = public_triples_grouped[i][0].split(" ")[0]
414
+ public_subject_dict[public_subject] = i
415
+
416
+ private_triple_subject_hashes_grouped_without_public_pair = []
417
+
418
+ # Integrate private subjects into public or store separately if no match to be appended later
419
+ for private_triples in private_triples_grouped:
420
+ private_subject = private_triples[0].split(" ")[
421
+ 0
422
+ ] # Extract the private subject
423
+
424
+ private_subject_hash = self.solidity_packed_sha256(
425
+ types=["string"],
426
+ values=[private_subject[1:-1]],
284
427
  )
285
428
 
286
- result["paranetId"] = Web3.to_hex(
287
- Web3.solidity_keccak(
288
- ["address", "uint256"],
289
- [
290
- paranet_knowledge_asset_storage,
291
- paranet_knowledge_asset_token_id,
292
- ],
429
+ if (
430
+ private_subject in public_subject_dict
431
+ ): # Check if there's a public pair
432
+ # If there's a public pair, insert a representation in that group
433
+ public_index = public_subject_dict.get(private_subject)
434
+ self.insert_triple_sorted(
435
+ public_triples_grouped[public_index],
436
+ f"{private_subject} <{PRIVATE_RESOURCE_PREDICATE}> <{ka_tools.generate_named_node()}> .",
293
437
  )
294
- )
295
- except ContractLogicError as err:
296
- if is_allowance_increased:
297
- self.decrease_allowance(token_amount)
298
- raise err
299
-
300
- events = self.manager.blockchain_provider.decode_logs_event(
301
- receipt,
302
- "ContentAsset",
303
- "AssetMinted",
438
+ else:
439
+ # If no public pair, maintain separate list, inserting sorted by hash
440
+ self.insert_triple_sorted(
441
+ private_triple_subject_hashes_grouped_without_public_pair,
442
+ f"<{PRIVATE_HASH_SUBJECT_PREFIX}{private_subject_hash}> <{PRIVATE_RESOURCE_PREDICATE}> <{ka_tools.generate_named_node()}> .",
443
+ )
444
+
445
+ for triple in private_triple_subject_hashes_grouped_without_public_pair:
446
+ public_triples_grouped.append([triple])
447
+
448
+ dataset["public"] = list(chain.from_iterable(public_triples_grouped))
449
+ else:
450
+ # No private triples, just group and flatten public
451
+ public_triples_grouped = kc_tools.group_nquads_by_subject(
452
+ dataset.get("public"), True
453
+ )
454
+ dataset["public"] = list(chain.from_iterable(public_triples_grouped))
455
+
456
+ # Calculate the number of chunks
457
+ number_of_chunks = kc_tools.calculate_number_of_chunks(
458
+ dataset.get("public"), CHUNK_BYTE_SIZE
304
459
  )
305
- token_id = events[0].args["tokenId"]
460
+ dataset_size = number_of_chunks * CHUNK_BYTE_SIZE
461
+
462
+ # Validate the assertion size in bytes
463
+ if dataset_size > MAX_FILE_SIZE:
464
+ raise ValueError(f"File size limit is {MAX_FILE_SIZE / (1024 * 1024)}MB.")
465
+
466
+ # Calculate the Merkle root
467
+ dataset_root = kc_tools.calculate_merkle_root(dataset.get("public"))
306
468
 
307
- result["UAL"] = format_ual(
308
- blockchain_id, content_asset_storage_address, token_id
469
+ # Get the contract address for KnowledgeCollectionStorage
470
+ content_asset_storage_address = self._get_asset_storage_address(
471
+ "KnowledgeCollectionStorage"
309
472
  )
310
- result["operation"]["mintKnowledgeAsset"] = json.loads(Web3.to_json(receipt))
311
473
 
312
- assertions_list = [
313
- {
314
- "blockchain": blockchain_id,
315
- "contract": content_asset_storage_address,
316
- "tokenId": token_id,
317
- "assertionId": public_assertion_id,
318
- "assertion": assertions["public"],
319
- "storeType": StoreTypes.TRIPLE,
474
+ publish_operation_id = self.node_service.publish(
475
+ dataset_root,
476
+ dataset,
477
+ blockchain_id,
478
+ hash_function_id,
479
+ minimum_number_of_node_replications,
480
+ )["operationId"]
481
+ publish_operation_result = self.node_service.get_operation_result(
482
+ publish_operation_id,
483
+ Operations.PUBLISH.value,
484
+ max_number_of_retries,
485
+ frequency,
486
+ )
487
+
488
+ if publish_operation_result.get(
489
+ "status"
490
+ ) != OperationStatus.COMPLETED and not publish_operation_result.get(
491
+ "data", {}
492
+ ).get("minAcksReached"):
493
+ return {
494
+ "datasetRoot": dataset_root,
495
+ "operation": {
496
+ "publish": self.get_operation_status_dict(
497
+ publish_operation_result, publish_operation_id
498
+ )
499
+ },
320
500
  }
321
- ]
322
501
 
323
- if content.get("private", None):
324
- assertions_list.append(
325
- {
326
- "blockchain": blockchain_id,
327
- "contract": content_asset_storage_address,
328
- "tokenId": token_id,
329
- "assertionId": MerkleTree(
330
- hash_assertion_with_indexes(assertions["private"]),
331
- sort_pairs=True,
332
- ).root,
333
- "assertion": assertions["private"],
334
- "storeType": StoreTypes.TRIPLE,
335
- }
502
+ data = publish_operation_result.get("data", {})
503
+ signatures = data.get("signatures")
504
+
505
+ publisher_node_signature = data.get("publisherNodeSignature", {})
506
+ publisher_node_identity_id = publisher_node_signature.get("identityId")
507
+ publisher_node_r = publisher_node_signature.get("r")
508
+ publisher_node_vs = publisher_node_signature.get("vs")
509
+
510
+ identity_ids, r, vs = [], [], []
511
+
512
+ for signature in signatures:
513
+ try:
514
+ signer_address = self.get_message_signer_address(
515
+ dataset_root, signature
516
+ )
517
+
518
+ key_is_operational_wallet = self._key_is_operational_wallet(
519
+ signature.get("identityId"),
520
+ Web3.solidity_keccak(["address"], [signer_address]),
521
+ 2, # IdentityLib.OPERATIONAL_KEY
522
+ )
523
+
524
+ # If valid, append the signature components
525
+ if key_is_operational_wallet:
526
+ identity_ids.append(signature.get("identityId"))
527
+ r.append(signature.get("r"))
528
+ vs.append(signature.get("vs"))
529
+
530
+ except Exception:
531
+ continue
532
+
533
+ if token_amount:
534
+ estimated_publishing_cost = token_amount
535
+ else:
536
+ time_until_next_epoch = self._time_until_next_epoch()
537
+ epoch_length = self._epoch_length()
538
+ stake_weighted_average_ask = self._get_stake_weighted_average_ask()
539
+
540
+ # Convert to integers and perform calculation
541
+ estimated_publishing_cost = (
542
+ (
543
+ int(stake_weighted_average_ask)
544
+ * (
545
+ int(epochs_num) * int(1e18)
546
+ + (int(time_until_next_epoch) * int(1e18)) // int(epoch_length)
547
+ )
548
+ * int(dataset_size)
549
+ )
550
+ // 1024
551
+ // int(1e18)
336
552
  )
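
When no token_amount is given, the cost estimate scales the stake-weighted average ask by the number of epochs purchased (plus the fraction of the current epoch remaining) and by the dataset size in kilobytes, with 1e18 used as a fixed-point scaling factor. Worked through with small hypothetical numbers:

    stake_weighted_average_ask = 500_000_000_000_000  # wei per KB per epoch (hypothetical)
    epochs_num = 2
    time_until_next_epoch = 150_000   # seconds (hypothetical)
    epoch_length = 300_000            # seconds (hypothetical)
    dataset_size = 4096               # bytes, already rounded up to whole chunks

    estimated_publishing_cost = (
        (
            stake_weighted_average_ask
            * (epochs_num * int(1e18) + (time_until_next_epoch * int(1e18)) // epoch_length)
            * dataset_size
        )
        // 1024
        // int(1e18)
    )
    # 2.5 epochs * 4 KB * 5e14 wei = 5_000_000_000_000_000 wei
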
337
553
 
338
- operation_id = self._publish(
339
- public_assertion_id,
340
- assertions["public"],
341
- blockchain_id,
342
- content_asset_storage_address,
343
- token_id,
344
- DEFAULT_HASH_FUNCTION_ID,
345
- )["operationId"]
346
- operation_result = self.get_operation_result(operation_id, "publish")
554
+ knowledge_collection_id = None
555
+ mint_knowledge_asset_receipt = None
347
556
 
348
- result["operation"]["publish"] = {
349
- "operationId": operation_id,
350
- "status": operation_result["status"],
351
- }
557
+ knowledge_collection_result = self.create_knowledge_collection(
558
+ {
559
+ "publishOperationId": publish_operation_id,
560
+ "merkleRoot": dataset_root,
561
+ "knowledgeAssetsAmount": kc_tools.count_distinct_subjects(
562
+ dataset.get("public")
563
+ ),
564
+ "byteSize": dataset_size,
565
+ "epochs": epochs_num,
566
+ "tokenAmount": estimated_publishing_cost,
567
+ "isImmutable": immutable,
568
+ "paymaster": payer,
569
+ "publisherNodeIdentityId": publisher_node_identity_id,
570
+ "publisherNodeR": publisher_node_r,
571
+ "publisherNodeVS": publisher_node_vs,
572
+ "identityIds": identity_ids,
573
+ "r": r,
574
+ "vs": vs,
575
+ },
576
+ None,
577
+ None,
578
+ )
579
+ knowledge_collection_id = knowledge_collection_result.knowledge_collection_id
580
+ mint_knowledge_asset_receipt = knowledge_collection_result.receipt
352
581
 
353
- if operation_result["status"] == OperationStatus.COMPLETED:
354
- operation_id = self._local_store(assertions_list)["operationId"]
355
- operation_result = self.get_operation_result(operation_id, "local-store")
582
+ ual = format_ual(
583
+ blockchain_id, content_asset_storage_address, knowledge_collection_id
584
+ )
356
585
 
357
- result["operation"]["localStore"] = {
358
- "operationId": operation_id,
359
- "status": operation_result["status"],
360
- }
586
+ finality_status_result = 0
587
+ if minimum_number_of_finalization_confirmations > 0:
588
+ finality_status_result = self.node_service.finality_status(
589
+ ual,
590
+ minimum_number_of_finalization_confirmations,
591
+ max_number_of_retries,
592
+ frequency,
593
+ )
361
594
 
362
- return result
595
+ return json.loads(
596
+ Web3.to_json(
597
+ {
598
+ "UAL": ual,
599
+ "datasetRoot": dataset_root,
600
+ "signatures": publish_operation_result.get("data", {}).get(
601
+ "signatures"
602
+ ),
603
+ "operation": {
604
+ "mintKnowledgeAsset": mint_knowledge_asset_receipt,
605
+ "publish": self.get_operation_status_object(
606
+ publish_operation_result, publish_operation_id
607
+ ),
608
+ "finality": {
609
+ "status": (
610
+ "FINALIZED"
611
+ if finality_status_result
612
+ >= minimum_number_of_finalization_confirmations
613
+ else "NOT FINALIZED"
614
+ )
615
+ },
616
+ "numberOfConfirmations": finality_status_result,
617
+ "requiredConfirmations": minimum_number_of_finalization_confirmations,
618
+ },
619
+ }
620
+ )
621
+ )
363
622
 
364
623
  _submit_knowledge_asset = Method(BlockchainRequest.submit_knowledge_asset)
365
624
 
@@ -418,143 +677,6 @@ class KnowledgeAsset(Module):
418
677
  "operation": json.loads(Web3.to_json(receipt)),
419
678
  }
420
679
 
421
- _update = Method(NodeRequest.update)
422
-
423
- _get_block = Method(BlockchainRequest.get_block)
424
-
425
- _get_service_agreement_data = Method(BlockchainRequest.get_service_agreement_data)
426
- _update_asset_state = Method(BlockchainRequest.update_asset_state)
427
-
428
- def update(
429
- self,
430
- ual: UAL,
431
- content: dict[Literal["public", "private"], JSONLD],
432
- token_amount: Wei | None = None,
433
- content_type: Literal["JSON-LD", "N-Quads"] = "JSON-LD",
434
- ) -> dict[str, UAL | HexStr | dict[str, str]]:
435
- parsed_ual = parse_ual(ual)
436
- blockchain_id, content_asset_storage_address, token_id = (
437
- parsed_ual["blockchain"],
438
- parsed_ual["contract_address"],
439
- parsed_ual["token_id"],
440
- )
441
-
442
- assertions = format_content(content, content_type)
443
-
444
- public_assertion_id = MerkleTree(
445
- hash_assertion_with_indexes(assertions["public"]),
446
- sort_pairs=True,
447
- ).root
448
- public_assertion_metadata = generate_assertion_metadata(assertions["public"])
449
-
450
- if token_amount is None:
451
- agreement_id = self.get_agreement_id(
452
- content_asset_storage_address, token_id
453
- )
454
- # TODO: Dynamic types for namedtuples?
455
- agreement_data: Type[AgreementData] = self._get_service_agreement_data(
456
- agreement_id
457
- )
458
-
459
- timestamp_now = self._get_block("latest")["timestamp"]
460
- current_epoch = math.floor(
461
- (timestamp_now - agreement_data.startTime) / agreement_data.epochLength
462
- )
463
- epochs_left = agreement_data.epochsNumber - current_epoch
464
-
465
- token_amount = int(
466
- self._get_bid_suggestion(
467
- blockchain_id,
468
- epochs_left,
469
- public_assertion_metadata["size"],
470
- content_asset_storage_address,
471
- public_assertion_id,
472
- DEFAULT_HASH_FUNCTION_ID,
473
- token_amount or BidSuggestionRange.LOW,
474
- )["bidSuggestion"]
475
- )
476
-
477
- token_amount -= agreement_data.tokens[0]
478
- token_amount = token_amount if token_amount > 0 else 0
479
-
480
- current_allowance = self.get_current_allowance()
481
- if is_allowance_increased := current_allowance < token_amount:
482
- self.increase_allowance(token_amount)
483
-
484
- try:
485
- self._update_asset_state(
486
- token_id=token_id,
487
- assertion_id=public_assertion_id,
488
- size=public_assertion_metadata["size"],
489
- triples_number=public_assertion_metadata["triples_number"],
490
- chunks_number=public_assertion_metadata["chunks_number"],
491
- update_token_amount=token_amount,
492
- )
493
- except ContractLogicError as err:
494
- if is_allowance_increased:
495
- self.decrease_allowance(token_amount)
496
- raise err
497
-
498
- assertions_list = [
499
- {
500
- "blockchain": blockchain_id,
501
- "contract": content_asset_storage_address,
502
- "tokenId": token_id,
503
- "assertionId": public_assertion_id,
504
- "assertion": assertions["public"],
505
- "storeType": StoreTypes.PENDING,
506
- }
507
- ]
508
-
509
- if content.get("private", None):
510
- assertions_list.append(
511
- {
512
- "blockchain": blockchain_id,
513
- "contract": content_asset_storage_address,
514
- "tokenId": token_id,
515
- "assertionId": MerkleTree(
516
- hash_assertion_with_indexes(assertions["private"]),
517
- sort_pairs=True,
518
- ).root,
519
- "assertion": assertions["private"],
520
- "storeType": StoreTypes.PENDING,
521
- }
522
- )
523
-
524
- operation_id = self._local_store(assertions_list)["operationId"]
525
- self.get_operation_result(operation_id, "local-store")
526
-
527
- operation_id = self._update(
528
- public_assertion_id,
529
- assertions["public"],
530
- blockchain_id,
531
- content_asset_storage_address,
532
- token_id,
533
- DEFAULT_HASH_FUNCTION_ID,
534
- )["operationId"]
535
- operation_result = self.get_operation_result(operation_id, "update")
536
-
537
- return {
538
- "UAL": ual,
539
- "publicAssertionId": public_assertion_id,
540
- "operation": {
541
- "operationId": operation_id,
542
- "status": operation_result["status"],
543
- },
544
- }
545
-
546
- _cancel_update = Method(BlockchainRequest.cancel_asset_state_update)
547
-
548
- def cancel_update(self, ual: UAL) -> dict[str, UAL | TxReceipt]:
549
- token_id = parse_ual(ual)["token_id"]
550
-
551
- receipt: TxReceipt = self._cancel_update(token_id)
552
-
553
- return {
554
- "UAL": ual,
555
- "operation": json.loads(Web3.to_json(receipt)),
556
- }
557
-
558
680
  _burn_asset = Method(BlockchainRequest.burn_asset)
559
681
 
560
682
  def burn(self, ual: UAL) -> dict[str, UAL | TxReceipt]:
@@ -571,229 +693,124 @@ class KnowledgeAsset(Module):
571
693
  _get = Method(NodeRequest.get)
572
694
  _query = Method(NodeRequest.query)
573
695
 
574
- def get(
575
- self,
576
- ual: UAL,
577
- state: str | HexStr | int = KnowledgeAssetEnumStates.LATEST,
578
- content_visibility: str = KnowledgeAssetContentVisibility.ALL,
579
- output_format: Literal["JSON-LD", "N-Quads"] = "JSON-LD",
580
- validate: bool = True,
581
- ) -> dict[str, UAL | HexStr | list[JSONLD] | dict[str, str]]:
582
- state = (
583
- state.upper()
584
- if (isinstance(state, str) and not re.match(r"^0x[a-fA-F0-9]{64}$", state))
585
- else state
586
- )
587
- content_visibility = content_visibility.upper()
588
- output_format = output_format.upper()
589
-
590
- token_id = parse_ual(ual)["token_id"]
591
-
592
- def handle_latest_state(token_id: int) -> tuple[HexStr, bool]:
593
- unfinalized_state = Web3.to_hex(self._get_unfinalized_state(token_id))
594
-
595
- if unfinalized_state and unfinalized_state != HASH_ZERO:
596
- return unfinalized_state, False
597
- else:
598
- return handle_latest_finalized_state(token_id)
599
-
600
- def handle_latest_finalized_state(token_id: int) -> tuple[HexStr, bool]:
601
- return Web3.to_hex(self._get_latest_assertion_id(token_id)), True
602
-
603
- is_state_finalized = False
604
-
605
- match state:
606
- case KnowledgeAssetEnumStates.LATEST:
607
- public_assertion_id, is_state_finalized = handle_latest_state(token_id)
608
-
609
- case KnowledgeAssetEnumStates.LATEST_FINALIZED:
610
- public_assertion_id, is_state_finalized = handle_latest_finalized_state(
611
- token_id
612
- )
613
-
614
- case _ if isinstance(state, int):
615
- assertion_ids = [
616
- Web3.to_hex(assertion_id)
617
- for assertion_id in self._get_assertion_ids(token_id)
618
- ]
619
- if 0 <= state < (states_number := len(assertion_ids)):
620
- public_assertion_id = assertion_ids[state]
621
-
622
- if state == states_number - 1:
623
- is_state_finalized = True
624
- else:
625
- raise InvalidStateOption(f"State index {state} is out of range.")
626
-
627
- case _ if isinstance(state, str) and re.match(
628
- r"^0x[a-fA-F0-9]{64}$", state
629
- ):
630
- assertion_ids = [
631
- Web3.to_hex(assertion_id)
632
- for assertion_id in self._get_assertion_ids(token_id)
633
- ]
634
-
635
- if state in assertion_ids:
636
- public_assertion_id = state
637
-
638
- if state == assertion_ids[-1]:
639
- is_state_finalized = True
640
- else:
641
- raise InvalidStateOption(
642
- f"Given state hash: {state} is not a part of the KA."
643
- )
644
-
645
- case _:
646
- raise InvalidStateOption(f"Invalid state option: {state}.")
647
-
648
- get_public_operation_id: NodeResponseDict = self._get(
649
- ual, public_assertion_id, hashFunctionId=1
696
+ def get(self, ual: UAL, options: dict = {}) -> dict:
697
+ arguments = self.input_service.get_asset_get_arguments(options)
698
+
699
+ max_number_of_retries = arguments.get("max_number_of_retries")
700
+ frequency = arguments.get("frequency")
701
+ state = arguments.get("state")
702
+ include_metadata = arguments.get("include_metadata")
703
+ content_type = arguments.get("content_type")
704
+ validate = arguments.get("validate")
705
+ output_format = arguments.get("output_format")
706
+ hash_function_id = arguments.get("hash_function_id")
707
+ paranet_ual = arguments.get("paranet_ual")
708
+ subject_ual = arguments.get("subject_ual")
709
+
710
+ ual_with_state = f"{ual}:{state}" if state else ual
711
+ get_public_operation_id: NodeResponseDict = self.node_service.get(
712
+ ual_with_state,
713
+ content_type,
714
+ include_metadata,
715
+ hash_function_id,
716
+ paranet_ual,
717
+ subject_ual,
650
718
  )["operationId"]
651
719
 
652
- get_public_operation_result = self.get_operation_result(
653
- get_public_operation_id, "get"
720
+ get_public_operation_result = self.node_service.get_operation_result(
721
+ get_public_operation_id,
722
+ Operations.GET.value,
723
+ max_number_of_retries,
724
+ frequency,
654
725
  )
655
- public_assertion = get_public_operation_result["data"].get("assertion", None)
656
-
657
- if public_assertion is None:
658
- raise MissingKnowledgeAssetState("Unable to find state on the network!")
659
-
660
- if validate:
661
- root = MerkleTree(
662
- hash_assertion_with_indexes(public_assertion), sort_pairs=True
663
- ).root
664
- if root != public_assertion_id:
665
- raise InvalidKnowledgeAsset(
666
- f"State: {public_assertion_id}. " f"Merkle Tree Root: {root}"
667
- )
668
-
669
- result = {"operation": {}}
670
- if content_visibility != KnowledgeAssetContentVisibility.PRIVATE:
671
- formatted_public_assertion = public_assertion
672
726
 
673
- match output_format:
674
- case "NQUADS" | "N-QUADS":
675
- formatted_public_assertion: list[JSONLD] = jsonld.from_rdf(
676
- "\n".join(public_assertion),
677
- {"algorithm": "URDNA2015", "format": "application/n-quads"},
678
- )
679
- case "JSONLD" | "JSON-LD":
680
- formatted_public_assertion = "\n".join(public_assertion)
681
-
682
- case _:
683
- raise DatasetOutputFormatNotSupported(
684
- f"{output_format} isn't supported!"
685
- )
686
-
687
- if content_visibility == KnowledgeAssetContentVisibility.PUBLIC:
688
- result = {
689
- **result,
690
- "asertion": formatted_public_assertion,
691
- "assertionId": public_assertion_id,
727
+ if subject_ual:
728
+ if get_public_operation_result.get("data"):
729
+ return {
730
+ "operation": {
731
+ "get": self.get_operation_status_object(
732
+ get_public_operation_result, get_public_operation_id
733
+ ),
734
+ },
735
+ "subject_ual_pairs": get_public_operation_result.get("data"),
692
736
  }
693
- else:
694
- result["public"] = {
695
- "assertion": formatted_public_assertion,
696
- "assertionId": public_assertion_id,
737
+ if get_public_operation_result.get("status") != "FAILED":
738
+ get_public_operation_result["data"] = {
739
+ "errorType": "DKG_CLIENT_ERROR",
740
+ "errorMessage": "Unable to find assertion on the network!",
697
741
  }
698
-
699
- result["operation"]["publicGet"] = {
700
- "operationId": get_public_operation_id,
701
- "status": get_public_operation_result["status"],
742
+ get_public_operation_result["status"] = "FAILED"
743
+
744
+ return {
745
+ "operation": {
746
+ "get": self.get_operation_status_object(
747
+ get_public_operation_result, get_public_operation_id
748
+ ),
749
+ },
750
+ }
751
+ metadata = get_public_operation_result.get("data")
752
+ assertion = get_public_operation_result.get("data", {}).get("assertion", None)
753
+
754
+ if not assertion:
755
+ if get_public_operation_result.get("status") != "FAILED":
756
+ get_public_operation_result["data"] = {
757
+ "errorType": "DKG_CLIENT_ERROR",
758
+ "errorMessage": "Unable to find assertion on the network!",
759
+ }
760
+ get_public_operation_result["status"] = "FAILED"
761
+
762
+ return {
763
+ "operation": {
764
+ "get": self.get_operation_status_object(
765
+ get_public_operation_result, get_public_operation_id
766
+ ),
767
+ },
702
768
  }
703
769
 
704
- if content_visibility != KnowledgeAssetContentVisibility.PUBLIC:
705
- private_assertion_link_triples = list(
706
- filter(
707
- lambda element: PRIVATE_ASSERTION_PREDICATE in element,
708
- public_assertion,
709
- )
710
- )
711
-
712
- if private_assertion_link_triples:
713
- private_assertion_id = re.search(
714
- r'"(.*?)"', private_assertion_link_triples[0]
715
- ).group(1)
716
-
717
- private_assertion = get_public_operation_result["data"].get(
718
- "privateAssertion", None
719
- )
770
+ if validate:
771
+ is_valid = True # #TODO: Implement assertion validation logic
772
+ if not is_valid:
773
+ get_public_operation_result["data"] = {
774
+ "error_type": "DKG_CLIENT_ERROR",
775
+ "error_message": "Calculated root hashes don't match!",
776
+ }
720
777
 
721
- query_private_operation_id: NodeResponseDict | None = None
722
- if private_assertion is None:
723
- query = f"""
724
- CONSTRUCT {{ ?s ?p ?o }}
725
- WHERE {{
726
- {{
727
- GRAPH <assertion:{private_assertion_id}>
728
- {{
729
- ?s ?p ?o .
730
- }}
731
- }}
732
- }}
733
- """
734
-
735
- query_private_operation_id = self._query(
736
- query,
737
- "CONSTRUCT",
738
- PRIVATE_CURRENT_REPOSITORY
739
- if is_state_finalized
740
- else PRIVATE_HISTORICAL_REPOSITORY,
741
- )["operationId"]
742
-
743
- query_private_operation_result = self.get_operation_result(
744
- query_private_operation_id, "query"
745
- )
778
+ formatted_assertion = "\n".join(
779
+ assertion.get("public", [])
780
+ + (
781
+ assertion.get("private")
782
+ if isinstance(assertion.get("private"), list)
783
+ else []
784
+ )
785
+ )
746
786
 
747
- private_assertion = normalize_dataset(
748
- query_private_operation_result["data"],
749
- "N-Quads",
750
- )
787
+ formatted_metadata = None
788
+ if output_format == OutputTypes.JSONLD.value:
789
+ formatted_assertion = self.to_jsonld(formatted_assertion)
751
790
 
752
- if validate:
753
- root = MerkleTree(
754
- hash_assertion_with_indexes(private_assertion),
755
- sort_pairs=True,
756
- ).root
757
- if root != private_assertion_id:
758
- raise InvalidKnowledgeAsset(
759
- f"State: {private_assertion_id}. "
760
- f"Merkle Tree Root: {root}"
761
- )
791
+ if include_metadata:
792
+ formatted_metadata = self.to_jsonld("\n".join(metadata))
762
793
 
763
- match output_format:
764
- case "NQUADS" | "N-QUADS":
765
- formatted_private_assertion: list[JSONLD] = jsonld.from_rdf(
766
- "\n".join(private_assertion),
767
- {
768
- "algorithm": "URDNA2015",
769
- "format": "application/n-quads",
770
- },
771
- )
772
- case "JSONLD" | "JSON-LD":
773
- formatted_private_assertion = "\n".join(private_assertion)
794
+ if output_format == OutputTypes.NQUADS.value:
795
+ formatted_assertion = self.to_nquads(
796
+ formatted_assertion, DEFAULT_RDF_FORMAT
797
+ )
798
+ if include_metadata:
799
+ formatted_metadata = self.to_nquads(
800
+ "\n".join(metadata), DEFAULT_RDF_FORMAT
801
+ )
774
802
 
775
- case _:
776
- raise DatasetOutputFormatNotSupported(
777
- f"{output_format} isn't supported!"
778
- )
803
+ result = {
804
+ "assertion": formatted_assertion,
805
+ "operation": {
806
+ "get": self.get_operation_status_object(
807
+ get_public_operation_result, get_public_operation_id
808
+ ),
809
+ },
810
+ }
779
811
 
780
- if content_visibility == KnowledgeAssetContentVisibility:
781
- result = {
782
- **result,
783
- "assertion": formatted_private_assertion,
784
- "assertionId": private_assertion_id,
785
- }
786
- else:
787
- result["private"] = {
788
- "assertion": formatted_private_assertion,
789
- "assertionId": private_assertion_id,
790
- }
791
-
792
- if query_private_operation_id is not None:
793
- result["operation"]["queryPrivate"] = {
794
- "operationId": query_private_operation_id,
795
- "status": query_private_operation_result["status"],
796
- }
812
+ if include_metadata and metadata:
813
+ result["metadata"] = formatted_metadata
797
814
 
798
815
  return result
799
816
 
@@ -825,7 +842,7 @@ class KnowledgeAsset(Module):
825
842
  latest_finalized_state_size,
826
843
  content_asset_storage_address,
827
844
  latest_finalized_state,
828
- DEFAULT_HASH_FUNCTION_ID,
845
+ DefaultParameters.HASH_FUNCTION_ID.value,
829
846
  token_amount or BidSuggestionRange.LOW,
830
847
  )["bidSuggestion"]
831
848
  )
@@ -839,148 +856,37 @@ class KnowledgeAsset(Module):
839
856
  "operation": json.loads(Web3.to_json(receipt)),
840
857
  }
841
858
 
842
- _get_assertion_size = Method(BlockchainRequest.get_assertion_size)
843
- _add_tokens = Method(BlockchainRequest.increase_asset_token_amount)
844
-
845
- def add_tokens(
846
- self,
847
- ual: UAL,
848
- token_amount: Wei | None = None,
849
- ) -> dict[str, UAL | TxReceipt]:
850
- parsed_ual = parse_ual(ual)
851
- blockchain_id, content_asset_storage_address, token_id = (
852
- parsed_ual["blockchain"],
853
- parsed_ual["contract_address"],
854
- parsed_ual["token_id"],
855
- )
856
-
857
- if token_amount is None:
858
- agreement_id = self.get_agreement_id(
859
- content_asset_storage_address, token_id
860
- )
861
- # TODO: Dynamic types for namedtuples?
862
- agreement_data: Type[AgreementData] = self._get_service_agreement_data(
863
- agreement_id
864
- )
865
-
866
- timestamp_now = self._get_block("latest")["timestamp"]
867
- current_epoch = math.floor(
868
- (timestamp_now - agreement_data.startTime) / agreement_data.epochLength
869
- )
870
- epochs_left = agreement_data.epochsNumber - current_epoch
871
-
872
- latest_finalized_state = self._get_latest_assertion_id(token_id)
873
- latest_finalized_state_size = self._get_assertion_size(
874
- latest_finalized_state
875
- )
876
-
877
- token_amount = int(
878
- self._get_bid_suggestion(
879
- blockchain_id,
880
- epochs_left,
881
- latest_finalized_state_size,
882
- content_asset_storage_address,
883
- latest_finalized_state,
884
- DEFAULT_HASH_FUNCTION_ID,
885
- token_amount or BidSuggestionRange.LOW,
886
- )["bidSuggestion"]
887
- ) - sum(agreement_data.tokensInfo)
888
-
889
- if token_amount <= 0:
890
- raise InvalidTokenAmount(
891
- "Token amount is bigger than default suggested amount, "
892
- "please specify exact token_amount if you still want to add "
893
- "more tokens!"
894
- )
895
-
896
- receipt: TxReceipt = self._add_tokens(token_id, token_amount)
897
-
898
- return {
899
- "UAL": ual,
900
- "operation": json.loads(Web3.to_json(receipt)),
901
- }
902
-
903
- _add_update_tokens = Method(BlockchainRequest.increase_asset_update_token_amount)
904
-
905
- def add_update_tokens(
906
- self,
907
- ual: UAL,
908
- token_amount: Wei | None = None,
909
- ) -> dict[str, UAL | TxReceipt]:
910
- parsed_ual = parse_ual(ual)
911
- blockchain_id, content_asset_storage_address, token_id = (
912
- parsed_ual["blockchain"],
913
- parsed_ual["contract_address"],
914
- parsed_ual["token_id"],
915
- )
916
-
917
- if token_amount is None:
918
- agreement_id = self.get_agreement_id(
919
- content_asset_storage_address, token_id
920
- )
921
- # TODO: Dynamic types for namedtuples?
922
- agreement_data: Type[AgreementData] = self._get_service_agreement_data(
923
- agreement_id
924
- )
925
-
926
- timestamp_now = self._get_block("latest")["timestamp"]
927
- current_epoch = math.floor(
928
- (timestamp_now - agreement_data.startTime) / agreement_data.epochLength
929
- )
930
- epochs_left = agreement_data.epochsNumber - current_epoch
931
-
932
- unfinalized_state = self._get_latest_assertion_id(token_id)
933
- unfinalized_state_size = self._get_assertion_size(unfinalized_state)
934
-
935
- token_amount = int(
936
- self._get_bid_suggestion(
937
- blockchain_id,
938
- epochs_left,
939
- unfinalized_state_size,
940
- content_asset_storage_address,
941
- unfinalized_state,
942
- DEFAULT_HASH_FUNCTION_ID,
943
- token_amount or BidSuggestionRange.LOW,
944
- )["bidSuggestion"]
945
- ) - sum(agreement_data.tokensInfo)
946
-
947
- if token_amount <= 0:
948
- raise InvalidTokenAmount(
949
- "Token amount is bigger than default suggested amount, "
950
- "please specify exact token_amount if you still want to add "
951
- "more update tokens!"
952
- )
953
-
954
- receipt: TxReceipt = self._add_update_tokens(token_id, token_amount)
859
+ _get_block = Method(BlockchainRequest.get_block)
955
860
 
956
- return {
957
- "UAL": ual,
958
- "operation": json.loads(Web3.to_json(receipt)),
959
- }
861
+ _get_assertion_size = Method(BlockchainRequest.get_assertion_size)
960
862
 
961
863
  def get_owner(self, ual: UAL) -> Address:
962
864
  token_id = parse_ual(ual)["token_id"]
963
865
 
964
866
  return self._owner(token_id)
965
867
 
966
- _get_assertion_id_by_index = Method(BlockchainRequest.get_assertion_id_by_index)
868
+ def to_jsonld(self, nquads: str):
869
+ options = {
870
+ "algorithm": "URDNA2015",
871
+ "format": "application/n-quads",
872
+ }
967
873
 
968
- def get_agreement_id(self, contract_address: Address, token_id: int) -> HexStr:
969
- first_assertion_id = self._get_assertion_id_by_index(token_id, 0)
970
- keyword = generate_keyword(contract_address, first_assertion_id)
971
- return generate_agreement_id(contract_address, token_id, keyword)
874
+ return jsonld.from_rdf(nquads, options)
972
875
 
973
- _get_operation_result = Method(NodeRequest.get_operation_result)
876
+ def to_nquads(self, content, input_format):
877
+ options = {
878
+ "algorithm": "URDNA2015",
879
+ "format": "application/n-quads",
880
+ }
974
881
 
975
- @retry(catch=OperationNotFinished, max_retries=5, base_delay=1, backoff=2)
976
- def get_operation_result(
977
- self, operation_id: str, operation: str
978
- ) -> NodeResponseDict:
979
- operation_result = self._get_operation_result(
980
- operation_id=operation_id,
981
- operation=operation,
982
- )
882
+ if input_format:
883
+ options["inputFormat"] = input_format
884
+ try:
885
+ jsonld_data = jsonld.from_rdf(content, options)
886
+ canonized = jsonld.to_rdf(jsonld_data, options)
983
887
 
984
- validate_operation_status(operation_result)
888
+ if isinstance(canonized, str):
889
+ return [line for line in canonized.split("\n") if line.strip()]
985
890
 
986
- return operation_result
891
+ except Exception as e:
892
+ raise ValueError(f"Error processing content: {e}")