dkg 8.0.0a2__py3-none-any.whl → 8.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72) hide show
  1. dkg/__init__.py +1 -1
  2. dkg/assertion.py +2 -2
  3. dkg/clients/__init__.py +4 -0
  4. dkg/clients/async_dkg.py +109 -0
  5. dkg/{main.py → clients/dkg.py} +42 -21
  6. dkg/constants.py +117 -6
  7. dkg/data/interfaces/AskStorage.json +366 -0
  8. dkg/data/interfaces/Chronos.json +202 -0
  9. dkg/data/interfaces/Hub.json +294 -2
  10. dkg/data/interfaces/IdentityStorage.json +58 -0
  11. dkg/data/interfaces/{ContentAsset.json → KnowledgeCollection.json} +256 -343
  12. dkg/data/interfaces/KnowledgeCollectionStorage.json +2312 -0
  13. dkg/data/interfaces/Paranet.json +30 -214
  14. dkg/data/interfaces/ParanetIncentivesPoolFactory.json +18 -2
  15. dkg/data/interfaces/ParanetKnowledgeMinersRegistry.json +20 -4
  16. dkg/data/interfaces/{ParanetNeurowebIncentivesPool.json → ParanetNeuroIncentivesPool.json} +7 -7
  17. dkg/data/interfaces/ParanetsRegistry.json +102 -32
  18. dkg/data/interfaces/Token.json +146 -17
  19. dkg/managers/__init__.py +0 -0
  20. dkg/managers/async_manager.py +69 -0
  21. dkg/{manager.py → managers/manager.py} +5 -3
  22. dkg/method.py +5 -2
  23. dkg/modules/__init__.py +0 -0
  24. dkg/modules/asset/__init__.py +0 -0
  25. dkg/modules/asset/asset.py +739 -0
  26. dkg/modules/asset/async_asset.py +751 -0
  27. dkg/modules/async_module.py +66 -0
  28. dkg/modules/graph/__init__.py +0 -0
  29. dkg/modules/graph/async_graph.py +118 -0
  30. dkg/modules/graph/graph.py +94 -0
  31. dkg/{module.py → modules/module.py} +1 -1
  32. dkg/modules/network/__init__.py +0 -0
  33. dkg/{network.py → modules/network/network.py} +4 -4
  34. dkg/modules/node/__init__.py +0 -0
  35. dkg/modules/node/async_node.py +39 -0
  36. dkg/{node.py → modules/node/node.py} +2 -2
  37. dkg/modules/paranet/__init__.py +0 -0
  38. dkg/{paranet.py → modules/paranet/paranet.py} +2 -2
  39. dkg/providers/__init__.py +9 -2
  40. dkg/providers/blockchain/__init__.py +4 -0
  41. dkg/providers/blockchain/async_blockchain.py +245 -0
  42. dkg/providers/blockchain/base_blockchain.py +102 -0
  43. dkg/providers/{blockchain.py → blockchain/blockchain.py} +15 -96
  44. dkg/providers/node/__init__.py +4 -0
  45. dkg/providers/node/async_node_http.py +72 -0
  46. dkg/providers/node/base_node_http.py +25 -0
  47. dkg/providers/{node_http.py → node/node_http.py} +12 -10
  48. dkg/services/__init__.py +0 -0
  49. dkg/services/blockchain_services/__init__.py +0 -0
  50. dkg/services/blockchain_services/async_blockchain_service.py +180 -0
  51. dkg/services/blockchain_services/blockchain_service.py +174 -0
  52. dkg/services/input_service.py +183 -0
  53. dkg/services/node_services/__init__.py +0 -0
  54. dkg/services/node_services/async_node_service.py +184 -0
  55. dkg/services/node_services/node_service.py +167 -0
  56. dkg/types/__init__.py +11 -11
  57. dkg/utils/blockchain_request.py +68 -42
  58. dkg/utils/knowledge_asset_tools.py +5 -0
  59. dkg/utils/knowledge_collection_tools.py +248 -0
  60. dkg/utils/node_request.py +60 -13
  61. dkg/utils/rdf.py +9 -3
  62. {dkg-8.0.0a2.dist-info → dkg-8.0.1.dist-info}/METADATA +28 -19
  63. dkg-8.0.1.dist-info/RECORD +82 -0
  64. {dkg-8.0.0a2.dist-info → dkg-8.0.1.dist-info}/WHEEL +1 -1
  65. dkg/asset.py +0 -912
  66. dkg/data/interfaces/AssertionStorage.json +0 -229
  67. dkg/data/interfaces/ContentAssetStorage.json +0 -706
  68. dkg/data/interfaces/ServiceAgreementStorageProxy.json +0 -1314
  69. dkg/graph.py +0 -63
  70. dkg-8.0.0a2.dist-info/RECORD +0 -52
  71. {dkg-8.0.0a2.dist-info → dkg-8.0.1.dist-info}/LICENSE +0 -0
  72. {dkg-8.0.0a2.dist-info → dkg-8.0.1.dist-info}/NOTICE +0 -0
@@ -0,0 +1,167 @@
1
+ from dkg.managers.manager import DefaultRequestManager
2
+ from dkg.method import Method
3
+ from dkg.modules.module import Module
4
+ import time
5
+ from dkg.utils.decorators import retry
6
+ from dkg.exceptions import (
7
+ OperationNotFinished,
8
+ )
9
+ from dkg.utils.node_request import (
10
+ NodeRequest,
11
+ validate_operation_status,
12
+ )
13
+
14
+
15
class NodeService(Module):
    """Synchronous service layer over the DKG node's HTTP endpoints.

    Wraps the raw ``NodeRequest`` endpoint bindings below with the
    polling / retry loops used by the higher-level asset and graph modules.
    """

    def __init__(self, manager: DefaultRequestManager):
        # The manager dispatches every Method call to the node provider.
        self.manager = manager

    # Raw endpoint bindings, resolved per call through the Method descriptor.
    _get_operation_result = Method(NodeRequest.get_operation_result)
    _finality_status = Method(NodeRequest.finality_status)
    _ask = Method(NodeRequest.ask)
    _publish = Method(NodeRequest.publish)
    _get = Method(NodeRequest.get)
    _query = Method(NodeRequest.query)

    def get_operation_result(
        self, operation_id: str, operation: str, max_retries: int, frequency: int
    ):
        """Poll the node for an async operation's result until it finishes.

        Retries up to ``max_retries`` times, waiting ``frequency`` seconds
        between attempts (constant backoff); ``validate_operation_status``
        raises ``OperationNotFinished`` (caught by @retry) while pending.
        """

        @retry(
            catch=OperationNotFinished,
            max_retries=max_retries,
            base_delay=frequency,
            backoff=1,  # factor 1 keeps the delay constant between attempts
        )
        def retry_get_operation_result():
            operation_result = self._get_operation_result(
                operation_id=operation_id,
                operation=operation,
            )
            # Raises OperationNotFinished until the node reports completion.
            validate_operation_status(operation_result)

            return operation_result

        return retry_get_operation_result()

    def finality_status(
        self,
        ual: str,
        required_confirmations: int,
        max_number_of_retries: int,
        frequency: int,
    ):
        """Poll the node until *ual* reaches the required finality count.

        Returns the last observed finality value; this may be below
        ``required_confirmations`` if the retry budget is exhausted.
        """
        retries = 0
        finality = 0

        while finality < required_confirmations and retries <= max_number_of_retries:
            # NOTE(review): unreachable — the while condition already caps
            # retries at max_number_of_retries, so this never raises.
            if retries > max_number_of_retries:
                raise Exception(
                    f"Unable to achieve required confirmations. "
                    f"Max number of retries ({max_number_of_retries}) reached."
                )

            # Sleep between attempts (no delay before the first one).
            if retries > 0:
                time.sleep(frequency)

            retries += 1

            try:
                try:
                    response = self._finality_status(ual=ual)
                except Exception as e:
                    # Best-effort: a failed request counts as "no progress".
                    response = None
                    print(f"failed: {e}")

                if response is not None:
                    # Response is treated as a dict here; the ask() endpoint
                    # below decodes with .json() instead — shapes differ.
                    finality = response.get("finality", 0)
                    if finality >= required_confirmations:
                        break

            except Exception:
                finality = 0

        return finality

    def ask(self, ual, required_confirmations, max_number_of_retries, frequency):
        """Poll the node until enough replications confirm *ual*.

        Returns the confirmation count actually observed (0 if the required
        count was never reached within the retry budget).
        """
        confirmations_count = 0
        retries = 0

        while (
            confirmations_count < required_confirmations
            and retries < max_number_of_retries
        ):
            # NOTE(review): unreachable — the while condition keeps retries
            # strictly below max_number_of_retries.
            if retries > max_number_of_retries:
                raise Exception(
                    f"Unable to achieve required confirmations. "
                    f"Max number of retries ({max_number_of_retries}) reached."
                )

            # Sleep between attempts (no delay before the first one).
            if retries > 0:
                time.sleep(frequency)

            retries += 1

            try:
                try:
                    response = self._ask(
                        ual=ual, minimumNumberOfNodeReplications=required_confirmations
                    )
                except Exception as e:
                    response = None
                    print(f"failed: {e}")

                if response is not None:
                    # NOTE(review): raw response decoded via .json() here,
                    # unlike finality_status above — confirm the two
                    # endpoints really return different shapes.
                    number_of_confirmations = response.json().get(
                        "numberOfConfirmations", 0
                    )
                    if number_of_confirmations >= required_confirmations:
                        confirmations_count = number_of_confirmations

            except Exception as e:
                confirmations_count = 0
                print(f"Retry {retries + 1}/{max_number_of_retries} failed: {e}")

        return confirmations_count

    def publish(
        self,
        dataset_root,
        dataset,
        blockchain_id,
        hash_function_id,
        minimum_number_of_node_replications,
    ):
        """Submit a dataset for publishing; returns the node's raw response."""
        return self._publish(
            dataset_root,
            dataset,
            blockchain_id,
            hash_function_id,
            minimum_number_of_node_replications,
        )

    def get(
        self,
        ual_with_state,
        content_type,
        include_metadata,
        hash_function_id,
        paranet_ual,
        subject_ual,
    ):
        """Fetch knowledge-asset content; returns the node's raw response."""
        return self._get(
            ual_with_state,
            content_type,
            include_metadata,
            hash_function_id,
            paranet_ual,
            subject_ual,
        )

    def query(
        self,
        query,
        query_type,
        repository,
        paranet_ual,
    ):
        """Run a SPARQL query against the node; returns the raw result."""
        return self._query(query, query_type, repository, paranet_ual)
dkg/types/__init__.py CHANGED
@@ -1,17 +1,17 @@
1
1
  from .general import (
2
- AutoStrEnum,
3
- AutoStrEnumCapitalize,
4
- AutoStrEnumUpperCase,
5
- ) # NOQA: F401
2
+ AutoStrEnum, # NOQA: F401
3
+ AutoStrEnumCapitalize, # NOQA: F401
4
+ AutoStrEnumUpperCase, # NOQA: F401
5
+ )
6
6
  from .blockchain import (
7
- ABI,
8
- ABIElement,
9
- ABIError,
7
+ ABI, # NOQA: F401
8
+ ABIElement, # NOQA: F401
9
+ ABIError, # NOQA: F401
10
10
  ABIEvent, # NOQA: F401
11
- ABIFunction,
12
- ABIParameter,
13
- AgreementData,
14
- Environment,
11
+ ABIFunction, # NOQA: F401
12
+ ABIParameter, # NOQA: F401
13
+ AgreementData, # NOQA: F401
14
+ Environment, # NOQA: F401
15
15
  )
16
16
  from .dkg_node import UAL # NOQA: F401
17
17
  from .encoding import BytesLike, DataHexStr, HexStr # NOQA: F401
@@ -16,7 +16,7 @@
16
16
  # under the License.
17
17
 
18
18
  from dataclasses import dataclass, field
19
- from typing import Type
19
+ from typing import Type, Dict, Any
20
20
 
21
21
  from dkg.dataclasses import ParanetIncentivizationType
22
22
  from dkg.types import Address, HexStr, Wei
@@ -52,6 +52,18 @@ class ContractCall(ContractInteraction):
52
52
  pass
53
53
 
54
54
 
55
@dataclass
class KnowledgeCollectionResult:
    """Outcome of a createKnowledgeCollection transaction."""

    # Identifier minted for the new knowledge collection.
    knowledge_collection_id: int
    # Transaction receipt as a plain mapping — presumably the web3
    # receipt dict; confirm against the blockchain provider.
    receipt: Dict[str, Any]
59
+
60
+
61
@dataclass
class AllowanceResult:
    """Outcome of an ERC-20 allowance adjustment check."""

    # True if the token allowance was increased as part of the operation.
    allowance_increased: bool
    # Difference between the required and the previously approved amount.
    allowance_gap: int
65
+
66
+
55
67
  class BlockchainRequest:
56
68
  chain_id = JSONRPCRequest("chain_id")
57
69
  get_block = JSONRPCRequest("get_block", args={"block_identifier": str | int})
@@ -67,6 +79,30 @@ class BlockchainRequest:
67
79
  args={"assetStorageName": str},
68
80
  )
69
81
 
82
+ key_is_operational_wallet = ContractCall(
83
+ contract="IdentityStorage",
84
+ function="keyHasPurpose",
85
+ args={"identityId": int, "_key": Address, "_purpose": int},
86
+ )
87
+
88
+ time_until_next_epoch = ContractCall(
89
+ contract="Chronos",
90
+ function="timeUntilNextEpoch",
91
+ args={},
92
+ )
93
+
94
+ epoch_length = ContractCall(
95
+ contract="Chronos",
96
+ function="epochLength",
97
+ args={},
98
+ )
99
+
100
+ get_stake_weighted_average_ask = ContractCall(
101
+ contract="AskStorage",
102
+ function="getStakeWeightedAverageAsk",
103
+ args={},
104
+ )
105
+
70
106
  allowance = ContractCall(
71
107
  contract="Token",
72
108
  function="allowance",
@@ -83,11 +119,6 @@ class BlockchainRequest:
83
119
  args={"spender": Address, "subtractedValue": Wei},
84
120
  )
85
121
 
86
- create_asset = ContractTransaction(
87
- contract="ContentAsset",
88
- function="createAsset",
89
- args={"args": dict[str, bytes | int | Wei | bool]},
90
- )
91
122
  burn_asset = ContractTransaction(
92
123
  contract="ContentAsset",
93
124
  function="burnAsset",
@@ -98,27 +129,12 @@ class BlockchainRequest:
98
129
  function="extendAssetStoringPeriod",
99
130
  args={"tokenId": int, "epochsNumber": int, "tokenAmount": int},
100
131
  )
101
- increase_asset_token_amount = ContractTransaction(
102
- contract="ContentAsset",
103
- function="increaseAssetTokenAmount",
104
- args={"tokenId": int, "tokenAmount": int},
105
- )
106
132
 
107
133
  transfer_asset = ContractTransaction(
108
134
  contract="ContentAssetStorage",
109
135
  function="transferFrom",
110
136
  args={"from": Address, "to": Address, "tokenId": int},
111
137
  )
112
- get_assertion_ids = ContractCall(
113
- contract="ContentAssetStorage",
114
- function="getAssertionIds",
115
- args={"tokenId": int},
116
- )
117
- get_assertion_id_by_index = ContractCall(
118
- contract="ContentAssetStorage",
119
- function="getAssertionIdByIndex",
120
- args={"tokenId": int, "index": int},
121
- )
122
138
  get_latest_assertion_id = ContractCall(
123
139
  contract="ContentAssetStorage",
124
140
  function="getLatestAssertionId",
@@ -130,18 +146,6 @@ class BlockchainRequest:
130
146
  args={"tokenId": int},
131
147
  )
132
148
 
133
- get_unfinalized_state = ContractCall(
134
- contract="UnfinalizedStateStorage",
135
- function="getUnfinalizedState",
136
- args={"tokenId": int},
137
- )
138
-
139
- get_service_agreement_data = ContractCall(
140
- contract="ServiceAgreementStorageProxy",
141
- function="getAgreementData",
142
- args={"agreementId": bytes | HexStr},
143
- )
144
-
145
149
  get_assertion_size = ContractCall(
146
150
  contract="AssertionStorage",
147
151
  function="getAssertionSize",
@@ -299,15 +303,6 @@ class BlockchainRequest:
299
303
  "paranetServiceAddresses": list[Address],
300
304
  },
301
305
  )
302
- mint_knowledge_asset = ContractTransaction(
303
- contract="Paranet",
304
- function="mintKnowledgeAsset",
305
- args={
306
- "paranetKAStorageContract": Address,
307
- "paranetKATokenId": int,
308
- "knowledgeAssetArgs": dict[str, bytes | int | Wei | bool],
309
- },
310
- )
311
306
  submit_knowledge_asset = ContractTransaction(
312
307
  contract="Paranet",
313
308
  function="submitKnowledgeAsset",
@@ -395,3 +390,34 @@ class BlockchainRequest:
395
390
  function="claimIncentivizationProposalVoterReward",
396
391
  args={},
397
392
  )
393
+
394
+ create_knowledge_collection = ContractTransaction(
395
+ contract="KnowledgeCollection",
396
+ function="createKnowledgeCollection",
397
+ args={
398
+ "publishOperationId": str,
399
+ "merkleRoot": bytes,
400
+ "knowledgeAssetsAmount": int,
401
+ "byteSize": int,
402
+ "epochs": int,
403
+ "tokenAmount": int,
404
+ "isImmutable": bool,
405
+ "paymaster": Address,
406
+ "publisherNodeIdentityId": int,
407
+ "publisherNodeR": bytes,
408
+ "publisherNodeVS": bytes,
409
+ "identityIds": list[int],
410
+ "r": list[bytes],
411
+ "vs": list[bytes],
412
+ },
413
+ )
414
+
415
+ mint_knowledge_asset = ContractTransaction(
416
+ contract="Paranet",
417
+ function="mintKnowledgeAsset",
418
+ args={
419
+ "paranetKAStorageContract": Address,
420
+ "paranetKATokenId": int,
421
+ "knowledgeAssetArgs": dict,
422
+ },
423
+ )
@@ -0,0 +1,5 @@
1
+ from uuid import uuid4
2
+
3
+
4
def generate_named_node():
    """Mint a fresh, globally unique named-node identifier in the ``uuid:`` scheme."""
    unique_id = uuid4()
    return "uuid:" + str(unique_id)
@@ -0,0 +1,248 @@
1
+ from typing import Literal
2
+ from dkg.constants import CHUNK_BYTE_SIZE
3
+ from dkg.exceptions import DatasetInputFormatNotSupported, InvalidDataset
4
+ from dkg.types import JSONLD, NQuads
5
+ from pyld import jsonld
6
+ from dkg.constants import DEFAULT_RDF_FORMAT, DEFAULT_CANON_ALGORITHM
7
+ from rdflib import Graph, BNode, URIRef, Literal as RDFLiteral
8
+ from uuid import uuid4
9
+ from web3 import Web3
10
+ import math
11
+ import hashlib
12
+ from eth_abi.packed import encode_packed
13
+
14
+
15
def normalize_dataset(
    dataset: JSONLD | NQuads,
    input_format: Literal["JSON-LD", "N-Quads"] = "JSON-LD",
    output_format=DEFAULT_RDF_FORMAT,
    algorithm=DEFAULT_CANON_ALGORITHM,
) -> NQuads:
    """Canonicalize *dataset* and return it as a list of N-Quad strings.

    Raises DatasetInputFormatNotSupported for unknown input formats and
    InvalidDataset when canonicalization yields no quads.
    """
    options = {
        "algorithm": algorithm,
        "format": output_format,
    }

    fmt = input_format.lower()
    if fmt in ("json-ld", "jsonld"):
        # pyld's default input handling covers JSON-LD.
        pass
    elif fmt in ("n-quads", "nquads"):
        options["inputFormat"] = "application/n-quads"
    else:
        raise DatasetInputFormatNotSupported(
            f"Dataset input format isn't supported: {input_format}. "
            "Supported formats: JSON-LD / N-Quads."
        )

    canonical = jsonld.normalize(dataset, options)
    quads = [line for line in canonical.split("\n") if line]

    if not quads:
        raise InvalidDataset("Invalid dataset, no quads were extracted.")

    return quads
44
+
45
+
46
def is_empty_dict(dictionary: dict) -> bool:
    """Return True iff *dictionary* is a dict with no entries.

    Bug fix: the original evaluated ``len(dictionary.keys()) == 0`` before
    the ``isinstance`` guard, so non-dict input raised AttributeError
    instead of returning False; the isinstance check now short-circuits.
    """
    return isinstance(dictionary, dict) and not dictionary
48
+
49
+
50
def format_dataset(
    content: dict,
    input_format: Literal["JSON-LD", "N-Quads"] = "JSON-LD",
    output_format=DEFAULT_RDF_FORMAT,
    algorithm=DEFAULT_CANON_ALGORITHM,
):
    """Split user content into normalized public/private assertions.

    Content without explicit "public"/"private" sections is treated as
    public.  Returns ``{"public": [...]}`` plus ``"private"`` when a
    non-empty private section was provided.

    Raises ValueError when neither section yields any n-quads.
    """
    private_assertion = None
    private_content = content.get("private")
    if private_content and not is_empty_dict(private_content):
        private_assertion = normalize_dataset(
            private_content, input_format, output_format, algorithm
        )
    elif not content.get("public"):
        # Bare content with no public/private wrapper is implicitly public.
        content = {"public": content}

    public_assertion = []
    if content.get("public"):
        public_assertion = normalize_dataset(
            content.get("public"), input_format, output_format, algorithm
        )

    # Bug fix: the original tested `public_assertion and len(public_assertion)
    # == 0 and private_assertion and len(private_assertion) == 0`, which is
    # always False (a truthy list is never empty), so corrupted input was
    # never reported.  Raise when both sections produced nothing.
    if not public_assertion and not private_assertion:
        raise ValueError("File format is corrupted, no n-quads are extracted.")

    dataset = {"public": public_assertion}
    if private_assertion:
        dataset["private"] = private_assertion

    return dataset
83
+
84
+
85
def split_into_chunks(quads, chunk_size_bytes=32):
    """Join *quads* with newlines and slice the UTF-8 encoding into chunks.

    Each chunk holds at most ``chunk_size_bytes`` bytes and is decoded back
    to str.  NOTE(review): a multi-byte UTF-8 character falling exactly on a
    chunk boundary would make ``decode`` raise — presumably inputs are
    ASCII-escaped N-Quads; confirm upstream.
    """
    # Single encode of the newline-joined payload, then fixed-stride slicing.
    payload = "\n".join(quads).encode("utf-8")
    return [
        payload[offset : offset + chunk_size_bytes].decode("utf-8")
        for offset in range(0, len(payload), chunk_size_bytes)
    ]
103
+
104
+
105
def calculate_merkle_root(quads: list[str], chunk_size_bytes: int = CHUNK_BYTE_SIZE):
    """Compute the hex-prefixed Merkle root of *quads*.

    The quads are chunked to ``chunk_size_bytes``; each leaf is
    keccak256(chunk, index).  Sibling pairs are byte-sorted before hashing
    (order-independent pairs), and an odd leaf is promoted unchanged to the
    next level — this must stay byte-compatible with the on-chain verifier.
    """
    chunks = split_into_chunks(quads, chunk_size_bytes)

    # Create leaves using solidityKeccak256 equivalent
    leaves = [
        bytes.fromhex(Web3.solidity_keccak(["string", "uint256"], [chunk, index]).hex())
        for index, chunk in enumerate(chunks)
    ]

    while len(leaves) > 1:
        next_level = []

        for i in range(0, len(leaves), 2):
            left = leaves[i]

            # Odd leaf out: carry it up a level unhashed.
            if i + 1 >= len(leaves):
                next_level.append(left)
                break

            right = leaves[i + 1]

            # Combine and sort the leaves (byte-wise) so the pair hash does
            # not depend on left/right position.
            combined = [left, right]
            combined.sort()

            # Calculate the hash of the combined leaves
            hash_value = Web3.keccak(b"".join(combined))
            next_level.append(hash_value)

        leaves = next_level

    return f"0x{leaves[0].hex()}"
137
+
138
+
139
def generate_missing_ids_for_blank_nodes(nquads_list: list[str] | None) -> list[str]:
    """Replace every blank node in *nquads_list* with a stable ``uuid:`` IRI.

    Each distinct blank-node label gets one generated IRI (reused across
    quads), so co-references are preserved.  Quads are re-serialized one by
    one to keep the input order.  Empty/None input yields ``[""]``.

    NOTE(review): parsing each quad into its own Graph means blank-node
    labels are scoped per quad by rdflib — labels shared across separate
    input lines may NOT map to the same generated IRI; confirm intended.
    """
    if not nquads_list:
        return [""]

    # Maps original blank-node label -> generated uuid: IRI.
    generated_ids = {}

    def replace_blank_node(term):
        # Handle blank nodes
        if isinstance(term, BNode):
            if str(term) not in generated_ids:
                generated_ids[str(term)] = URIRef(f"uuid:{str(uuid4())}")
            return generated_ids[str(term)]

        return term  # Return IRIs or Literals unchanged

    # Create a temporary graph for parsing individual quads
    result = []

    # Process each N-Quad string individually to maintain order
    for nquad in nquads_list:
        if not nquad.strip():
            continue

        # Parse single N-Quad
        g = Graph()
        g.parse(data=nquad, format="nquads")

        # Get the triple and replace blank nodes
        for s, p, o in g:
            updated_quad = (
                replace_blank_node(s),
                replace_blank_node(p),
                replace_blank_node(o),
            )
            # Format as N-Quad string
            result.append(
                f"{updated_quad[0].n3()} {updated_quad[1].n3()} {updated_quad[2].n3()} ."
            )

    return result
179
+
180
+
181
def group_nquads_by_subject(nquads_list: list[str], sort: bool = False):
    """Group N-Quad strings by subject, preserving first-seen order.

    Returns a list of groups, one per distinct subject, each a list of
    re-serialized quad strings.  With ``sort=True`` the groups are ordered
    by their subject key.

    NOTE(review): literals are re-serialized as plain ``"value"`` — any
    datatype or language tag on the original literal is dropped; and a
    blank-node object would be wrapped as an IRI (``<...>``).  Confirm
    inputs have already had blank nodes replaced (see
    generate_missing_ids_for_blank_nodes).
    """
    grouped = {}

    # Process each quad in original order
    for nquad in nquads_list:
        if not nquad.strip():  # Skip empty lines
            continue

        # Parse single quad
        g = Graph()
        g.parse(data=nquad, format="nquads")
        quad = next(iter(g))
        subject, predicate, obj = quad

        # Get subject key — RDF-star quoted triples (terms exposing
        # .subject/.predicate/.object) get the <<<...>> form.
        subject_key = (
            f"<<<{subject.subject}> <{subject.predicate}> <{subject.object}>>"
            if hasattr(subject, "subject")
            else f"<{subject}>"
        )

        # Initialize group if needed
        if subject_key not in grouped:
            grouped[subject_key] = []

        # Format object
        object_value = f'"{obj}"' if isinstance(obj, RDFLiteral) else f"<{obj}>"

        # Add quad to group
        quad_string = f"{subject_key} <{predicate}> {object_value} ."
        grouped[subject_key].append(quad_string)

    # Return grouped quads (sorted if requested)
    grouped_items = sorted(grouped.items()) if sort else grouped.items()
    return [quads for _, quads in grouped_items]
216
+
217
+
218
def calculate_number_of_chunks(quads, chunk_size_bytes=CHUNK_BYTE_SIZE):
    """Return how many fixed-size chunks the newline-joined quads occupy.

    Mirrors the chunking in split_into_chunks: the quads are joined with
    newlines, measured in UTF-8 bytes, and divided by the chunk size,
    rounding up.
    """
    total_bytes = len("\n".join(quads).encode("utf-8"))
    return math.ceil(total_bytes / chunk_size_bytes)
226
+
227
+
228
def count_distinct_subjects(nquads_list: list[str]) -> int:
    """Return the number of unique subjects across the given N-Quads."""
    # Parse all quads at once into a single graph.
    graph = Graph()
    graph.parse(data="\n".join(nquads_list), format="nquads")

    # Collect each triple's subject (position 0) as a string.
    unique_subjects = set()
    for s, _, _ in graph:
        unique_subjects.add(str(s))

    return len(unique_subjects)
239
+
240
+
241
def solidity_packed_sha256(types: list[str], values: list) -> str:
    """SHA-256 of the Solidity ``abi.encodePacked`` encoding, hex-prefixed.

    *types* are Solidity type names matched positionally against *values*.
    """
    packed = encode_packed(types, values)
    digest = hashlib.sha256(packed).hexdigest()
    return "0x" + digest