dkg 8.0.0a3__py3-none-any.whl → 8.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dkg/__init__.py +1 -1
- dkg/assertion.py +2 -2
- dkg/clients/__init__.py +4 -0
- dkg/clients/async_dkg.py +109 -0
- dkg/{main.py → clients/dkg.py} +42 -21
- dkg/constants.py +117 -6
- dkg/data/interfaces/AskStorage.json +366 -0
- dkg/data/interfaces/Chronos.json +202 -0
- dkg/data/interfaces/Hub.json +294 -2
- dkg/data/interfaces/IdentityStorage.json +58 -0
- dkg/data/interfaces/{ContentAsset.json → KnowledgeCollection.json} +256 -343
- dkg/data/interfaces/KnowledgeCollectionStorage.json +2312 -0
- dkg/data/interfaces/Paranet.json +30 -214
- dkg/data/interfaces/ParanetIncentivesPoolFactory.json +18 -2
- dkg/data/interfaces/ParanetKnowledgeMinersRegistry.json +20 -4
- dkg/data/interfaces/{ParanetNeurowebIncentivesPool.json → ParanetNeuroIncentivesPool.json} +7 -7
- dkg/data/interfaces/ParanetsRegistry.json +102 -32
- dkg/data/interfaces/Token.json +146 -17
- dkg/managers/__init__.py +0 -0
- dkg/managers/async_manager.py +69 -0
- dkg/{manager.py → managers/manager.py} +5 -3
- dkg/method.py +5 -2
- dkg/modules/__init__.py +0 -0
- dkg/modules/asset/__init__.py +0 -0
- dkg/modules/asset/asset.py +739 -0
- dkg/modules/asset/async_asset.py +753 -0
- dkg/modules/async_module.py +66 -0
- dkg/modules/graph/__init__.py +0 -0
- dkg/modules/graph/async_graph.py +112 -0
- dkg/modules/graph/graph.py +87 -0
- dkg/{module.py → modules/module.py} +1 -1
- dkg/modules/network/__init__.py +0 -0
- dkg/{network.py → modules/network/network.py} +4 -4
- dkg/modules/node/__init__.py +0 -0
- dkg/modules/node/async_node.py +39 -0
- dkg/{node.py → modules/node/node.py} +2 -2
- dkg/modules/paranet/__init__.py +0 -0
- dkg/{paranet.py → modules/paranet/paranet.py} +2 -6
- dkg/providers/__init__.py +9 -2
- dkg/providers/blockchain/__init__.py +4 -0
- dkg/providers/blockchain/async_blockchain.py +245 -0
- dkg/providers/blockchain/base_blockchain.py +102 -0
- dkg/providers/{blockchain.py → blockchain/blockchain.py} +15 -96
- dkg/providers/node/__init__.py +4 -0
- dkg/providers/node/async_node_http.py +72 -0
- dkg/providers/node/base_node_http.py +25 -0
- dkg/providers/{node_http.py → node/node_http.py} +12 -10
- dkg/services/__init__.py +0 -0
- dkg/services/blockchain_services/__init__.py +0 -0
- dkg/services/blockchain_services/async_blockchain_service.py +180 -0
- dkg/services/blockchain_services/blockchain_service.py +174 -0
- dkg/services/input_service.py +181 -0
- dkg/services/node_services/__init__.py +0 -0
- dkg/services/node_services/async_node_service.py +184 -0
- dkg/services/node_services/node_service.py +167 -0
- dkg/types/__init__.py +11 -11
- dkg/utils/blockchain_request.py +76 -50
- dkg/utils/knowledge_asset_tools.py +5 -0
- dkg/utils/knowledge_collection_tools.py +248 -0
- dkg/utils/node_request.py +60 -14
- dkg/utils/rdf.py +9 -3
- {dkg-8.0.0a3.dist-info → dkg-8.0.2.dist-info}/METADATA +28 -19
- dkg-8.0.2.dist-info/RECORD +82 -0
- {dkg-8.0.0a3.dist-info → dkg-8.0.2.dist-info}/WHEEL +1 -1
- dkg/asset.py +0 -912
- dkg/data/interfaces/AssertionStorage.json +0 -229
- dkg/data/interfaces/ContentAssetStorage.json +0 -706
- dkg/data/interfaces/ServiceAgreementStorageProxy.json +0 -1314
- dkg/graph.py +0 -63
- dkg-8.0.0a3.dist-info/RECORD +0 -52
- {dkg-8.0.0a3.dist-info → dkg-8.0.2.dist-info}/LICENSE +0 -0
- {dkg-8.0.0a3.dist-info → dkg-8.0.2.dist-info}/NOTICE +0 -0
dkg/types/__init__.py
CHANGED
```diff
@@ -1,17 +1,17 @@
 from .general import (
-    AutoStrEnum,
-    AutoStrEnumCapitalize,
-    AutoStrEnumUpperCase,
-)
+    AutoStrEnum,  # NOQA: F401
+    AutoStrEnumCapitalize,  # NOQA: F401
+    AutoStrEnumUpperCase,  # NOQA: F401
+)
 from .blockchain import (
-    ABI,
-    ABIElement,
-    ABIError,
+    ABI,  # NOQA: F401
+    ABIElement,  # NOQA: F401
+    ABIError,  # NOQA: F401
     ABIEvent,  # NOQA: F401
-    ABIFunction,
-    ABIParameter,
-    AgreementData,
-    Environment,
+    ABIFunction,  # NOQA: F401
+    ABIParameter,  # NOQA: F401
+    AgreementData,  # NOQA: F401
+    Environment,  # NOQA: F401
 )
 from .dkg_node import UAL  # NOQA: F401
 from .encoding import BytesLike, DataHexStr, HexStr  # NOQA: F401
```
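The only change here is annotation: `# NOQA: F401` tells flake8 these imports are intentional re-exports, not unused names. A minimal sketch of the consumer side this protects (the helper function is hypothetical):

```python
# Downstream modules import these names from dkg.types rather than from the
# submodules that define them; F401 keeps linters from "fixing" the
# re-exports away.
from dkg.types import UAL, Address

def format_owner(ual: UAL, owner: Address) -> str:  # hypothetical helper
    return f"{ual} is owned by {owner}"
```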
dkg/utils/blockchain_request.py
CHANGED
```diff
@@ -16,7 +16,7 @@
 # under the License.
 
 from dataclasses import dataclass, field
-from typing import Type
+from typing import Type, Dict, Any
 
 from dkg.dataclasses import ParanetIncentivizationType
 from dkg.types import Address, HexStr, Wei
```
```diff
@@ -52,6 +52,18 @@ class ContractCall(ContractInteraction):
     pass
 
 
+@dataclass
+class KnowledgeCollectionResult:
+    knowledge_collection_id: int
+    receipt: Dict[str, Any]
+
+
+@dataclass
+class AllowanceResult:
+    allowance_increased: bool
+    allowance_gap: int
+
+
 class BlockchainRequest:
     chain_id = JSONRPCRequest("chain_id")
     get_block = JSONRPCRequest("get_block", args={"block_identifier": str | int})
```
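`KnowledgeCollectionResult` and `AllowanceResult` are plain result containers for the new publish flow. A minimal sketch of the allowance bookkeeping they suggest; the helper and its logic are assumptions, not SDK code:

```python
from dataclasses import dataclass

@dataclass
class AllowanceResult:
    allowance_increased: bool
    allowance_gap: int

def plan_allowance_top_up(current: int, required: int) -> AllowanceResult:
    # Hypothetical helper: if the current ERC-20 allowance cannot cover the
    # required token amount, report the shortfall that a subsequent
    # increase_allowance transaction would need to close.
    gap = max(required - current, 0)
    return AllowanceResult(allowance_increased=gap > 0, allowance_gap=gap)

result = plan_allowance_top_up(current=5 * 10**17, required=10**18)
assert result.allowance_increased and result.allowance_gap == 5 * 10**17
```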
```diff
@@ -67,6 +79,30 @@ class BlockchainRequest:
         args={"assetStorageName": str},
     )
 
+    key_is_operational_wallet = ContractCall(
+        contract="IdentityStorage",
+        function="keyHasPurpose",
+        args={"identityId": int, "_key": Address, "_purpose": int},
+    )
+
+    time_until_next_epoch = ContractCall(
+        contract="Chronos",
+        function="timeUntilNextEpoch",
+        args={},
+    )
+
+    epoch_length = ContractCall(
+        contract="Chronos",
+        function="epochLength",
+        args={},
+    )
+
+    get_stake_weighted_average_ask = ContractCall(
+        contract="AskStorage",
+        function="getStakeWeightedAverageAsk",
+        args={},
+    )
+
     allowance = ContractCall(
         contract="Token",
         function="allowance",
```
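`ContractCall` entries like these are declarative: they name the target contract, its ABI function, and the expected argument types, and the blockchain provider turns them into `eth_call`s. A standalone sketch of the equivalent raw web3.py call, assuming `Chronos.json` holds the plain ABI array and using placeholder RPC/address values:

```python
import json
from web3 import Web3

w3 = Web3(Web3.HTTPProvider("https://rpc.example.org"))  # placeholder RPC

# Placeholder address; the SDK resolves real addresses through the Hub.
CHRONOS_ADDRESS = "0x0000000000000000000000000000000000000000"

with open("dkg/data/interfaces/Chronos.json") as f:
    chronos_abi = json.load(f)  # assumption: file is the raw ABI list

chronos = w3.eth.contract(address=CHRONOS_ADDRESS, abi=chronos_abi)

# The two parameterless calls declared above:
seconds_remaining = chronos.functions.timeUntilNextEpoch().call()
epoch_seconds = chronos.functions.epochLength().call()
print(f"{seconds_remaining}s remaining of a {epoch_seconds}s epoch")
```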
```diff
@@ -83,11 +119,6 @@ class BlockchainRequest:
         args={"spender": Address, "subtractedValue": Wei},
     )
 
-    create_asset = ContractTransaction(
-        contract="ContentAsset",
-        function="createAsset",
-        args={"args": dict[str, bytes | int | Wei | bool]},
-    )
     burn_asset = ContractTransaction(
         contract="ContentAsset",
         function="burnAsset",
```
```diff
@@ -98,27 +129,12 @@ class BlockchainRequest:
         function="extendAssetStoringPeriod",
         args={"tokenId": int, "epochsNumber": int, "tokenAmount": int},
     )
-    increase_asset_token_amount = ContractTransaction(
-        contract="ContentAsset",
-        function="increaseAssetTokenAmount",
-        args={"tokenId": int, "tokenAmount": int},
-    )
 
     transfer_asset = ContractTransaction(
         contract="ContentAssetStorage",
         function="transferFrom",
         args={"from": Address, "to": Address, "tokenId": int},
     )
-    get_assertion_ids = ContractCall(
-        contract="ContentAssetStorage",
-        function="getAssertionIds",
-        args={"tokenId": int},
-    )
-    get_assertion_id_by_index = ContractCall(
-        contract="ContentAssetStorage",
-        function="getAssertionIdByIndex",
-        args={"tokenId": int, "index": int},
-    )
     get_latest_assertion_id = ContractCall(
         contract="ContentAssetStorage",
         function="getLatestAssertionId",
```
```diff
@@ -130,18 +146,6 @@ class BlockchainRequest:
         args={"tokenId": int},
     )
 
-    get_unfinalized_state = ContractCall(
-        contract="UnfinalizedStateStorage",
-        function="getUnfinalizedState",
-        args={"tokenId": int},
-    )
-
-    get_service_agreement_data = ContractCall(
-        contract="ServiceAgreementStorageProxy",
-        function="getAgreementData",
-        args={"agreementId": bytes | HexStr},
-    )
-
     get_assertion_size = ContractCall(
         contract="AssertionStorage",
         function="getAssertionSize",
```
```diff
@@ -299,23 +303,14 @@ class BlockchainRequest:
             "paranetServiceAddresses": list[Address],
         },
     )
-
+    submit_knowledge_collection = ContractTransaction(
         contract="Paranet",
-        function="
+        function="submitKnowledgeCollection",
         args={
-            "
-            "
-            "
-
-    )
-    submit_knowledge_asset = ContractTransaction(
-        contract="Paranet",
-        function="submitKnowledgeAsset",
-        args={
-            "paranetKAStorageContract": Address,
-            "paranetKATokenId": int,
-            "knowledgeAssetStorageContract": Address,
-            "knowledgeAssetTokenId": int,
+            "paranetKCStorageContract": Address,
+            "paranetKnowledgeCollectionId": int,
+            "knowledgeCollectionStorageContract": Address,
+            "knowledgeCollectionTokenId": int,
         },
     )
 
```

(Some removed lines here and in later hunks are truncated in the upstream diff view.)
```diff
@@ -339,9 +334,9 @@ class BlockchainRequest:
         },
     )
 
-
+    get_updating_knowledge_collection_states = ContractCall(
         contract="ParanetKnowledgeMinersRegistry",
-        function="
+        function="getUpdatingKnowledgeCollectionStates",
         args={
             "miner": Address,
             "paranetId": HexStr,
```
```diff
@@ -395,3 +390,34 @@
         function="claimIncentivizationProposalVoterReward",
         args={},
     )
+
+    create_knowledge_collection = ContractTransaction(
+        contract="KnowledgeCollection",
+        function="createKnowledgeCollection",
+        args={
+            "publishOperationId": str,
+            "merkleRoot": bytes,
+            "knowledgeAssetsAmount": int,
+            "byteSize": int,
+            "epochs": int,
+            "tokenAmount": int,
+            "isImmutable": bool,
+            "paymaster": Address,
+            "publisherNodeIdentityId": int,
+            "publisherNodeR": bytes,
+            "publisherNodeVS": bytes,
+            "identityIds": list[int],
+            "r": list[bytes],
+            "vs": list[bytes],
+        },
+    )
+
+    mint_knowledge_collection = ContractTransaction(
+        contract="Paranet",
+        function="mintKnowledgeCollection",
+        args={
+            "paranetKCStorageContract": Address,
+            "paranetKCTokenId": int,
+            "knowledgeAssetArgs": dict,
+        },
+    )
```
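The `createKnowledgeCollection` signature reflects v8's publish handshake: the publisher node signs the merkle root (`publisherNodeR`/`publisherNodeVS`), and replicating nodes contribute parallel `identityIds`/`r`/`vs` arrays. A hedged sketch of the argument dict a caller would assemble; every value below is a placeholder, not real protocol data:

```python
create_kc_args = {
    "publishOperationId": "00000000-0000-0000-0000-000000000000",
    "merkleRoot": bytes.fromhex("ab" * 32),  # root over the dataset chunks
    "knowledgeAssetsAmount": 3,              # distinct subjects in the dataset
    "byteSize": 4096,                        # dataset size in bytes
    "epochs": 2,                             # storage duration
    "tokenAmount": 10**18,                   # token amount (wei) escrowed
    "isImmutable": False,
    "paymaster": "0x0000000000000000000000000000000000000000",
    "publisherNodeIdentityId": 1,
    "publisherNodeR": bytes.fromhex("11" * 32),
    "publisherNodeVS": bytes.fromhex("22" * 32),
    "identityIds": [2, 3, 4],                # one entry per replicating node
    "r": [bytes.fromhex("33" * 32)] * 3,     # signature r components
    "vs": [bytes.fromhex("44" * 32)] * 3,    # signature vs components
}
```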
dkg/utils/knowledge_collection_tools.py
ADDED

```python
from typing import Literal
from dkg.constants import CHUNK_BYTE_SIZE
from dkg.exceptions import DatasetInputFormatNotSupported, InvalidDataset
from dkg.types import JSONLD, NQuads
from pyld import jsonld
from dkg.constants import DEFAULT_RDF_FORMAT, DEFAULT_CANON_ALGORITHM
from rdflib import Graph, BNode, URIRef, Literal as RDFLiteral
from uuid import uuid4
from web3 import Web3
import math
import hashlib
from eth_abi.packed import encode_packed


def normalize_dataset(
    dataset: JSONLD | NQuads,
    input_format: Literal["JSON-LD", "N-Quads"] = "JSON-LD",
    output_format=DEFAULT_RDF_FORMAT,
    algorithm=DEFAULT_CANON_ALGORITHM,
) -> NQuads:
    normalization_options = {
        "algorithm": algorithm,
        "format": output_format,
    }

    match input_format.lower():
        case "json-ld" | "jsonld":
            pass
        case "n-quads" | "nquads":
            normalization_options["inputFormat"] = "application/n-quads"
        case _:
            raise DatasetInputFormatNotSupported(
                f"Dataset input format isn't supported: {input_format}. "
                "Supported formats: JSON-LD / N-Quads."
            )

    n_quads = jsonld.normalize(dataset, normalization_options)
    assertion = [quad for quad in n_quads.split("\n") if quad]

    if not assertion:
        raise InvalidDataset("Invalid dataset, no quads were extracted.")

    return assertion


def is_empty_dict(dictionary: dict):
    return len(dictionary.keys()) == 0 and isinstance(dictionary, dict)


def format_dataset(
    content: dict,
    input_format: Literal["JSON-LD", "N-Quads"] = "JSON-LD",
    output_format=DEFAULT_RDF_FORMAT,
    algorithm=DEFAULT_CANON_ALGORITHM,
):
    private_assertion = None
    if content.get("private") and not is_empty_dict(content.get("private")):
        private_assertion = normalize_dataset(
            content.get("private"), input_format, output_format, algorithm
        )
    elif not content.get("public"):
        content = {"public": content}

    public_assertion = []
    if content.get("public"):
        public_assertion = normalize_dataset(
            content.get("public"), input_format, output_format, algorithm
        )

    if (
        public_assertion
        and len(public_assertion) == 0
        and private_assertion
        and len(private_assertion) == 0
    ):
        raise ValueError("File format is corrupted, no n-quads are extracted.")

    dataset = {"public": public_assertion}
    if private_assertion:
        dataset["private"] = private_assertion

    return dataset


def split_into_chunks(quads, chunk_size_bytes=32):
    # Concatenate the quads with newline characters
    concatenated_quads = "\n".join(quads)

    # Encode the concatenated string to bytes
    encoded_bytes = concatenated_quads.encode("utf-8")

    # Split the encoded bytes into chunks
    chunks = []
    start = 0

    while start < len(encoded_bytes):
        end = min(start + chunk_size_bytes, len(encoded_bytes))
        chunk = encoded_bytes[start:end]
        chunks.append(chunk.decode("utf-8"))  # Decode bytes back to string
        start = end

    return chunks


def calculate_merkle_root(quads: list[str], chunk_size_bytes: int = CHUNK_BYTE_SIZE):
    chunks = split_into_chunks(quads, chunk_size_bytes)

    # Create leaves using solidityKeccak256 equivalent
    leaves = [
        bytes.fromhex(Web3.solidity_keccak(["string", "uint256"], [chunk, index]).hex())
        for index, chunk in enumerate(chunks)
    ]

    while len(leaves) > 1:
        next_level = []

        for i in range(0, len(leaves), 2):
            left = leaves[i]

            if i + 1 >= len(leaves):
                next_level.append(left)
                break

            right = leaves[i + 1]

            # Combine and sort the leaves
            combined = [left, right]
            combined.sort()

            # Calculate the hash of the combined leaves
            hash_value = Web3.keccak(b"".join(combined))
            next_level.append(hash_value)

        leaves = next_level

    return f"0x{leaves[0].hex()}"


def generate_missing_ids_for_blank_nodes(nquads_list: list[str] | None) -> list[str]:
    if not nquads_list:
        return [""]

    generated_ids = {}

    def replace_blank_node(term):
        # Handle blank nodes
        if isinstance(term, BNode):
            if str(term) not in generated_ids:
                generated_ids[str(term)] = URIRef(f"uuid:{str(uuid4())}")
            return generated_ids[str(term)]

        return term  # Return IRIs or Literals unchanged

    # Create a temporary graph for parsing individual quads
    result = []

    # Process each N-Quad string individually to maintain order
    for nquad in nquads_list:
        if not nquad.strip():
            continue

        # Parse single N-Quad
        g = Graph()
        g.parse(data=nquad, format="nquads")

        # Get the triple and replace blank nodes
        for s, p, o in g:
            updated_quad = (
                replace_blank_node(s),
                replace_blank_node(p),
                replace_blank_node(o),
            )
            # Format as N-Quad string
            result.append(
                f"{updated_quad[0].n3()} {updated_quad[1].n3()} {updated_quad[2].n3()} ."
            )

    return result


def group_nquads_by_subject(nquads_list: list[str], sort: bool = False):
    grouped = {}

    # Process each quad in original order
    for nquad in nquads_list:
        if not nquad.strip():  # Skip empty lines
            continue

        # Parse single quad
        g = Graph()
        g.parse(data=nquad, format="nquads")
        quad = next(iter(g))
        subject, predicate, obj = quad

        # Get subject key
        subject_key = (
            f"<<<{subject.subject}> <{subject.predicate}> <{subject.object}>>"
            if hasattr(subject, "subject")
            else f"<{subject}>"
        )

        # Initialize group if needed
        if subject_key not in grouped:
            grouped[subject_key] = []

        # Format object
        object_value = f'"{obj}"' if isinstance(obj, RDFLiteral) else f"<{obj}>"

        # Add quad to group
        quad_string = f"{subject_key} <{predicate}> {object_value} ."
        grouped[subject_key].append(quad_string)

    # Return grouped quads (sorted if requested)
    grouped_items = sorted(grouped.items()) if sort else grouped.items()
    return [quads for _, quads in grouped_items]


def calculate_number_of_chunks(quads, chunk_size_bytes=CHUNK_BYTE_SIZE):
    # Concatenate the quads with newline characters
    concatenated_quads = "\n".join(quads)

    total_size_bytes = len(concatenated_quads.encode("utf-8"))

    # Calculate and return the number of chunks
    return math.ceil(total_size_bytes / chunk_size_bytes)


def count_distinct_subjects(nquads_list: list[str]) -> int:
    # Create a new RDF graph
    graph = Graph()

    # Parse the joined N-Quads
    graph.parse(data="\n".join(nquads_list), format="nquads")

    # Extract unique subjects using set comprehension
    subjects = {str(quad[0]) for quad in graph}

    return len(subjects)


def solidity_packed_sha256(types: list[str], values: list) -> str:
    # Encode the values using eth_abi's encode_packed
    packed_data = encode_packed(types, values)

    # Calculate SHA256
    sha256_hash = hashlib.sha256(packed_data).hexdigest()

    return f"0x{sha256_hash}"
```
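Taken together, these helpers implement the client side of dataset preparation: canonicalize, group by subject, then size and hash the result. A short usage sketch, assuming `pyld`, `rdflib`, `web3`, and `eth-abi` are installed; the content values are made up:

```python
from dkg.utils.knowledge_collection_tools import (
    calculate_number_of_chunks,
    count_distinct_subjects,
    format_dataset,
    group_nquads_by_subject,
)

content = {
    "public": {
        "@context": {"name": "http://schema.org/name"},
        "@id": "urn:example:asset-1",
        "name": "Example asset",
    }
}

dataset = format_dataset(content)  # {"public": ["<urn:example:asset-1> ..."]}
groups = group_nquads_by_subject(dataset["public"], sort=True)
quads = [quad for group in groups for quad in group]

print(count_distinct_subjects(quads))     # 1
print(calculate_number_of_chunks(quads))  # ceil(byte size / CHUNK_BYTE_SIZE)
```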
dkg/utils/node_request.py
CHANGED
```diff
@@ -17,11 +17,11 @@
 
 from dataclasses import dataclass, field
 from enum import auto, Enum
-from typing import Any, Type
+from typing import Any, Type, Dict
 
 from dkg.dataclasses import BidSuggestionRange, HTTPRequestMethod
 from dkg.exceptions import OperationFailed, OperationNotFinished
-from dkg.types import AutoStrEnumUpperCase, UAL, Address, DataHexStr, NQuads
+from dkg.types import AutoStrEnumUpperCase, UAL, Address, DataHexStr
 
 
 @dataclass
```
```diff
@@ -52,32 +52,55 @@ class NodeRequest:
         path="{operation}/{operation_id}",
     )
 
-    local_store = NodeCall(
-        method=HTTPRequestMethod.POST,
-        path="local-store",
-        data=list[dict[str, str | Address | NQuads]],
-    )
     publish = NodeCall(
         method=HTTPRequestMethod.POST,
         path="publish",
         data={
-            "
-            "
+            "datasetRoot": str,
+            "dataset": dict[str, list[str]],
             "blockchain": str,
-            "contract": Address,
-            "tokenId": int,
             "hashFunctionId": int,
+            "minimumNumberOfNodeReplications": int,
         },
     )
+
+    ask = NodeCall(
+        method=HTTPRequestMethod.POST,
+        path="ask",
+        params={
+            "ual": UAL,
+            "minimumNumberOfNodeReplications": int,
+        },
+    )
+
+    finality_status = NodeCall(
+        method=HTTPRequestMethod.GET,
+        path="finality",
+        params={"ual": UAL},
+    )
+
     get = NodeCall(
         method=HTTPRequestMethod.POST,
         path="get",
-        data={
+        data={
+            "id": UAL,
+            "contentType": str,
+            "includeMetadata": bool,
+            "hashFunctionId": int,
+            "paranetUAL": UAL,
+            "subjectUAL": UAL,
+        },
     )
+
     query = NodeCall(
         method=HTTPRequestMethod.POST,
-        path="query",
-        data={
+        path="direct-query",
+        data={
+            "query": str,
+            "type": str,
+            "repository": str | None,
+            "paranetUAL": str | None,
+        },
     )
 
 
```
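Each `NodeCall` is just a declaration of method, path, and payload schema; the node HTTP provider supplies the base URL and issues the request. As a rough illustration, the new `finality_status` call reduces to a plain GET (base URL and UAL below are placeholders, and a deployed node may version the path differently):

```python
import requests

NODE_BASE_URL = "http://localhost:8900"  # placeholder node endpoint
ual = "did:dkg:hardhat1:31337/0x0000000000000000000000000000000000000000/1"

# Equivalent of NodeRequest.finality_status: GET /finality?ual=<UAL>
response = requests.get(f"{NODE_BASE_URL}/finality", params={"ual": ual})
print(response.status_code, response.json())
```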
```diff
@@ -169,3 +192,26 @@ def validate_operation_status(operation_result: dict[str, Any]) -> None:
             )
         case _:
             raise OperationNotFinished("Operation isn't finished")
+
+
+def get_operation_status_object(
+    operation_result: Dict[str, Any], operation_id: str
+) -> Dict[str, Any]:
+    """
+    Creates an operation status object from operation result and ID.
+
+    Args:
+        operation_result: Dictionary containing operation result data
+        operation_id: The ID of the operation
+
+    Returns:
+        Dictionary containing operation status information
+    """
+    # Check if error_type exists in operation_result.data
+    operation_data = (
+        {"status": operation_result.get("status"), **operation_result.get("data")}
+        if operation_result.get("data", {}).get("errorType")
+        else {"status": operation_result.get("status")}
+    )
+
+    return {"operationId": operation_id, **operation_data}
```
dkg/utils/rdf.py
CHANGED
```diff
@@ -17,7 +17,11 @@
 
 from typing import Literal
 
-from dkg.constants import
+from dkg.constants import (
+    PRIVATE_ASSERTION_PREDICATE,
+    DEFAULT_RDF_FORMAT,
+    DEFAULT_CANON_ALGORITHM,
+)
 from dkg.exceptions import DatasetInputFormatNotSupported, InvalidDataset
 from dkg.types import JSONLD, HexStr, NQuads
 from dkg.utils.merkle import MerkleTree, hash_assertion_with_indexes
```
```diff
@@ -27,10 +31,12 @@ from pyld import jsonld
 def normalize_dataset(
     dataset: JSONLD | NQuads,
     input_format: Literal["JSON-LD", "N-Quads"] = "JSON-LD",
+    output_format=DEFAULT_RDF_FORMAT,
+    algorithm=DEFAULT_CANON_ALGORITHM,
 ) -> NQuads:
     normalization_options = {
-        "algorithm":
-        "format":
+        "algorithm": algorithm,
+        "format": output_format,
     }
 
     match input_format.lower():
```
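Because both new parameters default to the `dkg.constants` values, existing call sites are unaffected, and callers that need a different canonicalization can now pass it explicitly. A small sketch of the unchanged default behavior:

```python
from dkg.constants import DEFAULT_CANON_ALGORITHM
from dkg.utils.rdf import normalize_dataset

doc = {"@id": "urn:example:1", "http://schema.org/name": "Example"}

quads = normalize_dataset(doc)  # JSON-LD in, canonical N-Quads out

# Same call with the new default spelled out explicitly:
same_quads = normalize_dataset(doc, algorithm=DEFAULT_CANON_ALGORITHM)
print(quads == same_quads)  # True
```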