dkg 8.0.0a2__py3-none-any.whl → 8.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dkg/__init__.py +1 -1
- dkg/assertion.py +2 -2
- dkg/clients/__init__.py +4 -0
- dkg/clients/async_dkg.py +109 -0
- dkg/{main.py → clients/dkg.py} +42 -21
- dkg/constants.py +117 -6
- dkg/data/interfaces/AskStorage.json +366 -0
- dkg/data/interfaces/Chronos.json +202 -0
- dkg/data/interfaces/Hub.json +294 -2
- dkg/data/interfaces/IdentityStorage.json +58 -0
- dkg/data/interfaces/{ContentAsset.json → KnowledgeCollection.json} +256 -343
- dkg/data/interfaces/KnowledgeCollectionStorage.json +2312 -0
- dkg/data/interfaces/Paranet.json +30 -214
- dkg/data/interfaces/ParanetIncentivesPoolFactory.json +18 -2
- dkg/data/interfaces/ParanetKnowledgeMinersRegistry.json +20 -4
- dkg/data/interfaces/{ParanetNeurowebIncentivesPool.json → ParanetNeuroIncentivesPool.json} +7 -7
- dkg/data/interfaces/ParanetsRegistry.json +102 -32
- dkg/data/interfaces/Token.json +146 -17
- dkg/managers/__init__.py +0 -0
- dkg/managers/async_manager.py +69 -0
- dkg/{manager.py → managers/manager.py} +5 -3
- dkg/method.py +5 -2
- dkg/modules/__init__.py +0 -0
- dkg/modules/asset/__init__.py +0 -0
- dkg/modules/asset/asset.py +739 -0
- dkg/modules/asset/async_asset.py +751 -0
- dkg/modules/async_module.py +66 -0
- dkg/modules/graph/__init__.py +0 -0
- dkg/modules/graph/async_graph.py +118 -0
- dkg/modules/graph/graph.py +94 -0
- dkg/{module.py → modules/module.py} +1 -1
- dkg/modules/network/__init__.py +0 -0
- dkg/{network.py → modules/network/network.py} +4 -4
- dkg/modules/node/__init__.py +0 -0
- dkg/modules/node/async_node.py +39 -0
- dkg/{node.py → modules/node/node.py} +2 -2
- dkg/modules/paranet/__init__.py +0 -0
- dkg/{paranet.py → modules/paranet/paranet.py} +2 -2
- dkg/providers/__init__.py +9 -2
- dkg/providers/blockchain/__init__.py +4 -0
- dkg/providers/blockchain/async_blockchain.py +245 -0
- dkg/providers/blockchain/base_blockchain.py +102 -0
- dkg/providers/{blockchain.py → blockchain/blockchain.py} +15 -96
- dkg/providers/node/__init__.py +4 -0
- dkg/providers/node/async_node_http.py +72 -0
- dkg/providers/node/base_node_http.py +25 -0
- dkg/providers/{node_http.py → node/node_http.py} +12 -10
- dkg/services/__init__.py +0 -0
- dkg/services/blockchain_services/__init__.py +0 -0
- dkg/services/blockchain_services/async_blockchain_service.py +180 -0
- dkg/services/blockchain_services/blockchain_service.py +174 -0
- dkg/services/input_service.py +183 -0
- dkg/services/node_services/__init__.py +0 -0
- dkg/services/node_services/async_node_service.py +184 -0
- dkg/services/node_services/node_service.py +167 -0
- dkg/types/__init__.py +11 -11
- dkg/utils/blockchain_request.py +68 -42
- dkg/utils/knowledge_asset_tools.py +5 -0
- dkg/utils/knowledge_collection_tools.py +248 -0
- dkg/utils/node_request.py +60 -13
- dkg/utils/rdf.py +9 -3
- {dkg-8.0.0a2.dist-info → dkg-8.0.1.dist-info}/METADATA +28 -19
- dkg-8.0.1.dist-info/RECORD +82 -0
- {dkg-8.0.0a2.dist-info → dkg-8.0.1.dist-info}/WHEEL +1 -1
- dkg/asset.py +0 -912
- dkg/data/interfaces/AssertionStorage.json +0 -229
- dkg/data/interfaces/ContentAssetStorage.json +0 -706
- dkg/data/interfaces/ServiceAgreementStorageProxy.json +0 -1314
- dkg/graph.py +0 -63
- dkg-8.0.0a2.dist-info/RECORD +0 -52
- {dkg-8.0.0a2.dist-info → dkg-8.0.1.dist-info}/LICENSE +0 -0
- {dkg-8.0.0a2.dist-info → dkg-8.0.1.dist-info}/NOTICE +0 -0
@@ -0,0 +1,751 @@
|
|
1
|
+
# Licensed to the Apache Software Foundation (ASF) under one
|
2
|
+
# or more contributor license agreements. See the NOTICE file
|
3
|
+
# distributed with this work for additional information
|
4
|
+
# regarding copyright ownership. The ASF licenses this file
|
5
|
+
# to you under the Apache License, Version 2.0 (the
|
6
|
+
# "License"); you may not use this file except in compliance
|
7
|
+
# with the License. You may obtain a copy of the License at
|
8
|
+
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
10
|
+
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
12
|
+
# software distributed under the License is distributed on an
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
14
|
+
# KIND, either express or implied. See the License for the
|
15
|
+
# specific language governing permissions and limitations
|
16
|
+
# under the License.
|
17
|
+
|
18
|
+
import json
|
19
|
+
import asyncio
|
20
|
+
from typing import Literal
|
21
|
+
from pyld import jsonld
|
22
|
+
from web3 import Web3
|
23
|
+
from web3.constants import ADDRESS_ZERO
|
24
|
+
from web3.types import TxReceipt
|
25
|
+
from itertools import chain
|
26
|
+
from eth_account.messages import encode_defunct
|
27
|
+
from eth_account import Account
|
28
|
+
from hexbytes import HexBytes
|
29
|
+
|
30
|
+
from dkg.constants import (
|
31
|
+
PRIVATE_ASSERTION_PREDICATE,
|
32
|
+
PRIVATE_RESOURCE_PREDICATE,
|
33
|
+
PRIVATE_HASH_SUBJECT_PREFIX,
|
34
|
+
CHUNK_BYTE_SIZE,
|
35
|
+
MAX_FILE_SIZE,
|
36
|
+
DEFAULT_RDF_FORMAT,
|
37
|
+
Operations,
|
38
|
+
OutputTypes,
|
39
|
+
)
|
40
|
+
from dkg.managers.async_manager import AsyncRequestManager
|
41
|
+
from dkg.method import Method
|
42
|
+
from dkg.modules.async_module import AsyncModule
|
43
|
+
from dkg.types import JSONLD, UAL, Address, HexStr
|
44
|
+
from dkg.utils.blockchain_request import (
|
45
|
+
BlockchainRequest,
|
46
|
+
)
|
47
|
+
from dkg.utils.node_request import (
|
48
|
+
OperationStatus,
|
49
|
+
)
|
50
|
+
from dkg.utils.ual import format_ual, parse_ual
|
51
|
+
import dkg.utils.knowledge_collection_tools as kc_tools
|
52
|
+
import dkg.utils.knowledge_asset_tools as ka_tools
|
53
|
+
from dkg.services.input_service import InputService
|
54
|
+
from dkg.services.node_services.async_node_service import AsyncNodeService
|
55
|
+
from dkg.services.blockchain_services.async_blockchain_service import (
|
56
|
+
AsyncBlockchainService,
|
57
|
+
)
|
58
|
+
from dkg.utils.node_request import get_operation_status_object
|
59
|
+
|
60
|
+
|
61
|
+
class AsyncKnowledgeAsset(AsyncModule):
    """Async module for creating, retrieving and managing Knowledge Assets."""

    def __init__(
        self,
        manager: AsyncRequestManager,
        input_service: InputService,
        node_service: AsyncNodeService,
        blockchain_service: AsyncBlockchainService,
    ):
        # Hold references to the shared service objects used by every method.
        self.manager = manager
        self.input_service = input_service
        self.node_service = node_service
        self.blockchain_service = blockchain_service
74
|
+
async def is_valid_ual(self, ual: UAL) -> bool:
    """Validate a UAL and verify the underlying token exists on-chain.

    Checks the ``did:dkg:<blockchain>[:<chain_id>]/<contract>/<token_id>``
    structure against this client's blockchain provider, then confirms the
    token has a non-zero owner.

    Raises:
        ValueError: if any structural check fails, if the owner lookup
            fails, or if the owner is missing / the zero address.

    Returns:
        True when every check passes.
    """
    if not ual or not isinstance(ual, str):
        raise ValueError("UAL must be a non-empty string.")

    parts = ual.split("/")
    if len(parts) != 3:
        raise ValueError("UAL format is incorrect.")

    prefixes = parts[0].split(":")
    prefixes_number = len(prefixes)
    # Valid prefixes: did:dkg:<name> or did:dkg:<name>:<chain_id>.
    if prefixes_number != 3 and prefixes_number != 4:
        raise ValueError("Prefix format in UAL is incorrect.")

    if prefixes[0] != "did":
        raise ValueError(
            f"Invalid DID prefix. Expected: 'did'. Received: '{prefixes[0]}'."
        )

    if prefixes[1] != "dkg":
        raise ValueError(
            f"Invalid DKG prefix. Expected: 'dkg'. Received: '{prefixes[1]}'."
        )

    blockchain_name = self.manager.blockchain_provider.blockchain_id.split(":")[0]
    if prefixes[2] != blockchain_name:
        # Fixed: these messages previously used JavaScript-style "${...}"
        # placeholders, which render a stray literal '$' in Python f-strings.
        raise ValueError(
            "Invalid blockchain name in the UAL prefix. "
            f"Expected: '{blockchain_name}'. Received: '{prefixes[2]}'."
        )

    if prefixes_number == 4:
        chain_id = self.manager.blockchain_provider.blockchain_id.split(":")[1]

        if int(prefixes[3]) != int(chain_id):
            raise ValueError(
                "Chain ID in UAL does not match the blockchain. "
                f"Expected: '{chain_id}'. Received: '{prefixes[3]}'."
            )

    contract_address = self.manager.blockchain_provider.contracts[
        "ContentAssetStorage"
    ].address

    if parts[1].lower() != contract_address.lower():
        raise ValueError(
            "Contract address in UAL does not match. "
            f"Expected: '{contract_address.lower()}'. "
            f"Received: '{parts[1].lower()}'."
        )

    try:
        owner = await self.blockchain_service.get_owner(int(parts[2]))
    except Exception as err:
        raise ValueError(f"Error fetching asset owner: {err}") from err

    # Kept outside the try block so this specific error is not caught and
    # re-wrapped as a generic fetch failure (the original obscured it).
    if not owner or owner == ADDRESS_ZERO:
        raise ValueError("Token does not exist or has no owner.")

    return True
|
137
|
+
def process_content(self, content: str) -> list:
    """Split newline-delimited content into a list of stripped, non-empty lines."""
    lines = []
    for raw_line in content.split("\n"):
        stripped = raw_line.strip()
        if stripped:
            lines.append(stripped)
    return lines
|
140
|
+
def insert_triple_sorted(self, triples_list: list, new_triple: str) -> int:
    """Insert ``new_triple`` into an already-sorted list, keeping it sorted.

    Performs a binary search (leftmost insertion point, equivalent to
    ``bisect_left``) and mutates ``triples_list`` in place.

    Returns:
        The index at which the triple was inserted.
    """
    low, high = 0, len(triples_list)
    while low < high:
        middle = (low + high) // 2
        if triples_list[middle] < new_triple:
            low = middle + 1
        else:
            high = middle
    triples_list.insert(low, new_triple)
    return low
|
+
|
156
|
+
def get_operation_status_dict(self, operation_result, operation_id):
    """Summarize an operation result as ``{"operationId", "status", ...}``.

    The ``data`` payload is folded into the summary only when it exists
    and carries an ``errorType`` entry (i.e. the operation failed with
    error details); otherwise only the status is reported.
    """
    status_only = {"status": operation_result.get("status")}
    data = operation_result.get("data")
    if data and data.get("errorType"):
        payload = {**status_only, **data}
    else:
        payload = status_only
    return {"operationId": operation_id, **payload}
|
+
|
167
|
+
def get_message_signer_address(self, dataset_root: str, signature: dict):
    """Recover the Ethereum address that signed ``dataset_root``.

    The signature arrives as separate r/s/v components (r and s may be
    0x-prefixed hex strings); they are concatenated into a single
    ``0x<r><s><v>`` hex signature before recovery.
    """
    message = encode_defunct(HexBytes(dataset_root))
    r_hex = signature.get("r")
    s_hex = signature.get("s")
    v = signature.get("v")
    if r_hex.startswith("0x"):
        r_hex = r_hex[2:]
    if s_hex.startswith("0x"):
        s_hex = s_hex[2:]

    # v is rendered as exactly two hex digits.
    full_signature = "0x" + r_hex + s_hex + hex(v)[2:].zfill(2)

    return Account.recover_message(message, signature=full_signature)
|
+
|
177
|
+
async def process_signatures(self, signatures, dataset_root):
    """Verify node signatures over ``dataset_root`` concurrently.

    For each signature, recover the signer address and check on-chain that
    the recovered key is registered as the node's operational wallet.
    Signatures that fail recovery or verification are silently skipped
    (best-effort collection, not fatal).

    Returns:
        A dict of parallel lists: ``identity_ids``, ``r`` and ``vs``.
    """

    async def validate(sig):
        try:
            signer = self.get_message_signer_address(dataset_root, sig)

            is_operational = (
                await self.blockchain_service.key_is_operational_wallet(
                    sig.get("identityId"),
                    Web3.solidity_keccak(["address"], [signer]),
                    2,  # IdentityLib.OPERATIONAL_KEY
                )
            )

            if is_operational:
                return {
                    "identityId": sig.get("identityId"),
                    "r": sig.get("r"),
                    "vs": sig.get("vs"),
                }
        except Exception:
            # Drop invalid/unverifiable signatures rather than aborting.
            pass
        return None

    # Validate every signature concurrently.
    processed = await asyncio.gather(
        *(validate(sig) for sig in signatures), return_exceptions=False
    )

    # Keep only the signatures that verified successfully.
    verified = [entry for entry in processed if entry is not None]

    return {
        "identity_ids": [entry["identityId"] for entry in verified],
        "r": [entry["r"] for entry in verified],
        "vs": [entry["vs"] for entry in verified],
    }
|
+
|
217
|
+
async def create(
    self,
    content: dict[Literal["public", "private"], JSONLD],
    options: dict = None,
) -> dict[str, UAL | HexStr | dict[str, dict[str, str] | TxReceipt]]:
    """Create a knowledge collection from ``content`` and publish it.

    Normalizes the public/private content into sorted n-quad triples,
    publishes the dataset to the network, verifies node signatures, mints
    the knowledge collection on-chain and waits for finality confirmations.

    Args:
        content: Either a JSON-LD dict with optional "public"/"private"
            sections, or a raw n-quads string (treated as public).
        options: Create options forwarded to the input service
            (retries, epochs, token amount, payer, ...).

    Raises:
        ValueError: if the assembled dataset exceeds MAX_FILE_SIZE.

    Returns:
        A JSON-serializable dict with the UAL, dataset root, signatures
        and per-step operation statuses.
    """
    if options is None:
        options = {}

    arguments = self.input_service.get_asset_create_arguments(options)

    max_number_of_retries = arguments.get("max_number_of_retries")
    frequency = arguments.get("frequency")
    epochs_num = arguments.get("epochs_num")
    hash_function_id = arguments.get("hash_function_id")
    immutable = arguments.get("immutable")
    token_amount = arguments.get("token_amount")
    payer = arguments.get("payer")
    minimum_number_of_finalization_confirmations = arguments.get(
        "minimum_number_of_finalization_confirmations"
    )
    minimum_number_of_node_replications = arguments.get(
        "minimum_number_of_node_replications"
    )
    blockchain_id = self.manager.blockchain_provider.blockchain_id

    dataset = {}
    # BUG FIX: these previously read from the just-created empty ``dataset``
    # dict (always None), which made the string-content branches below
    # unreachable. They must inspect the caller-supplied ``content``.
    public_content = content.get("public") if isinstance(content, dict) else None
    private_content = content.get("private") if isinstance(content, dict) else None
    if isinstance(content, str):
        dataset["public"] = self.process_content(content)
    elif isinstance(public_content, str) or (
        not public_content and private_content and isinstance(private_content, str)
    ):
        if public_content:
            dataset["public"] = self.process_content(public_content)
        else:
            dataset["public"] = []

        if private_content and isinstance(private_content, str):
            dataset["private"] = self.process_content(private_content)
    else:
        dataset = kc_tools.format_dataset(content)

    dataset["public"] = kc_tools.generate_missing_ids_for_blank_nodes(
        dataset.get("public")
    )

    if dataset.get("private") and len(dataset.get("private")):
        dataset["private"] = kc_tools.generate_missing_ids_for_blank_nodes(
            dataset.get("private")
        )

        # Group private triples by subject and flatten
        private_triples_grouped = kc_tools.group_nquads_by_subject(
            dataset.get("private"), True
        )

        dataset["private"] = list(chain.from_iterable(private_triples_grouped))

        # Compute private root and add it to the public dataset
        private_root = kc_tools.calculate_merkle_root(dataset.get("private"))
        dataset["public"].append(
            f'<{ka_tools.generate_named_node()}> <{PRIVATE_ASSERTION_PREDICATE}> "{private_root}" .'
        )

        public_triples_grouped = kc_tools.group_nquads_by_subject(
            dataset.get("public"), True
        )

        # Map public subject -> group index for O(1) lookup
        public_subject_dict = {}
        for i in range(len(public_triples_grouped)):
            public_subject = public_triples_grouped[i][0].split(" ")[0]
            public_subject_dict[public_subject] = i

        private_triple_subject_hashes_grouped_without_public_pair = []

        # Integrate private subjects into their public group, or collect
        # hashed placeholders (sorted) for subjects with no public pair.
        for private_triples in private_triples_grouped:
            private_subject = private_triples[0].split(" ")[
                0
            ]  # Extract the private subject

            private_subject_hash = kc_tools.solidity_packed_sha256(
                types=["string"],
                values=[private_subject[1:-1]],  # strip '<' and '>'
            )

            if private_subject in public_subject_dict:
                # Public pair exists: insert a representation in that group
                public_index = public_subject_dict.get(private_subject)
                self.insert_triple_sorted(
                    public_triples_grouped[public_index],
                    f"{private_subject} <{PRIVATE_RESOURCE_PREDICATE}> <{ka_tools.generate_named_node()}> .",
                )
            else:
                # No public pair: keep a separate list, sorted by hash
                self.insert_triple_sorted(
                    private_triple_subject_hashes_grouped_without_public_pair,
                    f"<{PRIVATE_HASH_SUBJECT_PREFIX}{private_subject_hash}> <{PRIVATE_RESOURCE_PREDICATE}> <{ka_tools.generate_named_node()}> .",
                )

        for triple in private_triple_subject_hashes_grouped_without_public_pair:
            public_triples_grouped.append([triple])

        dataset["public"] = list(chain.from_iterable(public_triples_grouped))
    else:
        # No private triples: just group and flatten public
        public_triples_grouped = kc_tools.group_nquads_by_subject(
            dataset.get("public"), True
        )
        dataset["public"] = list(chain.from_iterable(public_triples_grouped))

    # Size is measured in fixed-size chunks, rounded up
    number_of_chunks = kc_tools.calculate_number_of_chunks(
        dataset.get("public"), CHUNK_BYTE_SIZE
    )
    dataset_size = number_of_chunks * CHUNK_BYTE_SIZE

    # Validate the assertion size in bytes
    if dataset_size > MAX_FILE_SIZE:
        raise ValueError(f"File size limit is {MAX_FILE_SIZE / (1024 * 1024)}MB.")

    dataset_root = kc_tools.calculate_merkle_root(dataset.get("public"))

    content_asset_storage_address = (
        await self.blockchain_service.get_asset_storage_address(
            "KnowledgeCollectionStorage"
        )
    )

    result = await self.node_service.publish(
        dataset_root,
        dataset,
        blockchain_id,
        hash_function_id,
        minimum_number_of_node_replications,
    )
    publish_operation_id = result.get("operationId")
    publish_operation_result = await self.node_service.get_operation_result(
        publish_operation_id,
        Operations.PUBLISH.value,
        max_number_of_retries,
        frequency,
    )

    # Bail out early unless the publish completed or reached min acks.
    if publish_operation_result.get(
        "status"
    ) != OperationStatus.COMPLETED and not publish_operation_result.get(
        "data", {}
    ).get("minAcksReached"):
        return {
            "datasetRoot": dataset_root,
            "operation": {
                "publish": self.get_operation_status_dict(
                    publish_operation_result, publish_operation_id
                )
            },
        }

    data = publish_operation_result.get("data", {})
    signatures = data.get("signatures")

    publisher_node_signature = data.get("publisherNodeSignature", {})
    publisher_node_identity_id = publisher_node_signature.get("identityId")
    publisher_node_r = publisher_node_signature.get("r")
    publisher_node_vs = publisher_node_signature.get("vs")

    results = await self.process_signatures(signatures, dataset_root)
    identity_ids = results["identity_ids"]
    r = results["r"]
    vs = results["vs"]

    if token_amount:
        estimated_publishing_cost = token_amount
    else:
        time_until_next_epoch = (
            await self.blockchain_service.time_until_next_epoch()
        )
        epoch_length = await self.blockchain_service.epoch_length()
        stake_weighted_average_ask = (
            await self.blockchain_service.get_stake_weighted_average_ask()
        )

        # Integer arithmetic throughout (wei-scaled), to avoid float drift
        estimated_publishing_cost = (
            (
                int(stake_weighted_average_ask)
                * (
                    int(epochs_num) * int(1e18)
                    + (int(time_until_next_epoch) * int(1e18)) // int(epoch_length)
                )
                * int(dataset_size)
            )
            // 1024
            // int(1e18)
        )

    knowledge_collection_result = (
        await self.blockchain_service.create_knowledge_collection(
            {
                "publishOperationId": publish_operation_id,
                "merkleRoot": dataset_root,
                "knowledgeAssetsAmount": kc_tools.count_distinct_subjects(
                    dataset.get("public")
                ),
                "byteSize": dataset_size,
                "epochs": epochs_num,
                "tokenAmount": estimated_publishing_cost,
                "isImmutable": immutable,
                "paymaster": payer,
                "publisherNodeIdentityId": publisher_node_identity_id,
                "publisherNodeR": publisher_node_r,
                "publisherNodeVS": publisher_node_vs,
                "identityIds": identity_ids,
                "r": r,
                "vs": vs,
            },
            None,
            None,
        )
    )
    knowledge_collection_id = knowledge_collection_result.knowledge_collection_id
    mint_knowledge_asset_receipt = knowledge_collection_result.receipt

    ual = format_ual(
        blockchain_id, content_asset_storage_address, knowledge_collection_id
    )

    finality_status_result = 0
    if minimum_number_of_finalization_confirmations > 0:
        finality_status_result = await self.node_service.finality_status(
            ual,
            minimum_number_of_finalization_confirmations,
            max_number_of_retries,
            frequency,
        )

    return json.loads(
        Web3.to_json(
            {
                "UAL": ual,
                "datasetRoot": dataset_root,
                "signatures": publish_operation_result.get("data", {}).get(
                    "signatures"
                ),
                "operation": {
                    "mintKnowledgeAsset": mint_knowledge_asset_receipt,
                    "publish": get_operation_status_object(
                        publish_operation_result, publish_operation_id
                    ),
                    "finality": {
                        "status": "FINALIZED"
                        if finality_status_result
                        >= minimum_number_of_finalization_confirmations
                        else "NOT FINALIZED"
                    },
                    "numberOfConfirmations": finality_status_result,
                    "requiredConfirmations": minimum_number_of_finalization_confirmations,
                },
            }
        )
    )
|
+
|
491
|
+
_submit_knowledge_asset = Method(BlockchainRequest.submit_knowledge_asset)

def submit_to_paranet(
    self, ual: UAL, paranet_ual: UAL
) -> dict[str, UAL | Address | TxReceipt]:
    """Submit the knowledge asset behind ``ual`` to the given paranet.

    Returns a dict with both UALs, the derived paranet id and the
    transaction receipt of the on-chain submission.
    """
    asset = parse_ual(ual)
    paranet = parse_ual(paranet_ual)

    receipt: TxReceipt = self._submit_knowledge_asset(
        paranet["contract_address"],
        paranet["token_id"],
        asset["contract_address"],
        asset["token_id"],
    )

    # Paranet id = keccak256(abi-packed asset storage address + token id).
    paranet_id = Web3.to_hex(
        Web3.solidity_keccak(
            ["address", "uint256"],
            [asset["contract_address"], asset["token_id"]],
        )
    )

    return {
        "UAL": ual,
        "paranetUAL": paranet_ual,
        "paranetId": paranet_id,
        "operation": json.loads(Web3.to_json(receipt)),
    }
|
+
|
527
|
+
_transfer = Method(BlockchainRequest.transfer_asset)

def transfer(
    self,
    ual: UAL,
    new_owner: Address,
) -> dict[str, UAL | Address | TxReceipt]:
    """Transfer ownership of the asset behind ``ual`` to ``new_owner``."""
    asset_token_id = parse_ual(ual)["token_id"]

    transfer_receipt: TxReceipt = self._transfer(
        self.manager.blockchain_provider.account,
        new_owner,
        asset_token_id,
    )

    return {
        "UAL": ual,
        "owner": new_owner,
        "operation": json.loads(Web3.to_json(transfer_receipt)),
    }
|
+
|
548
|
+
_burn_asset = Method(BlockchainRequest.burn_asset)

def burn(self, ual: UAL) -> dict[str, UAL | TxReceipt]:
    """Burn (permanently destroy) the knowledge asset behind ``ual``."""
    burn_receipt: TxReceipt = self._burn_asset(parse_ual(ual)["token_id"])
    return {"UAL": ual, "operation": json.loads(Web3.to_json(burn_receipt))}
|
+
|
557
|
+
_get_latest_assertion_id = Method(BlockchainRequest.get_latest_assertion_id)

async def get(self, ual: UAL, options: dict = None) -> dict:
    """Retrieve a knowledge asset (and optionally its metadata) by UAL.

    Args:
        ual: The asset's Universal Asset Locator; a ``:state`` suffix is
            appended when the ``state`` option is set.
        options: Get options forwarded to the input service (content type,
            output format, retries, paranet/subject UAL, ...).

    Returns:
        A dict with the formatted assertion, the get-operation status and,
        when requested and available, formatted metadata. When the
        assertion cannot be found the result carries a FAILED status with
        a DKG_CLIENT_ERROR payload instead.
    """
    if options is None:
        options = {}

    arguments = self.input_service.get_asset_get_arguments(options)

    max_number_of_retries = arguments.get("max_number_of_retries")
    frequency = arguments.get("frequency")
    state = arguments.get("state")
    include_metadata = arguments.get("include_metadata")
    content_type = arguments.get("content_type")
    validate = arguments.get("validate")
    output_format = arguments.get("output_format")
    hash_function_id = arguments.get("hash_function_id")
    paranet_ual = arguments.get("paranet_ual")
    subject_ual = arguments.get("subject_ual")

    ual_with_state = f"{ual}:{state}" if state else ual
    result = await self.node_service.get(
        ual_with_state,
        content_type,
        include_metadata,
        hash_function_id,
        paranet_ual,
        subject_ual,
    )
    get_public_operation_id = result.get("operationId")
    get_public_operation_result = await self.node_service.get_operation_result(
        get_public_operation_id,
        Operations.GET.value,
        max_number_of_retries,
        frequency,
    )

    # These two helpers replace blocks that were previously duplicated
    # verbatim in the subject-UAL and missing-assertion paths.
    def _mark_failed_not_found():
        # Normalize a missing assertion into a FAILED client-side error.
        if get_public_operation_result.get("status") != "FAILED":
            get_public_operation_result["data"] = {
                "errorType": "DKG_CLIENT_ERROR",
                "errorMessage": "Unable to find assertion on the network!",
            }
            get_public_operation_result["status"] = "FAILED"

    def _operation_only_result():
        return {
            "operation": {
                "get": get_operation_status_object(
                    get_public_operation_result, get_public_operation_id
                ),
            },
        }

    if subject_ual:
        if get_public_operation_result.get("data"):
            return {
                **_operation_only_result(),
                "subject_ual_pairs": get_public_operation_result.get("data"),
            }
        _mark_failed_not_found()
        return _operation_only_result()

    metadata = get_public_operation_result.get("data")
    assertion = get_public_operation_result.get("data", {}).get("assertion", None)

    if not assertion:
        _mark_failed_not_found()
        return _operation_only_result()

    if validate:
        is_valid = True  # TODO: Implement assertion validation logic
        if not is_valid:
            get_public_operation_result["data"] = {
                "error_type": "DKG_CLIENT_ERROR",
                "error_message": "Calculated root hashes don't match!",
            }

    # Public triples first, then private ones when present as a list.
    formatted_assertion = "\n".join(
        assertion.get("public", [])
        + (
            assertion.get("private")
            if isinstance(assertion.get("private"), list)
            else []
        )
    )

    formatted_metadata = None
    if output_format == OutputTypes.JSONLD.value:
        formatted_assertion = self.to_jsonld(formatted_assertion)

        if include_metadata:
            formatted_metadata = self.to_jsonld("\n".join(metadata))

    if output_format == OutputTypes.NQUADS.value:
        formatted_assertion = self.to_nquads(
            formatted_assertion, DEFAULT_RDF_FORMAT
        )
        if include_metadata:
            formatted_metadata = self.to_nquads(
                "\n".join(metadata), DEFAULT_RDF_FORMAT
            )

    result = {
        "assertion": formatted_assertion,
        "operation": {
            "get": get_operation_status_object(
                get_public_operation_result, get_public_operation_id
            ),
        },
    }

    if include_metadata and metadata:
        result["metadata"] = formatted_metadata

    return result
|
+
|
683
|
+
# _extend_storing_period = Method(BlockchainRequest.extend_asset_storing_period)
|
684
|
+
|
685
|
+
# async def extend_storing_period(
|
686
|
+
# self,
|
687
|
+
# ual: UAL,
|
688
|
+
# additional_epochs: int,
|
689
|
+
# token_amount: Wei | None = None,
|
690
|
+
# ) -> dict[str, UAL | TxReceipt]:
|
691
|
+
# parsed_ual = parse_ual(ual)
|
692
|
+
# blockchain_id, content_asset_storage_address, token_id = (
|
693
|
+
# parsed_ual["blockchain"],
|
694
|
+
# parsed_ual["contract_address"],
|
695
|
+
# parsed_ual["token_id"],
|
696
|
+
# )
|
697
|
+
|
698
|
+
# if token_amount is None:
|
699
|
+
# latest_finalized_state = self._get_latest_assertion_id(token_id)
|
700
|
+
# latest_finalized_state_size = self._get_assertion_size(
|
701
|
+
# latest_finalized_state
|
702
|
+
# )
|
703
|
+
|
704
|
+
# token_amount = await self.node_service.get_bid_suggestion(
|
705
|
+
# blockchain_id,
|
706
|
+
# additional_epochs,
|
707
|
+
# latest_finalized_state_size,
|
708
|
+
# content_asset_storage_address,
|
709
|
+
# latest_finalized_state,
|
710
|
+
# DefaultParameters.HASH_FUNCTION_ID.value,
|
711
|
+
# token_amount or BidSuggestionRange.LOW,
|
712
|
+
# )
|
713
|
+
|
714
|
+
# receipt: TxReceipt = self._extend_storing_period(
|
715
|
+
# token_id, additional_epochs, token_amount
|
716
|
+
# )
|
717
|
+
|
718
|
+
# return {
|
719
|
+
# "UAL": ual,
|
720
|
+
# "operation": json.loads(Web3.to_json(receipt)),
|
721
|
+
# }
|
722
|
+
|
723
|
+
_get_block = Method(BlockchainRequest.get_block)

_get_assertion_size = Method(BlockchainRequest.get_assertion_size)

def to_jsonld(self, nquads: str):
    """Convert an N-Quads string into JSON-LD (URDNA2015 canonicalization)."""
    conversion_options = {
        "algorithm": "URDNA2015",
        "format": "application/n-quads",
    }
    return jsonld.from_rdf(nquads, conversion_options)
|
+
|
735
|
+
def to_nquads(self, content, input_format):
    """Round-trip ``content`` through JSON-LD and return canonical N-Quads.

    Canonicalizes with URDNA2015 and returns the quads as a list of
    non-empty lines. NOTE(review): if canonization yields a non-string
    result this implicitly returns ``None`` — presumably unreachable with
    the n-quads output format; confirm against pyld's behavior.

    Raises:
        ValueError: if the RDF/JSON-LD conversion fails.
    """
    conversion_options = {
        "algorithm": "URDNA2015",
        "format": "application/n-quads",
    }
    if input_format:
        conversion_options["inputFormat"] = input_format

    try:
        as_jsonld = jsonld.from_rdf(content, conversion_options)
        canonized = jsonld.to_rdf(as_jsonld, conversion_options)
    except Exception as e:
        raise ValueError(f"Error processing content: {e}")

    if isinstance(canonized, str):
        return [line for line in canonized.split("\n") if line.strip()]