dkg 0.1.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dkg/__init__.py +3 -0
- dkg/asset.py +781 -0
- dkg/constants.py +39 -0
- dkg/data/interfaces/Assertion.json +131 -0
- dkg/data/interfaces/AssertionStorage.json +229 -0
- dkg/data/interfaces/CommitManagerV1.json +534 -0
- dkg/data/interfaces/CommitManagerV1U1.json +720 -0
- dkg/data/interfaces/ContentAsset.json +671 -0
- dkg/data/interfaces/ContentAssetStorage.json +706 -0
- dkg/data/interfaces/HashingProxy.json +227 -0
- dkg/data/interfaces/Hub.json +356 -0
- dkg/data/interfaces/Identity.json +193 -0
- dkg/data/interfaces/IdentityStorage.json +342 -0
- dkg/data/interfaces/ParametersStorage.json +468 -0
- dkg/data/interfaces/Profile.json +292 -0
- dkg/data/interfaces/ProfileStorage.json +596 -0
- dkg/data/interfaces/ProofManagerV1.json +525 -0
- dkg/data/interfaces/ProofManagerV1U1.json +546 -0
- dkg/data/interfaces/ScoringProxy.json +242 -0
- dkg/data/interfaces/ServiceAgreementStorageProxy.json +1299 -0
- dkg/data/interfaces/ServiceAgreementStorageV1.json +901 -0
- dkg/data/interfaces/ServiceAgreementStorageV1U1.json +1097 -0
- dkg/data/interfaces/ServiceAgreementV1.json +741 -0
- dkg/data/interfaces/ShardingTable.json +268 -0
- dkg/data/interfaces/ShardingTableStorage.json +317 -0
- dkg/data/interfaces/Staking.json +456 -0
- dkg/data/interfaces/StakingStorage.json +407 -0
- dkg/data/interfaces/Token.json +544 -0
- dkg/data/interfaces/UnfinalizedStateStorage.json +171 -0
- dkg/data/interfaces/WhitelistStorage.json +124 -0
- dkg/dataclasses.py +45 -0
- dkg/exceptions.py +161 -0
- dkg/graph.py +63 -0
- dkg/main.py +74 -0
- dkg/manager.py +64 -0
- dkg/method.py +131 -0
- dkg/module.py +63 -0
- dkg/node.py +54 -0
- dkg/providers/__init__.py +2 -0
- dkg/providers/blockchain.py +181 -0
- dkg/providers/node_http.py +62 -0
- dkg/types/__init__.py +8 -0
- dkg/types/blockchain.py +58 -0
- dkg/types/dkg_node.py +20 -0
- dkg/types/encoding.py +22 -0
- dkg/types/evm.py +25 -0
- dkg/types/generics.py +21 -0
- dkg/types/network.py +20 -0
- dkg/types/rdf.py +21 -0
- dkg/utils/__init__.py +0 -0
- dkg/utils/blockchain_request.py +159 -0
- dkg/utils/decorators.py +46 -0
- dkg/utils/merkle.py +173 -0
- dkg/utils/metadata.py +50 -0
- dkg/utils/node_request.py +197 -0
- dkg/utils/rdf.py +51 -0
- dkg/utils/string_transformations.py +22 -0
- dkg/utils/ual.py +41 -0
- dkg-0.1.0b1.dist-info/LICENSE +202 -0
- dkg-0.1.0b1.dist-info/METADATA +453 -0
- dkg-0.1.0b1.dist-info/RECORD +62 -0
- dkg-0.1.0b1.dist-info/WHEEL +4 -0
dkg/utils/blockchain_request.py
ADDED
@@ -0,0 +1,159 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from dataclasses import dataclass, field
from typing import Type

from dkg.types import Address, HexStr, Wei


@dataclass
class JSONRPCRequest:
    endpoint: str
    args: dict[str, Type] = field(default_factory=dict)


@dataclass
class ContractInteraction:
    contract: str
    function: str
    args: dict[str, Type] = field(default_factory=dict)


@dataclass
class ContractTransaction(ContractInteraction):
    gas_price: Wei | None = None
    gas_limit: Wei | None = None


@dataclass
class ContractCall(ContractInteraction):
    pass


class BlockchainRequest:
    chain_id = JSONRPCRequest("chain_id")
    get_block = JSONRPCRequest("get_block", args={"block_identifier": str | int})

    get_contract_address = ContractCall(
        contract="Hub",
        function="getContractAddress",
        args={"contractName": str},
    )
    get_asset_storage_address = ContractCall(
        contract="Hub",
        function="getAssetStorageAddress",
        args={"assetStorageName": str},
    )

    increase_allowance = ContractTransaction(
        contract="Token",
        function="increaseAllowance",
        args={"spender": Address, "addedValue": Wei},
    )
    decrease_allowance = ContractTransaction(
        contract="Token",
        function="decreaseAllowance",
        args={"spender": Address, "subtractedValue": Wei},
    )

    create_asset = ContractTransaction(
        contract="ContentAsset",
        function="createAsset",
        args={"args": dict[str, bytes | int | Wei | bool]},
    )
    burn_asset = ContractTransaction(
        contract="ContentAsset",
        function="burnAsset",
        args={"tokenId": int},
    )
    update_asset_state = ContractTransaction(
        contract="ContentAsset",
        function="updateAssetState",
        args={
            "tokenId": int,
            "assertionId": bytes | HexStr,
            "size": int,
            "triplesNumber": int,
            "chunksNumber": int,
            "updateTokenAmount": int,
        },
    )
    cancel_asset_state_update = ContractTransaction(
        contract="ContentAsset",
        function="cancelAssetStateUpdate",
        args={"tokenId": int},
    )
    extend_asset_storing_period = ContractTransaction(
        contract="ContentAsset",
        function="extendAssetStoringPeriod",
        args={"tokenId": int, "epochsNumber": int, "tokenAmount": int},
    )
    increase_asset_token_amount = ContractTransaction(
        contract="ContentAsset",
        function="increaseAssetTokenAmount",
        args={"tokenId": int, "tokenAmount": int},
    )
    increase_asset_update_token_amount = ContractTransaction(
        contract="ContentAsset",
        function="increaseAssetUpdateTokenAmount",
        args={"tokenId": int, "tokenAmount": int},
    )

    transfer_asset = ContractTransaction(
        contract="ContentAssetStorage",
        function="transferFrom",
        args={"from": Address, "to": Address, "tokenId": int},
    )
    get_assertion_ids = ContractCall(
        contract="ContentAssetStorage",
        function="getAssertionIds",
        args={"tokenId": int},
    )
    get_assertion_id_by_index = ContractCall(
        contract="ContentAssetStorage",
        function="getAssertionIdByIndex",
        args={"tokenId": int, "index": int},
    )
    get_latest_assertion_id = ContractCall(
        contract="ContentAssetStorage",
        function="getLatestAssertionId",
        args={"tokenId": int},
    )
    owner_of = ContractCall(
        contract="ContentAssetStorage",
        function="ownerOf",
        args={"tokenId": int},
    )

    get_unfinalized_state = ContractCall(
        contract="UnfinalizedStateStorage",
        function="getUnfinalizedState",
        args={"tokenId": int},
    )

    get_service_agreement_data = ContractCall(
        contract="ServiceAgreementStorageProxy",
        function="getAgreementData",
        args={"agreementId": HexStr},
    )

    get_assertion_size = ContractCall(
        contract="AssertionStorage",
        function="getAssertionSize",
        args={"assertionId": HexStr},
    )
dkg/utils/decorators.py
ADDED
@@ -0,0 +1,46 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import time
from functools import wraps
from typing import Any, Callable

from dkg.exceptions import NodeRequestError


def retry(
    catch: Exception, max_retries: int, base_delay: int, backoff: float
) -> Callable[[Callable], Callable]:
    def decorator(func: Callable) -> Callable:
        @wraps(func)
        def wrapper(*args, **kwargs) -> Any:
            _delay = base_delay

            for _ in range(max_retries):
                try:
                    return func(*args, **kwargs)
                except catch:
                    time.sleep(_delay)
                    _delay *= backoff

            raise NodeRequestError(
                f"Failed executing {func.__name__} after {max_retries} retries."
            )

        return wrapper

    return decorator
dkg/utils/merkle.py
ADDED
@@ -0,0 +1,173 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import copy
import hashlib
from typing import Callable

from dkg.exceptions import LeafNotInTree
from dkg.types import HexStr
from eth_abi.packed import encode_packed
from hexbytes import HexBytes
from web3 import Web3


def solidity_keccak256(data: HexStr) -> HexStr:
    bytes_hash: HexBytes = Web3.solidity_keccak(
        ["bytes"],
        [data],
    )

    return bytes_hash.hex()


def hash_assertion_with_indexes(
    leaves: list[str],
    hash_function: str | Callable[[str], HexStr] = solidity_keccak256,
    sort: bool = True,
) -> list[HexStr]:
    if sort:
        leaves.sort()

    return list(
        map(
            hash_function,
            [
                encode_packed(
                    ["bytes32", "uint256"],
                    [Web3.solidity_keccak(["string"], [leaf]), i],
                )
                for i, leaf in enumerate(leaves)
            ],
        )
    )


class MerkleTree:
    def __init__(
        self,
        leaves: list[str],
        hash_function: str | Callable[[str], HexStr] = solidity_keccak256,
        sort_leaves: bool = False,
        sort_pairs: bool = False,
    ):
        self.hash_function = self._set_hash_function(hash_function)
        self.sort_leaves = sort_leaves
        self.sort_pairs = sort_pairs
        self.leaves = self._process_leaves(leaves)
        self.tree = self.build_tree()

    @property
    def root(self) -> HexStr:
        return self.tree[0][0]

    def build_tree(self) -> list[list[HexStr]]:
        tree = [self.leaves]

        while len(level := tree[-1]) > 1:
            next_level = []
            for h1, h2 in zip(level[::2], level[1::2] + [None]):
                if h2:
                    next_level.append(
                        self.hash_function(
                            h1 + h2[2:]
                            if not self.sort_pairs
                            else "0x" + "".join(sorted([h1[2:], h2[2:]]))
                        )
                    )
                else:
                    next_level.append(h1)

            tree.append(next_level)

        tree.reverse()
        return tree

    def proof(self, leaf: HexStr, index: int | None = None) -> list[HexStr]:
        if index is None:
            for i, t_leaf in enumerate(self.leaves):
                if leaf == t_leaf:
                    index = i
                    break

            if index is None:
                raise LeafNotInTree(f"{leaf} is not a part of the Merkle Tree.")

        proof = []
        levels = copy.deepcopy(self.tree[:0:-1])
        for level in levels:
            if (len(level) % 2) == 1:
                level.append(level[-1])

            if (index % 2) == 1:
                proof.append(level[index - 1])
            else:
                proof.append(level[index + 1])

            index //= 2

        return proof

    def verify(self, proof: list[HexStr], leaf: HexStr) -> bool:
        if self.sort_pairs:
            hash = leaf
            for p in proof:
                if hash == p:
                    continue

                hash = self.hash_function("".join(sorted([hash, p])))

        else:
            for i, t_leaf in enumerate(self.leaves):
                if leaf == t_leaf:
                    index = i
                    break

            if index is None:
                raise LeafNotInTree(f"{leaf} is not a part of the Merkle Tree.")

            hash = leaf
            for p in proof:
                if hash == p:
                    continue

                is_left = (index % 2) == 0
                hash = self.hash_function("".join([hash, p] if is_left else [p, hash]))
                index //= 2

        return hash == self.root

    def _process_leaves(self, leaves: list[str | HexStr]) -> list[HexStr]:
        if self.sort_leaves:
            leaves.sort()

        return leaves

    def _set_hash_function(
        self, hash_function: str | Callable[[str], HexStr]
    ) -> Callable[[str], HexStr]:
        if (
            isinstance(hash_function, str)
            and hash_function in hashlib.algorithms_available
        ):
            return lambda data: getattr(hashlib, hash_function)(
                data.encode()
            ).hexdigest()
        elif isinstance(hash_function, Callable):
            return hash_function
        else:
            raise ValueError()
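A minimal sketch of the Merkle helpers above, assuming the package and its web3 / eth-abi / hexbytes dependencies are installed; the two N-Quads are made-up sample data.

```python
# Hash each quad together with its index (as the SDK does for assertions),
# build a sorted-pairs Merkle tree, then read off the root and a proof.
from dkg.utils.merkle import MerkleTree, hash_assertion_with_indexes

quads = [
    '<urn:s> <urn:p> "hello" .',
    '<urn:s> <urn:p> "world" .',
]

leaves = hash_assertion_with_indexes(quads)
tree = MerkleTree(leaves, sort_pairs=True)

print(tree.root)             # Merkle root as a hex string
print(tree.proof(leaves[0]))  # sibling hashes needed to reach the root
```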
dkg/utils/metadata.py
ADDED
@@ -0,0 +1,50 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import hashlib
import json

from dkg.types import Address, NQuads
from eth_abi.packed import encode_packed


def generate_assertion_metadata(assertion: NQuads) -> dict[str, int]:
    return {
        "size": len(json.dumps(assertion, separators=(",", ":")).encode("utf-8")),
        "triples_number": len(assertion),
        "chunks_number": len(assertion),  # TODO: Change when chunking introduced
    }


def generate_keyword(contract_address: Address, assertion_id: bytes) -> bytes:
    return encode_packed(
        ["address", "bytes32"],
        [contract_address, assertion_id],
    )


def generate_agreement_id(
    contract_address: Address,
    token_id: int,
    keyword: bytes,
) -> bytes:
    return hashlib.sha256(
        encode_packed(
            ["address", "uint256", "bytes"],
            [contract_address, token_id, keyword],
        )
    ).digest()
dkg/utils/node_request.py
ADDED
@@ -0,0 +1,197 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from dataclasses import dataclass, field
from enum import Enum
from typing import Any, Type

from dkg.dataclasses import HTTPRequestMethod
from dkg.exceptions import OperationFailed, OperationNotFinished
from dkg.types import UAL, Address, DataHexStr, NQuads


@dataclass
class NodeCall:
    method: HTTPRequestMethod
    path: str
    params: dict[str, Type] = field(default_factory=dict)
    data: dict[str, Type] | Type = field(default_factory=dict)


class NodeRequest:
    info = NodeCall(method=HTTPRequestMethod.GET, path="info")
    bid_suggestion = NodeCall(
        method=HTTPRequestMethod.GET,
        path="bid-suggestion",
        params={
            "blockchain": str,
            "epochsNumber": int,
            "assertionSize": int,
            "contentAssetStorageAddress": Address,
            "firstAssertionId": DataHexStr,
            "hashFunctionId": int,
        },
    )
    get_operation_result = NodeCall(
        method=HTTPRequestMethod.GET,
        path="{operation}/{operation_id}",
    )

    local_store = NodeCall(
        method=HTTPRequestMethod.POST,
        path="local-store",
        data=list[dict[str, str | Address | NQuads]],
    )
    publish = NodeCall(
        method=HTTPRequestMethod.POST,
        path="publish",
        data={
            "assertionId": str,
            "assertion": NQuads,
            "blockchain": str,
            "contract": Address,
            "tokenId": int,
            "hashFunctionId": int,
        },
    )
    update = NodeCall(
        method=HTTPRequestMethod.POST,
        path="update",
        data={
            "assertionId": str,
            "assertion": NQuads,
            "blockchain": str,
            "contract": Address,
            "tokenId": int,
            "hashFunctionId": int,
        },
    )
    get = NodeCall(
        method=HTTPRequestMethod.POST,
        path="get",
        data={"id": UAL, "state": str, "hashFunctionId": int},
    )
    query = NodeCall(
        method=HTTPRequestMethod.POST,
        path="query",
        data={"query": str, "type": str, "repository": str},
    )


class LocalStoreOperationStatus(Enum):
    LOCAL_STORE_INIT_START = "LOCAL_STORE_INIT_START"
    LOCAL_STORE_INIT_END = "LOCAL_STORE_INIT_END"
    LOCAL_STORE_START = "LOCAL_STORE_START"
    LOCAL_STORE_END = "LOCAL_STORE_END"


class PublishOperationStatus(Enum):
    VALIDATING_PUBLISH_ASSERTION_REMOTE_START = (
        "VALIDATING_PUBLISH_ASSERTION_REMOTE_START"
    )
    VALIDATING_PUBLISH_ASSERTION_REMOTE_END = "VALIDATING_PUBLISH_ASSERTION_REMOTE_END"
    INSERTING_ASSERTION = "INSERTING_ASSERTION"
    PUBLISHING_ASSERTION = "PUBLISHING_ASSERTION"
    PUBLISH_START = "PUBLISH_START"
    PUBLISH_INIT_START = "PUBLISH_INIT_START"
    PUBLISH_INIT_END = "PUBLISH_INIT_END"
    PUBLISH_LOCAL_STORE_START = "PUBLISH_LOCAL_STORE_START"
    PUBLISH_LOCAL_STORE_END = "PUBLISH_LOCAL_STORE_END"
    PUBLISH_REPLICATE_START = "PUBLISH_REPLICATE_START"
    PUBLISH_REPLICATE_END = "PUBLISH_REPLICATE_END"
    PUBLISH_END = "PUBLISH_END"


class UpdateOperationStatus(Enum):
    UPDATE_START = "UPDATE_START"
    UPDATE_INIT_START = "UPDATE_INIT_START"
    UPDATE_INIT_END = "UPDATE_INIT_END"
    UPDATE_REPLICATE_START = "UPDATE_REPLICATE_START"
    UPDATE_REPLICATE_END = "UPDATE_REPLICATE_END"
    VALIDATING_UPDATE_ASSERTION_REMOTE_START = (
        "VALIDATING_UPDATE_ASSERTION_REMOTE_START"
    )
    VALIDATING_UPDATE_ASSERTION_REMOTE_END = "VALIDATING_UPDATE_ASSERTION_REMOTE_END"
    UPDATE_END = "UPDATE_END"


class StoreTypes(Enum):
    TRIPLE = "TRIPLE"
    PENDING = "PENDING"


class GetOperationStatus(Enum):
    ASSERTION_EXISTS_LOCAL_START = "ASSERTION_EXISTS_LOCAL_START"
    ASSERTION_EXISTS_LOCAL_END = "ASSERTION_EXISTS_LOCAL_END"
    GET_START = "GET_START"
    GET_INIT_START = "GET_INIT_START"
    GET_INIT_END = "GET_INIT_END"
    GET_LOCAL_START = "GET_LOCAL_START"
    GET_LOCAL_END = "GET_LOCAL_END"
    GET_REMOTE_START = "GET_REMOTE_START"
    GET_REMOTE_END = "GET_REMOTE_END"
    GET_FETCH_FROM_NODES_START = "GET_FETCH_FROM_NODES_START"
    GET_FETCH_FROM_NODES_END = "GET_FETCH_FROM_NODES_END"
    GET_END = "GET_END"


class QueryOperationStatus(Enum):
    QUERY_INIT_START = "QUERY_INIT_START"
    QUERY_INIT_END = "QUERY_INIT_END"
    QUERY_START = "QUERY_START"
    QUERY_END = "QUERY_END"


class OperationStatus(Enum):
    PENDING = "PENDING"
    FAILED = "FAILED"
    COMPLETED = "COMPLETED"
    FIND_NODES_START = "FIND_NODES_START"
    FIND_NODES_END = "FIND_NODES_END"
    FIND_NODES_LOCAL_START = "FIND_NODES_LOCAL_START"
    FIND_NODES_LOCAL_END = "FIND_NODES_LOCAL_END"
    FIND_NODES_OPEN_CONNECTION_START = "FIND_NODES_OPEN_CONNECTION_START"
    FIND_NODES_OPEN_CONNECTION_END = "FIND_NODES_OPEN_CONNECTION_END"
    FIND_NODES_CREATE_STREAM_START = "FIND_NODES_CREATE_STREAM_START"
    FIND_NODES_CREATE_STREAM_END = "FIND_NODES_CREATE_STREAM_END"
    FIND_NODES_SEND_MESSAGE_START = "FIND_NODES_SEND_MESSAGE_START"
    FIND_NODES_SEND_MESSAGE_END = "FIND_NODES_SEND_MESSAGE_END"
    DIAL_PROTOCOL_START = "DIAL_PROTOCOL_START"
    DIAL_PROTOCOL_END = "DIAL_PROTOCOL_END"
    LOCAL_STORE = LocalStoreOperationStatus
    PUBLISH = PublishOperationStatus
    UPDATE = UpdateOperationStatus
    GET = GetOperationStatus
    QUERY = QueryOperationStatus


def validate_operation_status(operation_result: dict[str, Any]) -> None:
    try:
        status = OperationStatus(operation_result["status"])
    except ValueError:
        raise OperationNotFinished("Operation isn't finished")

    match status:
        case OperationStatus.COMPLETED:
            return
        case OperationStatus.FAILED:
            raise OperationFailed(
                f"Operation failed! {operation_result['data']['errorType']}: "
                f"{operation_result['data']['errorMessage']}."
            )
        case _:
            raise OperationNotFinished("Operation isn't finished")
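A minimal sketch of `validate_operation_status` above, assuming the package is installed; the dictionaries stand in for responses a DKG node would return while an operation is being polled.

```python
# COMPLETED returns silently; any intermediate status that is not a member of
# OperationStatus itself is reported as "not finished yet".
from dkg.exceptions import OperationNotFinished
from dkg.utils.node_request import validate_operation_status

validate_operation_status({"status": "COMPLETED"})  # finished, returns None

try:
    validate_operation_status({"status": "PUBLISH_REPLICATE_START"})
except OperationNotFinished:
    print("operation still in progress, poll again")
```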
dkg/utils/rdf.py
ADDED
@@ -0,0 +1,51 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from typing import Literal

from dkg.exceptions import DatasetInputFormatNotSupported, InvalidDataset
from dkg.types import JSONLD, NQuads
from pyld import jsonld


def normalize_dataset(
    dataset: JSONLD | NQuads,
    input_format: Literal["JSON-LD", "N-Quads"] = "JSON-LD",
) -> NQuads:
    normalization_options = {
        "algorithm": "URDNA2015",
        "format": "application/n-quads",
    }

    match input_format.lower():
        case "json-ld" | "jsonld":
            pass
        case "n-quads" | "nquads":
            normalization_options["inputFormat"] = "application/n-quads"
        case _:
            raise DatasetInputFormatNotSupported(
                f"Dataset input format isn't supported: {input_format}. "
                "Supported formats: JSON-LD / N-Quads."
            )

    n_quads = jsonld.normalize(dataset, normalization_options)
    assertion = [quad for quad in n_quads.split("\n") if quad]

    if not assertion:
        raise InvalidDataset("Invalid dataset, no quads were extracted.")

    return assertion
dkg/utils/string_transformations.py
ADDED
@@ -0,0 +1,22 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

def snake_to_camel(string: str) -> str:
    splitted_string = string.split("_")
    return splitted_string[0] + "".join(
        token.capitalize() for token in splitted_string[1:]
    )
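A minimal sketch of `snake_to_camel` above, which converts snake_case names into the camelCase keys used in the request definitions earlier in this diff.

```python
from dkg.utils.string_transformations import snake_to_camel

print(snake_to_camel("token_id"))          # tokenId
print(snake_to_camel("hash_function_id"))  # hashFunctionId
```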