@pioneer-platform/pioneer-discovery 0.0.18 → 0.0.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/data.js +0 -83
- package/lib/generatedAssetData.json +85114 -45427
- package/package.json +2 -2
- package/scripts/fix-network-names.sh +256 -0
- package/scripts/purge-ibc.sh +127 -0
- package/scripts/validate.sh +231 -0
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@pioneer-platform/pioneer-discovery",
|
|
3
|
-
"version": "0.0.
|
|
3
|
+
"version": "0.0.20",
|
|
4
4
|
"main": "./lib/index.js",
|
|
5
5
|
"types": "./lib/main.d.ts",
|
|
6
6
|
"_moduleAliases": {
|
|
@@ -28,4 +28,4 @@
|
|
|
28
28
|
"@pioneer-platform/pioneer-caip": "^9.2.34",
|
|
29
29
|
"ethers": "5.7.2"
|
|
30
30
|
}
|
|
31
|
-
}
|
|
31
|
+
}
|
|
@@ -0,0 +1,256 @@
|
|
|
1
|
+
#!/bin/bash
|
|
2
|
+
|
|
3
|
+
# Script to fix missing networkName fields in generatedAssetData.json
|
|
4
|
+
# Maps chain IDs to appropriate network names
|
|
5
|
+
|
|
6
|
+
set -e
|
|
7
|
+
|
|
8
|
+
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|
9
|
+
JSON_FILE="$SCRIPT_DIR/../src/generatedAssetData.json"
|
|
10
|
+
BACKUP_FILE="$SCRIPT_DIR/../src/generatedAssetData.json.backup.$(date +%Y%m%d_%H%M%S)"
|
|
11
|
+
TEMP_FILE="$SCRIPT_DIR/../src/generatedAssetData_temp.json"
|
|
12
|
+
|
|
13
|
+
echo "========================================"
|
|
14
|
+
echo "Network Name Fix Script"
|
|
15
|
+
echo "========================================"
|
|
16
|
+
|
|
17
|
+
# Check if jq is installed
|
|
18
|
+
if ! command -v jq &> /dev/null; then
|
|
19
|
+
echo "Error: jq is required but not installed. Install it with: brew install jq"
|
|
20
|
+
exit 1
|
|
21
|
+
fi
|
|
22
|
+
|
|
23
|
+
# Create timestamped backup
|
|
24
|
+
echo "Creating backup at $BACKUP_FILE"
|
|
25
|
+
cp "$JSON_FILE" "$BACKUP_FILE"
|
|
26
|
+
|
|
27
|
+
# Count initial statistics
|
|
28
|
+
echo ""
|
|
29
|
+
echo "Analyzing current state..."
|
|
30
|
+
TOTAL_ENTRIES=$(jq 'keys | length' "$JSON_FILE")
|
|
31
|
+
MISSING_NETWORK_NAME=$(jq -r '
|
|
32
|
+
to_entries
|
|
33
|
+
| map(select(.value.networkName == null))
|
|
34
|
+
| length
|
|
35
|
+
' "$JSON_FILE")
|
|
36
|
+
|
|
37
|
+
echo "Total entries: $TOTAL_ENTRIES"
|
|
38
|
+
echo "Entries missing networkName: $MISSING_NETWORK_NAME"
|
|
39
|
+
|
|
40
|
+
# Analyze by chain pattern
|
|
41
|
+
echo ""
|
|
42
|
+
echo "Analyzing missing networkName by chain pattern:"
|
|
43
|
+
echo ""
|
|
44
|
+
|
|
45
|
+
# Ethereum mainnet (eip155:1)
|
|
46
|
+
ETH_MISSING=$(jq -r '
|
|
47
|
+
to_entries
|
|
48
|
+
| map(select(.key | startswith("eip155:1/")))
|
|
49
|
+
| map(select(.value.networkName == null))
|
|
50
|
+
| length
|
|
51
|
+
' "$JSON_FILE")
|
|
52
|
+
echo " Ethereum (eip155:1): $ETH_MISSING entries"
|
|
53
|
+
|
|
54
|
+
# Polygon (eip155:137)
|
|
55
|
+
POLYGON_MISSING=$(jq -r '
|
|
56
|
+
to_entries
|
|
57
|
+
| map(select(.key | startswith("eip155:137/")))
|
|
58
|
+
| map(select(.value.networkName == null))
|
|
59
|
+
| length
|
|
60
|
+
' "$JSON_FILE")
|
|
61
|
+
echo " Polygon (eip155:137): $POLYGON_MISSING entries"
|
|
62
|
+
|
|
63
|
+
# Binance Smart Chain (eip155:56)
|
|
64
|
+
BSC_MISSING=$(jq -r '
|
|
65
|
+
to_entries
|
|
66
|
+
| map(select(.key | startswith("eip155:56/")))
|
|
67
|
+
| map(select(.value.networkName == null))
|
|
68
|
+
| length
|
|
69
|
+
' "$JSON_FILE")
|
|
70
|
+
echo " BSC (eip155:56): $BSC_MISSING entries"
|
|
71
|
+
|
|
72
|
+
# Avalanche (eip155:43114)
|
|
73
|
+
AVAX_MISSING=$(jq -r '
|
|
74
|
+
to_entries
|
|
75
|
+
| map(select(.key | startswith("eip155:43114/")))
|
|
76
|
+
| map(select(.value.networkName == null))
|
|
77
|
+
| length
|
|
78
|
+
' "$JSON_FILE")
|
|
79
|
+
echo " Avalanche (eip155:43114): $AVAX_MISSING entries"
|
|
80
|
+
|
|
81
|
+
# Arbitrum (eip155:42161)
|
|
82
|
+
ARB_MISSING=$(jq -r '
|
|
83
|
+
to_entries
|
|
84
|
+
| map(select(.key | startswith("eip155:42161/")))
|
|
85
|
+
| map(select(.value.networkName == null))
|
|
86
|
+
| length
|
|
87
|
+
' "$JSON_FILE")
|
|
88
|
+
echo " Arbitrum (eip155:42161): $ARB_MISSING entries"
|
|
89
|
+
|
|
90
|
+
# Optimism (eip155:10)
|
|
91
|
+
OP_MISSING=$(jq -r '
|
|
92
|
+
to_entries
|
|
93
|
+
| map(select(.key | startswith("eip155:10/")))
|
|
94
|
+
| map(select(.value.networkName == null))
|
|
95
|
+
| length
|
|
96
|
+
' "$JSON_FILE")
|
|
97
|
+
echo " Optimism (eip155:10): $OP_MISSING entries"
|
|
98
|
+
|
|
99
|
+
# Base (eip155:8453)
|
|
100
|
+
BASE_MISSING=$(jq -r '
|
|
101
|
+
to_entries
|
|
102
|
+
| map(select(.key | startswith("eip155:8453/")))
|
|
103
|
+
| map(select(.value.networkName == null))
|
|
104
|
+
| length
|
|
105
|
+
' "$JSON_FILE")
|
|
106
|
+
echo " Base (eip155:8453): $BASE_MISSING entries"
|
|
107
|
+
|
|
108
|
+
echo ""
|
|
109
|
+
echo "Fixing missing networkName fields..."
|
|
110
|
+
|
|
111
|
+
# Apply fixes based on chain ID patterns
|
|
112
|
+
jq '
|
|
113
|
+
to_entries
|
|
114
|
+
| map(
|
|
115
|
+
{
|
|
116
|
+
key: .key,
|
|
117
|
+
value: (
|
|
118
|
+
if .value.networkName == null then
|
|
119
|
+
if .key | startswith("eip155:1/") then
|
|
120
|
+
.value + {networkName: "ethereum"}
|
|
121
|
+
elif .key | startswith("eip155:137/") then
|
|
122
|
+
.value + {networkName: "polygon"}
|
|
123
|
+
elif .key | startswith("eip155:56/") then
|
|
124
|
+
.value + {networkName: "bsc"}
|
|
125
|
+
elif .key | startswith("eip155:43114/") then
|
|
126
|
+
.value + {networkName: "avalanche"}
|
|
127
|
+
elif .key | startswith("eip155:42161/") then
|
|
128
|
+
.value + {networkName: "arbitrum"}
|
|
129
|
+
elif .key | startswith("eip155:10/") then
|
|
130
|
+
.value + {networkName: "optimism"}
|
|
131
|
+
elif .key | startswith("eip155:8453/") then
|
|
132
|
+
.value + {networkName: "base"}
|
|
133
|
+
elif .key | startswith("cosmos:cosmoshub-4/") then
|
|
134
|
+
.value + {networkName: "cosmos"}
|
|
135
|
+
elif .key | startswith("cosmos:osmosis-1/") then
|
|
136
|
+
.value + {networkName: "osmosis"}
|
|
137
|
+
elif .key | startswith("cosmos:kaiyo-1/") then
|
|
138
|
+
.value + {networkName: "kujira"}
|
|
139
|
+
elif .key | startswith("cosmos:maya-mainnet-v1/") then
|
|
140
|
+
.value + {networkName: "maya"}
|
|
141
|
+
elif .key | startswith("bip122:000000000019d6689c085ae165831e93/") then
|
|
142
|
+
.value + {networkName: "bitcoin"}
|
|
143
|
+
elif .key | startswith("bip122:000000000000000000651ef99cb9fcbe/") then
|
|
144
|
+
.value + {networkName: "bitcoincash"}
|
|
145
|
+
elif .key | startswith("bip122:0000000000196a45/") then
|
|
146
|
+
.value + {networkName: "zcash"}
|
|
147
|
+
elif .key | startswith("bip122:12a765e31ffd4059bada1e25190f6e98/") then
|
|
148
|
+
.value + {networkName: "litecoin"}
|
|
149
|
+
elif .key | startswith("bip122:00000000001a91e3dace36e2be3bf030/") then
|
|
150
|
+
.value + {networkName: "dogecoin"}
|
|
151
|
+
elif .key | startswith("bip122:00000000000000000002a7c4c1e48d76/") then
|
|
152
|
+
.value + {networkName: "dash"}
|
|
153
|
+
elif .key | startswith("ripple:") then
|
|
154
|
+
.value + {networkName: "ripple"}
|
|
155
|
+
elif .key | startswith("binance:") then
|
|
156
|
+
.value + {networkName: "binance"}
|
|
157
|
+
else
|
|
158
|
+
# Try to extract a reasonable default from chainId if available
|
|
159
|
+
if .value.chainId then
|
|
160
|
+
# Extract the chain type and use as network name
|
|
161
|
+
if .value.chainId | startswith("eip155:") then
|
|
162
|
+
.value + {networkName: "ethereum-compatible"}
|
|
163
|
+
elif .value.chainId | startswith("cosmos:") then
|
|
164
|
+
.value + {networkName: "cosmos-compatible"}
|
|
165
|
+
elif .value.chainId | startswith("bip122:") then
|
|
166
|
+
.value + {networkName: "bitcoin-compatible"}
|
|
167
|
+
else
|
|
168
|
+
.value
|
|
169
|
+
end
|
|
170
|
+
else
|
|
171
|
+
.value
|
|
172
|
+
end
|
|
173
|
+
end
|
|
174
|
+
else
|
|
175
|
+
.value
|
|
176
|
+
end
|
|
177
|
+
)
|
|
178
|
+
}
|
|
179
|
+
)
|
|
180
|
+
| from_entries
|
|
181
|
+
' "$JSON_FILE" > "$TEMP_FILE"
|
|
182
|
+
|
|
183
|
+
# Validate the result
|
|
184
|
+
if jq empty "$TEMP_FILE" 2>/dev/null; then
|
|
185
|
+
# Move temp file to main file
|
|
186
|
+
mv "$TEMP_FILE" "$JSON_FILE"
|
|
187
|
+
|
|
188
|
+
# Count final statistics
|
|
189
|
+
echo ""
|
|
190
|
+
echo "✅ Success! Recalculating statistics..."
|
|
191
|
+
echo ""
|
|
192
|
+
|
|
193
|
+
FINAL_MISSING=$(jq -r '
|
|
194
|
+
to_entries
|
|
195
|
+
| map(select(.value.networkName == null))
|
|
196
|
+
| length
|
|
197
|
+
' "$JSON_FILE")
|
|
198
|
+
|
|
199
|
+
FIXED=$((MISSING_NETWORK_NAME - FINAL_MISSING))
|
|
200
|
+
|
|
201
|
+
echo "Results:"
|
|
202
|
+
echo " Initial entries missing networkName: $MISSING_NETWORK_NAME"
|
|
203
|
+
echo " Final entries missing networkName: $FINAL_MISSING"
|
|
204
|
+
echo " Fixed entries: $FIXED"
|
|
205
|
+
|
|
206
|
+
# Show breakdown of what was fixed
|
|
207
|
+
echo ""
|
|
208
|
+
echo "Fixed by network type:"
|
|
209
|
+
|
|
210
|
+
ETH_FIXED=$(jq -r '
|
|
211
|
+
to_entries
|
|
212
|
+
| map(select(.key | startswith("eip155:1/")))
|
|
213
|
+
| map(select(.value.networkName == "ethereum"))
|
|
214
|
+
| length
|
|
215
|
+
' "$JSON_FILE")
|
|
216
|
+
echo " Ethereum: $ETH_FIXED entries"
|
|
217
|
+
|
|
218
|
+
if [ "$POLYGON_MISSING" -gt 0 ]; then
|
|
219
|
+
POLYGON_FIXED=$(jq -r '
|
|
220
|
+
to_entries
|
|
221
|
+
| map(select(.key | startswith("eip155:137/")))
|
|
222
|
+
| map(select(.value.networkName == "polygon"))
|
|
223
|
+
| length
|
|
224
|
+
' "$JSON_FILE")
|
|
225
|
+
echo " Polygon: $POLYGON_FIXED entries"
|
|
226
|
+
fi
|
|
227
|
+
|
|
228
|
+
if [ "$BSC_MISSING" -gt 0 ]; then
|
|
229
|
+
BSC_FIXED=$(jq -r '
|
|
230
|
+
to_entries
|
|
231
|
+
| map(select(.key | startswith("eip155:56/")))
|
|
232
|
+
| map(select(.value.networkName == "bsc"))
|
|
233
|
+
| length
|
|
234
|
+
' "$JSON_FILE")
|
|
235
|
+
echo " BSC: $BSC_FIXED entries"
|
|
236
|
+
fi
|
|
237
|
+
|
|
238
|
+
# Show examples of fixed entries
|
|
239
|
+
echo ""
|
|
240
|
+
echo "Examples of fixed entries:"
|
|
241
|
+
jq -r '
|
|
242
|
+
to_entries
|
|
243
|
+
| map(select(.key | startswith("eip155:1/erc20:")))
|
|
244
|
+
| .[0:3]
|
|
245
|
+
| map(" " + .key + " -> networkName: " + .value.networkName)
|
|
246
|
+
| .[]
|
|
247
|
+
' "$JSON_FILE"
|
|
248
|
+
|
|
249
|
+
echo ""
|
|
250
|
+
echo "Backup saved at: $BACKUP_FILE"
|
|
251
|
+
echo "To restore: cp $BACKUP_FILE $JSON_FILE"
|
|
252
|
+
else
|
|
253
|
+
echo "❌ Error: The processed JSON is invalid. Original file preserved."
|
|
254
|
+
rm -f "$TEMP_FILE"
|
|
255
|
+
exit 1
|
|
256
|
+
fi
|
|
#!/bin/bash
#
# purge-ibc.sh — remove unwanted Cosmos IBC assets from
# generatedAssetData.json, keeping only IBC assets that live on the
# Osmosis chain (keys starting with "cosmos:osmosis-1/ibc:").
# A timestamped backup is written before any change, and the user is
# asked to confirm before anything is removed.
#
# Requires: jq

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
JSON_FILE="$SCRIPT_DIR/../src/generatedAssetData.json"
BACKUP_FILE="$SCRIPT_DIR/../src/generatedAssetData.json.backup.$(date +%Y%m%d_%H%M%S)"
TEMP_FILE="$SCRIPT_DIR/../src/generatedAssetData_temp.json"

echo "========================================"
echo "IBC Asset Purge Script"
echo "========================================"

# jq does all JSON manipulation below; bail out early if it is absent.
if ! command -v jq &> /dev/null; then
  echo "Error: jq is required but not installed. Install it with: brew install jq" >&2
  exit 1
fi

# Fail with a clear message if the data file is missing.
if [[ ! -f "$JSON_FILE" ]]; then
  echo "Error: data file not found: $JSON_FILE" >&2
  exit 1
fi

# Create timestamped backup so every run is reversible.
echo "Creating backup at $BACKUP_FILE"
cp "$JSON_FILE" "$BACKUP_FILE"

# Count initial entries.
INITIAL_TOTAL=$(jq 'keys | length' "$JSON_FILE")
echo "Initial total entries: $INITIAL_TOTAL"

# Count IBC entries before removal.
echo ""
echo "Analyzing IBC assets..."
TOTAL_IBC=$(jq -r 'to_entries | map(select(.key | contains("/ibc:"))) | length' "$JSON_FILE")
OSMOSIS_IBC=$(jq -r 'to_entries | map(select(.key | startswith("cosmos:osmosis-1/ibc:"))) | length' "$JSON_FILE")
OTHER_IBC=$((TOTAL_IBC - OSMOSIS_IBC))

echo "  Total IBC assets: $TOTAL_IBC"
echo "  IBC assets on Osmosis: $OSMOSIS_IBC (will be kept)"
echo "  IBC assets on other chains: $OTHER_IBC (will be removed)"

# Preview a few of the keys that will be removed.
echo ""
echo "Examples of IBC assets to be REMOVED:"
jq -r '
  to_entries
  | map(select(.key | contains("/ibc:")))
  | map(select(.key | startswith("cosmos:osmosis-1/ibc:") | not))
  | .[0:5]
  | map("  - " + .key)
  | .[]
' "$JSON_FILE"

echo ""
echo "Examples of IBC assets to be KEPT (Osmosis IBC):"
jq -r '
  to_entries
  | map(select(.key | startswith("cosmos:osmosis-1/ibc:")))
  | .[0:5]
  | map("  - " + .key)
  | .[]
' "$JSON_FILE"

# Ask for confirmation. The `|| REPLY=""` keeps a closed stdin (EOF on a
# non-interactive run) from aborting via set -e; it falls through to the
# default "No" answer instead.
echo ""
read -p "Do you want to proceed with removing $OTHER_IBC IBC assets from non-Osmosis chains? (y/N) " -n 1 -r || REPLY=""
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
  echo "Cancelled. No changes made."
  exit 0
fi

# Keep every non-IBC entry, plus IBC entries that are on Osmosis.
echo ""
echo "Removing IBC assets from non-Osmosis chains..."

jq '
  to_entries
  | map(select(
      (.key | contains("/ibc:") | not) or
      (.key | startswith("cosmos:osmosis-1/ibc:"))
    ))
  | from_entries
' "$JSON_FILE" > "$TEMP_FILE"

# Only replace the original once the rewritten JSON parses cleanly.
if jq empty "$TEMP_FILE" 2>/dev/null; then
  mv "$TEMP_FILE" "$JSON_FILE"

  # Count final entries.
  FINAL_TOTAL=$(jq 'keys | length' "$JSON_FILE")
  REMOVED=$((INITIAL_TOTAL - FINAL_TOTAL))

  # Count remaining IBC entries.
  REMAINING_IBC=$(jq -r 'to_entries | map(select(.key | contains("/ibc:"))) | length' "$JSON_FILE")

  echo "✅ Success!"
  echo ""
  echo "Statistics:"
  echo "  Initial entries: $INITIAL_TOTAL"
  echo "  Final entries: $FINAL_TOTAL"
  echo "  Removed entries: $REMOVED"
  echo "  Remaining IBC assets: $REMAINING_IBC (should all be on Osmosis)"

  # Sanity check: every remaining IBC asset should be on Osmosis.
  NON_OSMOSIS_IBC=$(jq -r '
    to_entries
    | map(select(.key | contains("/ibc:")))
    | map(select(.key | startswith("cosmos:osmosis-1/ibc:") | not))
    | length
  ' "$JSON_FILE")

  if [ "$NON_OSMOSIS_IBC" -eq 0 ]; then
    echo "  ✅ Verified: All remaining IBC assets are on Osmosis"
  else
    echo "  ⚠️ Warning: Found $NON_OSMOSIS_IBC non-Osmosis IBC assets remaining"
  fi

  echo ""
  echo "Backup saved at: $BACKUP_FILE"
  echo "To restore: cp $BACKUP_FILE $JSON_FILE"
else
  echo "❌ Error: The processed JSON is invalid. Original file preserved."
  rm -f "$TEMP_FILE"
  exit 1
fi
|
#!/bin/bash
#
# validate.sh — validate generatedAssetData.json and optionally clean
# duplicate entries.
#
# Usage:
#   validate.sh                         # report only
#   validate.sh --clean                 # also remove case-insensitive
#                                       # duplicate keys (keeps first)
#   validate.sh --show-all              # list every entry with missing
#                                       # required fields (default: 20)
# Flags may be combined in any order.
#
# Requires: jq, bc

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
JSON_FILE="$SCRIPT_DIR/../src/generatedAssetData.json"
BACKUP_FILE="$SCRIPT_DIR/../src/generatedAssetData.json.backup"

# Parse flags from anywhere in the argument list. The original code tested
# "$2" == "--show-all", which contradicted its own hint ("$0 --show-all",
# i.e. the flag as $1); scanning all arguments fixes that and also avoids
# unbound-variable errors under set -u.
DO_CLEAN=0
SHOW_ALL=0
for arg in "$@"; do
  case "$arg" in
    --clean) DO_CLEAN=1 ;;
    --show-all) SHOW_ALL=1 ;;
  esac
done

echo "========================================"
echo "Asset Data Validation and Cleanup Script"
echo "========================================"

# jq does all JSON manipulation below; bail out early if it is absent.
if ! command -v jq &> /dev/null; then
  echo "Error: jq is required but not installed. Install it with: brew install jq" >&2
  exit 1
fi

# Create backup before doing anything else.
echo "Creating backup at $BACKUP_FILE"
cp "$JSON_FILE" "$BACKUP_FILE"

# validate_json FILE — return 0 and report if FILE parses as JSON.
validate_json() {
  if jq empty "$1" 2>/dev/null; then
    echo "✅ JSON is valid"
    return 0
  else
    echo "❌ JSON is invalid"
    return 1
  fi
}

# 1. Initial validation — refuse to continue on a broken file.
echo ""
echo "1. Initial JSON validation:"
if ! validate_json "$JSON_FILE"; then
  echo "Error: Initial JSON file is invalid. Please fix syntax errors first."
  exit 1
fi

# Report file size in MB.
FILE_SIZE=$(wc -c < "$JSON_FILE")
echo "   File size: $(echo "scale=2; $FILE_SIZE / 1048576" | bc) MB"

# 2. Entry count.
echo ""
echo "2. Analyzing entries:"
TOTAL_ENTRIES=$(jq 'keys | length' "$JSON_FILE")
echo "   Total entries: $TOTAL_ENTRIES"

# 3. Duplicate assetIds (case-insensitive; falls back to the key when the
#    entry has no assetId field).
echo ""
echo "3. Checking for duplicate assetIds:"
DUPLICATE_ASSET_IDS=$(jq -r '
  to_entries
  | map({key: .key, assetId: .value.assetId // .key, lower: (.value.assetId // .key | ascii_downcase)})
  | group_by(.lower)
  | map(select(length > 1))
  | flatten
  | map(.key)
  | .[]
' "$JSON_FILE" 2>/dev/null || echo "")

if [ -z "$DUPLICATE_ASSET_IDS" ]; then
  echo "   ✅ No duplicate assetIds found"
else
  echo "   ⚠️  Found duplicate assetIds:"
  echo "$DUPLICATE_ASSET_IDS" | while read -r key; do
    echo "     - $key"
  done
fi

# 4. Duplicate chainId + symbol combinations.
echo ""
echo "4. Checking for duplicate chainId + symbol combinations:"
DUPLICATE_COMBOS=$(jq -r '
  to_entries
  | map({
      key: .key,
      chainId: .value.chainId,
      symbol: .value.symbol,
      combo: ((.value.chainId // "none") + ":" + (.value.symbol // "none"))
    })
  | group_by(.combo)
  | map(select(length > 1))
  | flatten
  | map("\(.combo) (keys: " + ([.[] | .key] | join(", ")) + ")")
  | .[]
' "$JSON_FILE" 2>/dev/null || echo "")

if [ -z "$DUPLICATE_COMBOS" ]; then
  echo "   ✅ No duplicate chainId + symbol combinations found"
else
  echo "   ⚠️  Found duplicate chainId + symbol combinations:"
  echo "$DUPLICATE_COMBOS" | while read -r combo; do
    echo "     - $combo"
  done
fi

# 5. Missing required fields, with the per-entry list of what is absent.
echo ""
echo "5. Checking for missing required fields:"
MISSING_FIELDS_DETAILED=$(jq -r '
  to_entries
  | map(select(
      .value.assetId == null or
      .value.chainId == null or
      .value.symbol == null or
      .value.name == null or
      .value.networkName == null or
      .value.precision == null
    ))
  | map({
      caip: .key,
      missing: [
        if .value.assetId == null then "assetId" else empty end,
        if .value.chainId == null then "chainId" else empty end,
        if .value.symbol == null then "symbol" else empty end,
        if .value.name == null then "name" else empty end,
        if .value.networkName == null then "networkName" else empty end,
        if .value.precision == null then "precision" else empty end
      ]
    })
  | map("\(.caip) - Missing: \(.missing | join(", "))")
  | .[]
' "$JSON_FILE" 2>/dev/null || echo "")

if [ -z "$MISSING_FIELDS_DETAILED" ]; then
  echo "   ✅ All entries have required fields"
else
  TOTAL_WITH_MISSING=$(echo "$MISSING_FIELDS_DETAILED" | wc -l | tr -d ' ')
  echo "   ⚠️  Found $TOTAL_WITH_MISSING entries with missing required fields:"
  echo ""

  if [ "$SHOW_ALL" -eq 1 ]; then
    echo "$MISSING_FIELDS_DETAILED" | while IFS= read -r line; do
      echo "     $line"
    done
  else
    # Show only the first 20 entries unless --show-all was given.
    echo "$MISSING_FIELDS_DETAILED" | head -20 | while IFS= read -r line; do
      echo "     $line"
    done

    if [ "$TOTAL_WITH_MISSING" -gt 20 ]; then
      echo ""
      echo "     ... and $((TOTAL_WITH_MISSING - 20)) more entries"
      echo ""
      echo "     Run with '--show-all' flag to see all entries with missing fields:"
      echo "     $0 --show-all"
    fi
  fi
fi

# 6. Precision values that are present but not numeric.
echo ""
echo "6. Checking for invalid precision values:"
INVALID_PRECISION=$(jq -r '
  to_entries
  | map(select(.value.precision != null and (.value.precision | type) != "number"))
  | map(.key)
  | .[]
' "$JSON_FILE" 2>/dev/null || echo "")

if [ -z "$INVALID_PRECISION" ]; then
  echo "   ✅ All precision values are valid"
else
  echo "   ⚠️  Entries with invalid precision:"
  echo "$INVALID_PRECISION" | while read -r key; do
    echo "     - $key"
  done
fi

# 7. Optional cleanup: drop case-insensitive duplicate keys, keeping the
#    first occurrence.
if [ "$DO_CLEAN" -eq 1 ]; then
  echo ""
  echo "7. Cleaning duplicates..."

  TEMP_FILE="$SCRIPT_DIR/../src/generatedAssetData_temp.json"

  jq '
    to_entries
    | map({
        key: .key,
        value: .value,
        lower_key: (.key | ascii_downcase)
      })
    | unique_by(.lower_key)
    | map({key: .key, value: .value})
    | from_entries
  ' "$JSON_FILE" > "$TEMP_FILE"

  # Only replace the original once the cleaned JSON parses cleanly.
  if validate_json "$TEMP_FILE"; then
    mv "$TEMP_FILE" "$JSON_FILE"

    NEW_ENTRIES=$(jq 'keys | length' "$JSON_FILE")
    REMOVED=$((TOTAL_ENTRIES - NEW_ENTRIES))

    echo "   ✅ Cleaned! Removed $REMOVED duplicate entries"
    echo "   New total: $NEW_ENTRIES entries"

    # Report size change.
    NEW_FILE_SIZE=$(wc -c < "$JSON_FILE")
    echo "   New file size: $(echo "scale=2; $NEW_FILE_SIZE / 1048576" | bc) MB"
    echo "   Size reduction: $(echo "scale=2; ($FILE_SIZE - $NEW_FILE_SIZE) / 1024" | bc) KB"
  else
    echo "   ❌ Error: Cleaned JSON is invalid. Original file preserved."
    rm -f "$TEMP_FILE"
    exit 1
  fi
else
  echo ""
  echo "========================================"
  echo "To clean duplicates, run: $0 --clean"
  echo "Backup will be saved at: $BACKUP_FILE"
  echo "========================================"
fi

# 8. Print a few sample entries for manual verification.
echo ""
echo "8. Sample entries (first 3):"
jq -r 'to_entries | .[0:3] | map("\n   Key: \(.key)\n   Symbol: \(.value.symbol)\n   Name: \(.value.name)\n   Chain: \(.value.chainId)") | .[]' "$JSON_FILE"

echo ""
echo "Validation complete!"