uniswap-v2-loader 6.0.0 → 6.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -68,9 +68,16 @@ Methods:
68
68
  * Type: `number`
69
69
  * Default: `undefined`
70
70
  - `filename`
71
- * CSV cache path.
71
+ * Cache path. Used to pre-load data and to append updates. If `csv` is set to `false`, the value is used as a prefix for:
72
+ - `${filename}_pairs.bin`
73
+ - `${filename}_tokens.bin`
74
+ - `${filename}_p2tt.bin`
72
75
  * Type: `string`
73
76
  * Default: *OS cache folder*
77
+ - `csv`
78
+ * Switches the cache format between CSV and binary via [DEX DB](https://github.com/calp-pro/dex_db)
79
+ * Type: `boolean`
80
+ * Default: `true`
74
81
  - `factory`
75
82
  * Smart contract factory address.
76
83
  * Type: `string`
@@ -96,13 +103,13 @@ Methods:
96
103
  * Type: `function`
97
104
  * Default: `undefined`
98
105
  - `abort_signal`
99
- * Signal to cancel loading and release workers.
100
- * Type: `AbortSignal`
101
- * Default: `undefined`
106
+ * Signal to cancel loading and release workers.
107
+ * Type: [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal)
108
+ * Default: `undefined`
102
109
  - `update_timeout`
103
- * Polling interval in milliseconds. Used only in `subscribe`
104
- * Type: `number`
105
- * Default: `5000`
110
+ * Polling interval in milliseconds. Used only in `subscribe`
111
+ * Type: `number`
112
+ * Default: `5000`
106
113
 
107
114
  ### Schema `Pair`
108
115
  Standardized liquidity pool object.
@@ -133,7 +140,15 @@ The loader automatically identifies the optimal persistent storage path for your
133
140
  - **macOS:** `~/Library/Caches/`
134
141
  - **Windows:** `%LOCALAPPDATA%` or `AppData/Local/`
135
142
 
136
- Cache files are named following the pattern `${package_name}_{factory_address}.csv`.
143
+ There are two cache formats: a compact binary cache and CSV.<br>
144
+ The binary cache is implemented via [DEX DB](https://github.com/calp-pro/dex_db)
145
+
146
+ Cache files are named following the pattern:
147
+ - `${package_name}_{factory_address}.csv`
148
+ - `${package_name}_{factory_address}_pairs.bin`
149
+ - `${package_name}_{factory_address}_tokens.bin`
150
+ - `${package_name}_{factory_address}_p2tt.bin`
151
+
137
152
 
138
153
 
139
154
  ## API Usage
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "uniswap-v2-loader",
3
- "version": "6.0.0",
3
+ "version": "6.1.0",
4
4
  "description": "Uniswap v2 protocol loader",
5
5
  "keywords": [
6
6
  "DEFI",
@@ -19,5 +19,5 @@ module.exports = factory => path.join(
19
19
  ? [home, '.cache']
20
20
  : [os.tmpdir()]
21
21
  ),
22
- `${pkg.name}_${factory.toLowerCase()}.csv`
22
+ `${pkg.name}_${factory.toLowerCase()}`
23
23
  )
package/src/dex_db.js ADDED
@@ -0,0 +1,216 @@
1
+ // DexDB v2.0.0
2
+ // written 13 March 2026 by Vladimir Spirin at Danang, Vietnam
3
+ const fs = require('fs')
4
+
5
+ function writeUInt24LE(buf, value, offset) {
6
+ value &= 0xffffff
7
+ buf[offset] = value & 0xff
8
+ buf[offset + 1] = (value >>> 8) & 0xff
9
+ buf[offset + 2] = (value >>> 16) & 0xff
10
+ }
11
+
12
+ function readUInt24LE(buf, offset) {
13
+ return buf[offset] |
14
+ (buf[offset + 1] << 8) |
15
+ (buf[offset + 2] << 16)
16
+ }
17
+
18
+ function dex_db(pairs = []) {
19
+ var aP = []
20
+ var aT = []
21
+ var P = new Map()//pairs
22
+ var T = new Map()//tokens
23
+ var p2tt = []// [p1.it0, p1.it1, p2.it0, p2, it1, ...]
24
+ var t2pt = []// [ [[ip, it], [ip, it], ...], ...]
25
+
26
+ const index = ([pair, token0, token1]) => {
27
+ var ip = P.get(pair)
28
+ if (ip == undefined) {
29
+ P.set(pair, ip = P.size)
30
+ aP.push(pair)
31
+ } else {
32
+ return undefined
33
+ }
34
+
35
+ var it0 = T.get(token0)
36
+ if (it0 == undefined) {
37
+ T.set(token0, it0 = T.size)
38
+ aT.push(token0)
39
+ }
40
+
41
+ var it1 = T.get(token1)
42
+ if (it1 == undefined) {
43
+ T.set(token1, it1 = T.size)
44
+ aT.push(token1)
45
+ }
46
+
47
+ p2tt[ip * 2] = it0
48
+ p2tt[ip * 2 + 1] = it1
49
+
50
+ if (t2pt[it0])
51
+ t2pt[it0].push(ip, it1)
52
+ else
53
+ t2pt[it0] = [ip, it1]
54
+
55
+ if (t2pt[it1])
56
+ t2pt[it1].push(ip, it0)
57
+ else
58
+ t2pt[it1] = [ip, it0]
59
+ }
60
+
61
+ const index_save = ([pair, token0, token1], filename = 'dump') => {
62
+ if (pair.length != 42 || token0.length != 42 || token1.length != 42) return
63
+ var ip = P.get(pair)
64
+ if (ip == undefined) {
65
+ P.set(pair, ip = P.size)
66
+ aP.push(pair)
67
+ fs.appendFileSync(filename + '_pairs.bin', Buffer.from(pair.slice(2), 'hex'))
68
+ } else {
69
+ return undefined
70
+ }
71
+
72
+ var it0 = T.get(token0)
73
+ if (it0 == undefined) {
74
+ T.set(token0, it0 = T.size)
75
+ aT.push(token0)
76
+ fs.appendFileSync(filename + '_tokens.bin', Buffer.from(token0.slice(2), 'hex'))
77
+ }
78
+
79
+ var it1 = T.get(token1)
80
+ if (it1 == undefined) {
81
+ T.set(token1, it1 = T.size)
82
+ aT.push(token1)
83
+ fs.appendFileSync(filename + '_tokens.bin', Buffer.from(token1.slice(2), 'hex'))
84
+ }
85
+
86
+ p2tt[ip * 2] = it0
87
+ p2tt[ip * 2 + 1] = it1
88
+
89
+ const buf = Buffer.allocUnsafe(6)
90
+ writeUInt24LE(buf, it0, 0)
91
+ writeUInt24LE(buf, it1, 3)
92
+ fs.appendFileSync(filename + '_p2tt.bin', buf)
93
+
94
+ if (t2pt[it0])
95
+ t2pt[it0].push(ip, it1)
96
+ else
97
+ t2pt[it0] = [ip, it1]
98
+
99
+ if (t2pt[it1])
100
+ t2pt[it1].push(ip, it0)
101
+ else
102
+ t2pt[it1] = [ip, it0]
103
+
104
+ return [ip, it0, it1]
105
+ }
106
+
107
+ const get_tokens = pair => {
108
+ const tokens = Array(2)
109
+ const ip = P.get(pair)
110
+ if (ip == undefined) return tokens
111
+ tokens[0] = aT[p2tt[ip * 2]]
112
+ tokens[1] = aT[p2tt[ip * 2 + 1]]
113
+ return tokens
114
+ }
115
+
116
+ const find_pairs_with_token = token => {
117
+ const pairs = []
118
+ const it = T.get(token)
119
+ if (it == undefined) return pairs
120
+ for (var i = 0; i < t2pt[it].length; i += 2)
121
+ pairs.push(
122
+ aP[t2pt[it][i]]
123
+ )
124
+ return pairs
125
+ }
126
+
127
+ const find_pairs_with_tokens = (token0, token1) => {
128
+ const pairs = []
129
+ const it0 = T.get(token0)
130
+ if (it0 == undefined) return pairs
131
+ const it1 = T.get(token1)
132
+ if (it1 == undefined) return pairs
133
+ for (var i = 0; i < t2pt[it0].length; i += 2)
134
+ if (t2pt[it0][i + 1] == it1)
135
+ pairs.push(
136
+ aP[t2pt[it0][i]]
137
+ )
138
+ return pairs
139
+ }
140
+
141
+ const save = (filename = 'dump') => {
142
+ fs.writeFileSync(filename + '_pairs.bin', Buffer.concat(aP.map(a => Buffer.from(a.slice(2), 'hex'))))
143
+ fs.writeFileSync(filename + '_tokens.bin', Buffer.concat(aT.map(a => Buffer.from(a.slice(2), 'hex'))))
144
+ const bin = fs.openSync(filename + '_p2tt.bin', 'w')
145
+ const buf = Buffer.allocUnsafe(6)
146
+ for (var i = 0; i < p2tt.length; i += 2) {
147
+ writeUInt24LE(buf, p2tt[i], 0)
148
+ writeUInt24LE(buf, p2tt[i + 1], 3)
149
+ fs.writeSync(bin, buf)
150
+ }
151
+ fs.closeSync(bin)
152
+ }
153
+
154
+ const load = (filename = 'dump') => {
155
+ aP.length = 0
156
+ aT.length = 0
157
+ p2tt.length = 0
158
+ t2pt.length = 0
159
+ T = new Map()
160
+ P = new Map()
161
+
162
+ var buf = fs.readFileSync(filename + '_pairs.bin')
163
+ for (var i = 0; i < buf.length; i += 20) {
164
+ const pair = '0x' + buf.slice(i, i + 20).toString('hex')
165
+ aP.push(pair)
166
+ P.set(pair, i / 20)
167
+ }
168
+ buf = fs.readFileSync(filename + '_tokens.bin')
169
+ for (var i = 0; i < buf.length; i += 20) {
170
+ const token = '0x' + buf.slice(i, i + 20).toString('hex')
171
+ aT.push(token)
172
+ T.set(token, i / 20)
173
+ }
174
+
175
+ const bin = fs.openSync(filename + '_p2tt.bin', 'r')
176
+ const { size } = fs.fstatSync(bin)
177
+ buf = Buffer.allocUnsafe(6)
178
+
179
+ for (var offset = 0, ip, it0, it1; offset < size; offset += 6) {
180
+ fs.readSync(bin, buf, 0, 6, offset)
181
+ ip = offset / 6
182
+ it0 = readUInt24LE(buf, 0)
183
+ it1 = readUInt24LE(buf, 3)
184
+ p2tt[ip * 2] = it0
185
+ p2tt[ip * 2 + 1] = it1
186
+
187
+ if (t2pt[it0])
188
+ t2pt[it0].push(ip, it1)
189
+ else
190
+ t2pt[it0] = [ip, it1]
191
+
192
+ if (t2pt[it1])
193
+ t2pt[it1].push(ip, it0)
194
+ else
195
+ t2pt[it1] = [ip, it0]
196
+ }
197
+
198
+ fs.closeSync(bin)
199
+ }
200
+
201
+ pairs.forEach(index)
202
+
203
+ return {
204
+ index,
205
+ index_save,
206
+ find_pairs_with_token,
207
+ find_pairs_with_tokens,
208
+ save,
209
+ load,
210
+ get_tokens,
211
+ get_all_pairs: () => aP,
212
+ get_all_tokens: () => aT,
213
+ }
214
+ }
215
+
216
+ module.exports = dex_db
package/src/index.js CHANGED
@@ -3,6 +3,7 @@ const fs = require('fs')
3
3
  const os = require('os')
4
4
  const path = require('path')
5
5
  const default_cache_filename = require('./default_cache_filename')
6
+ const dex_db = require('./dex_db')
6
7
  const max_workers = os.cpus().length - 1
7
8
  const debug_key = process.env.KEY || 'FZBvlPrOxtgaKBBkry3SH0W1IqH4Y5tu'
8
9
  const uniswap_v2_factory = '0x5c69bee701ef814a2b6a3edd4b1652cb9cc5aa6f'
@@ -12,6 +13,7 @@ const load = (params = {}) => {
12
13
  key = debug_key,
13
14
  factory = uniswap_v2_factory,
14
15
  filename,
16
+ csv = true,
15
17
  multicall_size = 50,
16
18
  from = 0,
17
19
  to,
@@ -21,23 +23,51 @@ const load = (params = {}) => {
21
23
  pairs,
22
24
  } = params
23
25
 
24
- filename ??= default_cache_filename(factory)
26
+ if (!filename) {
27
+ filename = default_cache_filename(factory)
28
+ if (csv) filename += '.csv'
29
+ }
25
30
  workers = Math.min(workers, max_workers)
26
-
27
- pairs ??= fs.existsSync(filename)
28
- ? fs.readFileSync(filename).toString().trim().split('\n')
29
- .reduce((pairs, line) => {
30
- line = line.split(',')
31
- const id = +line[0]
32
- if (id >= from && (to == undefined || id <= to)) pairs.push({
33
- id,
34
- pair: line[1],
35
- token0: line[2],
36
- token1: line[3]
31
+
32
+ var db
33
+
34
+ if (!pairs) {
35
+ if (csv) {
36
+ pairs = fs.existsSync(filename)
37
+ ? fs.readFileSync(filename).toString().trim().split('\n')
38
+ .reduce((pairs, line) => {
39
+ line = line.split(',')
40
+ const id = +line[0]
41
+ if (id >= from && (to == undefined || id <= to)) pairs.push({
42
+ id,
43
+ pair: line[1],
44
+ token0: line[2],
45
+ token1: line[3]
46
+ })
47
+ return pairs
48
+ }, [])
49
+ : []
50
+ } else {
51
+ pairs = []
52
+ db = dex_db()
53
+ if (
54
+ fs.existsSync(filename + '_pairs.bin') &&
55
+ fs.existsSync(filename + '_tokens.bin') &&
56
+ fs.existsSync(filename + '_p2tt.bin')
57
+ ) {
58
+ db.load(filename)
59
+ db.get_all_pairs().forEach((pair, i) => {
60
+ const tokens = db.get_tokens(pair)
61
+ pairs[i] = {
62
+ id: i,
63
+ pair,
64
+ token0: tokens[0],
65
+ token1: tokens[1]
66
+ }
37
67
  })
38
- return pairs
39
- }, [])
40
- : []
68
+ }
69
+ }
70
+ }
41
71
 
42
72
  if (to >= 0 && to <= pairs.length - 1) {
43
73
  if (progress)
@@ -88,15 +118,25 @@ const load = (params = {}) => {
88
118
  for (var i = from; i < start_loading_from; i++)
89
119
  progress(pairs[i].id, last_id + 1, pairs[i])
90
120
 
91
- const onpair = pair => {
92
- pairs[pair.id] = pair
93
- if (progress && pair.id >= from) progress(pair.id, last_id + 1, pair)
94
- var _
95
- while (_ = pairs[next_pair_order]) {
96
- fs.appendFileSync(filename, `${_.id},${_.pair},${_.token0},${_.token1}\n`)
97
- next_pair_order++
121
+ const onpair = csv
122
+ ? pair => {
123
+ pairs[pair.id] = pair
124
+ if (progress && pair.id >= from) progress(pair.id, last_id + 1, pair)
125
+ var _
126
+ while (_ = pairs[next_pair_order]) {
127
+ fs.appendFileSync(filename, `${_.id},${_.pair},${_.token0},${_.token1}\n`)
128
+ next_pair_order++
129
+ }
130
+ }
131
+ : pair => {
132
+ pairs[pair.id] = pair
133
+ if (progress && pair.id >= from) progress(pair.id, last_id + 1, pair)
134
+ var _
135
+ while (_ = pairs[next_pair_order]) {
136
+ db.index_save([_.pair, _.token0, _.token1], filename)
137
+ next_pair_order++
138
+ }
98
139
  }
99
- }
100
140
 
101
141
  if (!workers) {
102
142
  const ids = []
@@ -6,6 +6,7 @@ const options = [
6
6
  'key',
7
7
  'factory',
8
8
  'filename',
9
+ 'csv',
9
10
  'multicall_size',
10
11
  'from',
11
12
  'to',
@@ -57,10 +58,28 @@ while (arg = process.argv[i]) {
57
58
  process.exit(22)
58
59
  }
59
60
  }
61
+ if (option == 'csv') {
62
+ if (value != 'true' && value != 'false') {
63
+ console.log(`"csv" should be a "true" or "false". Default is "true".`)
64
+ process.exit(22)
65
+ }
66
+ value = value == 'true'
67
+ }
60
68
  params[option] = value
61
69
  i++
62
70
  }
63
71
 
72
+ if (params.csv != undefined && params.filename) {
73
+ if (params.csv == true && !params.filename.endsWith('.csv')) {
74
+ console.log(`CSV cache mode require "filename" ending with ".csv"`)
75
+ process.exit(22)
76
+ }
77
+ if (params.csv == false && params.filename.endsWith('.csv')) {
78
+ console.log(`Binary cache mode require "filename" ending without ".csv"`)
79
+ process.exit(22)
80
+ }
81
+ }
82
+
64
83
  params.progress = params.filename
65
84
  ? (c, t, pair) => {
66
85
  const total_left = t.toString().length