uniswap-v2-loader 1.2.0 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,2 +1,10 @@
+ ## [1.4.0] - 2026-02-24
+ - Switched worker processes from `spawn` to `cluster`
+ - Test that pools in the cache CSV are ordered by factory id
+ - Added `clear_cache` / `--clear` to delete the cache CSV file
+
+ ## [1.3.0] - 2026-02-23
+ - Added a CLI with a `-c`/`--count` flag for counting loaded pairs from the cache
+
  ## [1.2.0] - 2026-02-22
  - Added `onupdate` function for subscribing to new pairs.
package/README.md CHANGED
@@ -6,6 +6,13 @@ High-speed Uniswap v2 pair loader using viem multicall and parallel CPU processing
  The package uses Alchemy. Set your key as an environment variable (a default key is used if none is provided):
  `export KEY=your_alchemy_key`

+ ## CLI
+ ```
+ npm i -g uniswap-v2-loader
+ uniswap-v2-loader
+ ```
+
+
  ## API Reference
  ### `all(params)`
  - **Description**: Fetches token pairs from the Uniswap v2 factory. It utilizes multicall from `viem` and splits the loading process between multiple CPUs for high-speed execution.
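Since the README excerpt stops at the description, here is a hedged usage sketch of `all(params)`. The `to`, `from`, `chunk_size`, and `filename` parameter names and defaults come from the destructuring visible in the index.js diff below; the specific values are illustrative only.

```
const { all } = require('uniswap-v2-loader')

// Load factory pairs with ids below 1000 and append them to a custom CSV cache.
// Omitting `to` loads every pair reported by the factory's allPairsLength().
all({
  to: 1000,                   // upper bound on factory pair ids to load
  from: 0,                    // first factory id to start from (default 0)
  chunk_size: 50,             // ids are handed to workers in chunks of this size (default 50)
  filename: './my_pairs.csv'  // cache file; defaults to the per-OS path added in this release
}).then(pairs => console.log(pairs.length, pairs[0])) // pairs[0] -> { id, pair, token0, token1 }
```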
package/bin/uniswap-v2-loader ADDED
@@ -0,0 +1,19 @@
+ #!/usr/bin/env node
+ const { all, count, clear_cache } = require('../index')
+ const rl = require('readline')
+
+ const progress = (c, t) => {
+   const total_len = t.toString().length
+   const cur = c.toString().padStart(total_len)
+   const pct = (c / t * 100 | 0).toString().padStart(3)
+   rl.cursorTo(process.stdout, 0)
+   rl.clearLine(process.stdout, 0)
+   process.stdout.write(`Loaded: ${cur} / ${t} (${pct}%)`)
+ }
+
+ if (process.argv[2] == '-c' || process.argv[2] == '--count')
+   console.log(count())
+ else if (process.argv[2] == '--clear')
+   clear_cache()
+ else
+   all({progress})
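For quick orientation, the script above wires the CLI straight to the exports: running `uniswap-v2-loader` with no arguments calls `all({progress})` and prints a `Loaded: x / y (z%)` line that rewrites itself in place, `-c`/`--count` prints how many pairs are already in the CSV cache, and `--clear` deletes that cache file.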
package/debug.js ADDED
@@ -0,0 +1 @@
+ require('./index').all({to: 487283})
package/default_cache_filename.js ADDED
@@ -0,0 +1,19 @@
+ const path = require('path')
+ const env = process.env
+ const os = require('os')
+ const home = os.homedir()
+ const pkg = require('./package.json')
+
+ module.exports = path.join(
+   ...(process.platform === 'win32'
+     ? (env.LOCALAPPDATA || env.APPDATA)
+       ? [env.LOCALAPPDATA || env.APPDATA]
+       : [home, 'AppData', 'Local']
+     : process.platform === 'darwin'
+       ? [home, 'Library', 'Caches']
+       : (env.XDG_CACHE_HOME && path.isAbsolute(env.XDG_CACHE_HOME))
+         ? [env.XDG_CACHE_HOME]
+         : [home, '.cache']
+   ),
+   pkg.name + '_pairs.csv'
+ )
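In practical terms, the extracted `default_cache_filename` module (also required by index.js below) resolves to `%LOCALAPPDATA%`/`AppData\Local` on Windows, `~/Library/Caches` on macOS, and `$XDG_CACHE_HOME` or `~/.cache` elsewhere, with the file named `uniswap-v2-loader_pairs.csv`. A small inspection sketch, assuming the package places no `exports` restriction on deep requires:

```
// Assumed deep-require path; adjust if the package later adds an "exports" map.
const cache_path = require('uniswap-v2-loader/default_cache_filename')
console.log(cache_path) // e.g. /home/user/.cache/uniswap-v2-loader_pairs.csv on Linux
```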
package/index.js CHANGED
@@ -1,26 +1,9 @@
- const { spawn } = require('child_process')
+ const cluster = require('cluster')
  const fs = require('fs')
  const os = require('os')
- const path = require('path')
- const env = process.env
- const home = os.homedir()
- const pkg = require('./package.json')
- const default_filename = path.join(
-   ...(process.platform === 'win32'
-     ? (env.LOCALAPPDATA || env.APPDATA)
-       ? [env.LOCALAPPDATA || env.APPDATA]
-       : [home, 'AppData', 'Local']
-     : process.platform === 'darwin'
-       ? [home, 'Library', 'Caches']
-       : (env.XDG_CACHE_HOME && path.isAbsolute(env.XDG_CACHE_HOME))
-         ? [env.XDG_CACHE_HOME]
-         : [home, '.cache']
-   ),
-   pkg.name + '_pairs.csv'
- )
  const { parseAbiItem, createPublicClient, http } = require('viem')
  const { mainnet } = require('viem/chains')
-
+ const default_filename = require('./default_cache_filename')
  const workers = os.cpus().length - 1
  const missed = Array(workers).fill(null).map(() => [])
  const key = process.env.KEY || 'FZBvlPrOxtgaKBBkry3SH0W1IqH4Y5tu'
@@ -31,7 +14,7 @@ const client = createPublicClient({
  })

  const load = params => {
-   const {filename = default_filename, to, from = 0, chunk_size = 50} = params
+   const {filename = default_filename, to, from = 0, chunk_size = 50, progress, count} = params
    const pairs = params.pairs || fs.existsSync(filename)
      ? fs.readFileSync(filename).toString().trim().split('\n')
        .reduce((pairs, line) => {
@@ -47,7 +30,8 @@ const load = params => {
        }, [])
      : []

-   if (pairs.length > to) return Promise.resolve(pairs.slice(0, to))
+   if (count) return pairs.length
+   if (to && pairs.length > to) return Promise.resolve(pairs.slice(0, to))

    return (to
      ? Promise.resolve(to)
@@ -56,69 +40,60 @@ const load = params => {
        abi: [parseAbiItem('function allPairsLength() view returns (uint256)')],
        functionName: 'allPairsLength'
      }).then(_ => Number(_))
-   ).then(allPairsLength => {
-     var next_pair_order = 0
+   ).then(all_pairs_length => {
      const start_loading_from = pairs.length
        ? Math.max(from || 0, pairs[pairs.length - 1].id + 1)
        : 0

+     var next_pair_order = pairs.length
+       ? pairs[pairs.length - 1].id + 1
+       : 0
+
      missed.forEach(_ => _.length = 0)

-     for (var i = start_loading_from, rr = 0; i < allPairsLength; i++) {
+     for (var i = start_loading_from, rr = 0; i < all_pairs_length; i++) {
        missed[rr].push(i)
        if (missed[rr].length % chunk_size == 0)
          rr = (rr + 1) % workers
      }

-     const jobs_data_filename = `jobs_data_${Date.now()}.json`
-     fs.writeFileSync(jobs_data_filename, JSON.stringify({
-       missed,
-       factory,
-       chunk_size,
-       key
-     }), 'utf8')
+     var progress_i = 0
+     const progress_end = all_pairs_length - start_loading_from

+     cluster.setupPrimary({ exec: __dirname + '/loader.js' })
      return Promise.all(
        missed
          .filter(_ => _.length)
-         .map((_, i) => new Promise(y => {
-           const loader = spawn('node', ['loader.js', jobs_data_filename, i.toString()])
-           loader.stdout.on('data', data => {
-             data += data.toString()
-             if (!data.includes('\n')) return
-             const lines = data.split('\n')
-             data = lines.shift()
-             lines.forEach(line => {
-               const a = line.split(',')
-               const id = +a[0]
-               pairs[id] = {
-                 id,
-                 pair: a[1],
-                 token0: a[2],
-                 token1: a[3]
-               }
-             })
+         .map((missed, i) => new Promise(y => {
+           const w = cluster.fork()
+           w.send({ missed, factory, chunk_size, key })
+           w.on('message', p => {
+             const id = p[0]
+             pairs[id] = { id, pair: p[1], token0: p[2], token1: p[3] }
+             if (progress) progress(++progress_i, progress_end)
              if (filename) {
-               var pair
-               while (pair = pairs[next_pair_order]) {
-                 fs.appendFileSync(filename, pair.id + ',' + pair.pair + ',' + pair.token0 + ',' + pair.token1 + '\n')
+               var _
+               while (_ = pairs[next_pair_order]) {
+                 fs.appendFileSync(filename, `${_.id},${_.pair},${_.token0},${_.token1}\n`)
                  next_pair_order++
                }
              }
            })
-           loader.on('close', y)
+           w.on('exit', y)
          }))
-     )
-     .then(() => {
-       fs.unlinkSync(jobs_data_filename)
-       return pairs
-     })
+     ).then(() => pairs)
    })
  }


+ module.exports.clear_cache = () =>
+   fs.unlinkSync(default_filename)
+
  module.exports.all = (params = {}) =>
    load(params)
+
+ module.exports.count = () =>
+   load({count: true})

  module.exports.onupdate = function onupdate(callback, params = {}) {
    var subscribe = true, timeout
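Taken together, the index.js changes expose three new consumer-facing pieces: a per-pair `progress` callback, a synchronous `count()`, and `clear_cache()`. A minimal usage sketch built only from the exports shown above (the 3000-pair cap simply mirrors the existing heavy test):

```
const { all, count, clear_cache } = require('uniswap-v2-loader')

// count() reads the CSV cache synchronously and returns how many pairs it holds.
console.log('cached pairs:', count())

// progress is called once per pair resolved by the workers, as (loaded, total_for_this_run).
all({ to: 3000, progress: (loaded, total) => console.log(`${loaded}/${total}`) })
  .then(pairs => {
    console.log(pairs[0]) // { id, pair, token0, token1 }
    clear_cache()         // unlinks the default cache CSV
  })
```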
package/loader.js CHANGED
@@ -1,6 +1,5 @@
  const { parseAbiItem, createPublicClient, http } = require('viem')
  const { mainnet } = require('viem/chains')
- const fs = require('fs')

  const POOL = {
    ID: 0,
@@ -69,7 +68,7 @@ async function load(params) {
        pool[POOL.TOKEN0] = token0_result.result.toLowerCase()
        pool[POOL.TOKEN1] = token1_result.result.toLowerCase()

-       console.log(pool.join(','))
+       process.send(pool)
      } else {
        retry_missed.push(pools_ok[j].id)
      }
@@ -88,17 +87,11 @@ async function load(params) {
    }
  }

- const jobs_data_filename = process.argv[2]
- const job_index = +process.argv[3]
+ process.on('message', jobs_data => {
+   const client = createPublicClient({
+     chain: mainnet,
+     transport: http(`https://eth-mainnet.g.alchemy.com/v2/${jobs_data.key}`)
+   })

- if (isNaN(job_index)) process.exit(1)
-
- const jobs_data = JSON.parse(fs.readFileSync(jobs_data_filename, 'utf8'))
- jobs_data.missed = jobs_data.missed[job_index]
-
- const client = createPublicClient({
-   chain: mainnet,
-   transport: http(`https://eth-mainnet.g.alchemy.com/v2/${jobs_data.key}`)
+   load({client, ...jobs_data}).then(() => process.exit())
  })
-
- load({client, ...jobs_data})
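For readers comparing the old spawn/stdout pipeline with the new approach, the primary/worker round trip now relies only on standard Node `cluster` IPC. The following is a generic, self-contained sketch of that pattern, not package code; the real worker resolves each id via viem multicall before reporting it:

```
// Runnable illustration of the cluster message flow adopted by index.js/loader.js.
const cluster = require('cluster')

if (cluster.isPrimary) {
  cluster.setupPrimary({ exec: __filename })      // index.js points this at loader.js instead
  const w = cluster.fork()
  w.send({ missed: [0, 1, 2], chunk_size: 50 })   // job description, as sent by index.js
  w.on('message', pool => console.log('row', pool.join(',')))
  w.on('exit', () => console.log('worker done'))
} else {
  process.on('message', job => {
    // The real loader.js resolves pair/token0/token1 on-chain; this fake just echoes ids back.
    job.missed.forEach(id => process.send([id, '0xpair', '0xtoken0', '0xtoken1']))
    setImmediate(() => process.exit())            // give queued IPC messages a tick to flush
  })
}
```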
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "uniswap-v2-loader",
-   "version": "1.2.0",
+   "version": "1.4.0",
    "description": "Uniswap v2 protocol loader",
    "keywords": [
      "uniswap-v2",
@@ -24,6 +24,9 @@
    "scripts": {
      "test": "node --test test.js"
    },
+   "bin": {
+     "uniswap-v2-loader": "./bin/uniswap-v2-loader"
+   },
    "dependencies": {
      "viem": "^2.46.2"
    }
package/test.js CHANGED
@@ -46,3 +46,10 @@ test('Heavy test load first 3000 pairs', () =>
      assert.equal(pairs.length, 3000)
    })
  )
+
+ test('Each line in the CSV cache file should be ordered by pair id (factory id)', () => {
+   const lines = fs.readFileSync(require('./default_cache_filename'), 'utf8').trim().split('\n')
+   for (var i = 0; i < lines.length; i++)
+     assert.equal(i, +lines[i].split(',').shift())
+ })
+