puppyproxy 1.0.1 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/package.json +4 -2
- package/src/providers/Scraper.js +5 -16
- package/src/workers/collector.js +46 -44
package/README.md
CHANGED

@@ -166,7 +166,7 @@ log.highlight("TESTING: ProxyRequest");
 
 //proxyRequest is a wrapper that allows you to use a more consistent proxy for multiple requests, with auto-rotation when it fails
 const proxyRequest = pp.createProxyRequest(ipUrl, {
-    autoNewAgent: true, // auto rotate the proxy when a request fails.
+    autoNewAgent: true, // auto rotate the proxy when a request fails. set this to false if you really need just one proxy you can manage yourself
 });
 
 log.bold("----- proxyRequest.fetch");
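
The README tweak above documents both modes of autoNewAgent: leave it true and the wrapper swaps in a fresh proxy agent whenever a request fails, or set it to false to pin a single proxy you manage yourself. A minimal sketch of the pinned mode, assuming puppyproxy's default export is imported as pp (as in the README snippet), an illustrative ipUrl, and a fetch(url) signature for proxyRequest.fetch:

import pp from 'puppyproxy';

// Hypothetical echo-my-IP endpoint, standing in for the README's ipUrl.
const ipUrl = 'https://api.ipify.org?format=json';

// Pin one proxy and handle failures yourself instead of auto-rotating.
const proxyRequest = pp.createProxyRequest(ipUrl, {
    autoNewAgent: false, // no automatic rotation when a request fails
});

const res = await proxyRequest.fetch(ipUrl); // assumed signature
console.log(await res.text());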
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "puppyproxy",
-  "version": "1.0.1",
+  "version": "1.1.0",
   "description": "Node.js library for managing, scraping, and using proxies with fetch and WebSocket support",
   "type": "module",
   "types": "./src/index.d.ts",
@@ -29,12 +29,14 @@
   "dependencies": {
     "http-proxy-agent": "^7.0.2",
     "https-proxy-agent": "^7.0.6",
+    "node-fetch": "^3.3.2",
     "p-limit": "^7.2.0",
     "puppyid": "^1.1.0",
     "puppylog": "^1.2.0",
     "puppymisc": "^1.1.1",
     "puppyscrambled": "^1.1.0",
     "socks-proxy-agent": "^8.0.5",
-    "sqlite3": "^5.1.7"
+    "sqlite3": "^5.1.7",
+    "ws": "^8.19.0"
   }
 }
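
The two new runtime dependencies line up with the proxy-checking worker further down: node-fetch performs the HTTP reachability check and ws opens the WebSocket check, and both accept an agent option so the request is tunnelled through the proxy under test. A standalone sketch of that pattern (endpoints taken from the worker's defaults, the SOCKS address purely illustrative), not puppyproxy's actual code:

import fetch from 'node-fetch';
import WebSocket from 'ws';
import { SocksProxyAgent } from 'socks-proxy-agent';

const agent = new SocksProxyAgent('socks5://127.0.0.1:1080'); // hypothetical proxy to test

// HTTP check through the proxy.
const res = await fetch('https://st1.lumidiagames.com', { agent });
console.log('http check status:', res.status);

// WebSocket check through the same proxy.
const ws = new WebSocket('wss://shellshock.io/matchmaker/', { agent });
ws.on('open', () => { console.log('ws check ok'); ws.close(); });
ws.on('error', (err) => console.error('ws check failed:', err.message));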
package/src/providers/Scraper.js
CHANGED

@@ -8,11 +8,12 @@ import log from 'puppylog';
 import id from 'puppyid';
 import { getLastSavedTimestamp } from 'puppymisc';
 import scrambled from 'puppyscrambled';
+import { createBasicAgent } from '../utils.js';
 
 export default class Scraper {
     constructor(config, proxyDB) {
         this.config = config;
-        this.proxyDB = proxyDB;
+        this.proxyDB = proxyDB;
         this.hosts = [];
         this.hostUsage = {};
 
@@ -21,14 +22,7 @@ export default class Scraper {
         this.outputPath = path.join(this.storeDir, 'scraped_proxies.json');
     }
 
-    createBasicAgent(proxyUrl) {
-        if (proxyUrl.startsWith('socks')) return new SocksProxyAgent(proxyUrl);
-        if (proxyUrl.startsWith('http:')) return new HttpProxyAgent(proxyUrl);
-        if (proxyUrl.startsWith('https:')) return new HttpsProxyAgent(proxyUrl);
-        return null;
-    }
-
-    genHostsScraper() {
+    loadScrapedProxies() {
         try {
             if (!fs.existsSync(this.outputPath)) return;
             const data = fs.readFileSync(this.outputPath, 'utf8');
@@ -48,11 +42,6 @@ export default class Scraper {
         }
     }
 
-    // Alias for init
-    async loadScrapedProxies() {
-        this.genHostsScraper();
-    }
-
     getHostAny(url = '') {
         if (!this.hosts || this.hosts.length === 0) {
             log.error(id, "No hosts available from scraper! IP would be exposed!");
@@ -95,7 +84,7 @@ export default class Scraper {
        const socksProxy = this.getHost(url, type);
        if (!socksProxy) return null;
 
-       const agent = this.createBasicAgent(socksProxy);
+       const agent = createBasicAgent(socksProxy);
        agent.USINGSCRAPED = socksProxy;
        return agent;
    }
@@ -127,7 +116,7 @@ export default class Scraper {
            worker.terminate();
            // The worker returns raw proxies, we process them into DB
            await this.proxyDB.dealWithProxies(message, this.outputPath);
-           this.genHostsScraper();
+           this.loadScrapedProxies(); // Reload memory
            resolve();
        });
 
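
The Scraper refactor drops the class's own agent builder in favour of a shared createBasicAgent imported from ../utils.js (the same helper the collector worker calls). utils.js itself is not part of this diff, but judging from the method body removed above it presumably looks roughly like this sketch:

// Presumed shape of createBasicAgent in package/src/utils.js, reconstructed from
// the body removed from Scraper.js; the actual file is not shown in this diff.
import { SocksProxyAgent } from 'socks-proxy-agent';
import { HttpProxyAgent } from 'http-proxy-agent';
import { HttpsProxyAgent } from 'https-proxy-agent';

export function createBasicAgent(proxyUrl) {
    if (proxyUrl.startsWith('socks')) return new SocksProxyAgent(proxyUrl);
    if (proxyUrl.startsWith('http:')) return new HttpProxyAgent(proxyUrl);
    if (proxyUrl.startsWith('https:')) return new HttpsProxyAgent(proxyUrl);
    return null; // unrecognised scheme
}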
package/src/workers/collector.js
CHANGED

@@ -15,8 +15,6 @@ process.on('unhandledRejection', () => {});
 process.on('uncaughtException', () => {});
 
 let config = {};
-const wsUrl = 'wss://shellshock.io/matchmaker/';
-const httpUrl = 'https://st1.lumidiagames.com';
 
 /**
  * Fetches and parses proxy lists from multiple public repositories
@@ -75,12 +73,14 @@ async function checkProxy(socksOrHttpProxy) {
 
     const maxTimeAllowed = config.scraper.timeoutMax; //in seconds, lower for less proxies but higher quality
 
-    let method = "http"; // 'ws' or 'http'
-
+    let method = config.scraper.method || "http"; // 'ws' or 'http'
+    const wsUrl = config.scraper.wsUrl || 'wss://shellshock.io/matchmaker/';
+    const httpUrl = config.scraper.httpUrl || 'https://st1.lumidiagames.com';
+    // log.dim(socksOrHttpProxy, "checking...", method, "method", wsUrl, httpUrl);
 
-    if (socksOrHttpProxy.startsWith("http")) {
-        method = "http";
-    };
+    // if (socksOrHttpProxy.startsWith("http")) {
+    //     method = "http";
+    // };
 
     const timeoutMs = (config.scraper.timeoutPerProxy || 15) * 1e3;
 
@@ -123,22 +123,7 @@ async function checkProxy(socksOrHttpProxy) {
    } else return new Promise(async (resolve) => {
        try {
            const agent = createBasicAgent(socksOrHttpProxy);
-           const options = { agent,
-               headers: {
-                   'accept-encoding': 'gzip, deflate, br, zstd',
-                   'accept-language': 'en-US,en;q=0.9',
-                   'cache-control': 'no-cache',
-                   'connection': 'Upgrade',
-                   'host': 'shellshock.io',
-                   'origin': 'https://shellshock.io',
-                   'pragma': 'no-cache',
-                   'sec-websocket-extensions': 'permessage-deflate; client_max_window_bits',
-                   // 'sec-websocket-key': generateWebSocketKey(),
-                   // 'sec-websocket-version': '13',
-                   // 'upgrade': 'websocket',
-                   'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36'
-               }
-           };
+           const options = { agent };
 
            let func = ()=>{};
 
@@ -156,42 +141,58 @@ async function checkProxy(socksOrHttpProxy) {
            let timeStart = Date.now();
 
            ws.on('open', () => {
-               config.scraper.verbose && log.magenta(`Connected using proxy: ${
-
+               config.scraper.verbose && log.magenta(`Connected using proxy: ${socksOrHttpProxy}`);
+
+
+               clearTimeout(timeout);
+               let timeTaken = (Date.now() - timeStart)/1000;
+               // ips.add(ip.ip);
+               ws.close();
+               if (timeTaken > maxTimeAllowed) {
+                   config.scraper.verbose && log.orange(`Result returned for proxy: ${socksOrHttpProxy} in [${timeTaken.toFixed(2)}s]`);
+                   socksOrHttpProxy = null;
+               } else {
+                   config.scraper.verbose && log.success(`Result returned for proxy: ${socksOrHttpProxy} in [${timeTaken.toFixed(2)}s]`);
+               };
+               resolve(socksOrHttpProxy);
+
+
+               // ws.send('{ "command": "regionList" }');
            });
 
            ws.on('message', (data) => {
-               clearTimeout(timeout);
-               const msg = JSON.parse(data.toString());
-               if (msg.command == "regionList") {
-
-
-
-
-
-
-
-
-
-
-               } else {
-
-               }
+               // clearTimeout(timeout);
+               // const msg = JSON.parse(data.toString());
+               // if (msg.command == "regionList") {
+               //     let timeTaken = (Date.now() - timeStart)/1000;
+               //     // ips.add(ip.ip);
+               //     ws.close();
+               //     if (timeTaken > maxTimeAllowed) {
+               //         config.scraper.verbose && log.orange(`Result returned for proxy: ${socksOrHttpProxy} in [${timeTaken.toFixed(2)}s]`);
+               //         socksOrHttpProxy = null;
+               //     } else {
+               //         config.scraper.verbose && log.success(`Result returned for proxy: ${socksOrHttpProxy} in [${timeTaken.toFixed(2)}s]`);
+               //     };
+               //     resolve(socksOrHttpProxy);
+               // } else {
+               //     // log.dim(msg)
+               // }
           });
 
           ws.on('error', (err) => {
               clearTimeout(timeout);
-              // log.dim(`Error with proxy ${
+              // log.dim(`Error with proxy ${socksOrHttpProxy}:`, err.message);
              // log.error(err);
              resolve(null);
          });
 
-          ws.on('close', () => {
+          ws.on('close', (event) => {
              clearTimeout(timeout);
+              // console.log(`WebSocket closed with code: ${event.code}, reason: ${event.reason}`);
              resolve(null);
          });
       } catch (error) {
-
+          console.error("Error checking proxy:", error);
          resolve(null);
       };
    });
@@ -199,6 +200,7 @@ async function checkProxy(socksOrHttpProxy) {
 
 async function getProxies() {
     log.dim("Fetching all proxies from sources...");
+    config.scraper.verbose && console.log(config.scraper);
 
     let allProxies = await fetchAllProxies();
 
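
After this change the worker no longer hard-codes the check endpoints: method, wsUrl and httpUrl are read from config.scraper with the previous constants as fallbacks, alongside the existing timeoutMax, timeoutPerProxy and verbose settings. A sketch of the config.scraper shape those reads imply; the field names come straight from the diff, while the surrounding object and the example values are assumptions:

// Illustrative config for the collector worker; only the field names are taken from the diff.
const config = {
    scraper: {
        method: 'ws',                             // 'ws' or 'http'; falls back to 'http' when omitted
        wsUrl: 'wss://shellshock.io/matchmaker/', // endpoint for the WebSocket check (default shown)
        httpUrl: 'https://st1.lumidiagames.com',  // endpoint for the HTTP check (default shown)
        timeoutPerProxy: 15,                      // seconds before a proxy check is abandoned (default 15)
        timeoutMax: 5,                            // hypothetical cap in seconds; slower proxies are treated as failed
        verbose: false,                           // extra logging, including the new console.log(config.scraper)
    },
};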