puppyproxy 1.1.0 → 1.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -26,14 +26,16 @@ const config = {
   },
   scraper: {
     use: true,
-    timeBetweenScrapes: 45 * 60e3,
+    timeBetweenScrapes: 45, // minutes
     maxProxiesToCheck: 5000, // the higher this is, the longer it takes, but the more proxies you get
     limitProxies: 500000,
-    timeoutPerProxy: 5,
+    timeoutPerProxy: 15, // seconds
     timeoutMax: 5,
     verbose: true, // for logging purposes
     logStatus: true, // shows a convenient summary every so often
     proxyTypes: ['socks4', 'socks5'], //['socks4', 'socks5', 'http', 'https']
+    method: 'ws', // 'ws' or 'http'
+    wsUrl: wsUrl,

     timeoutFetch: 10000, // ms
     timeoutWs: 7000,
@@ -48,6 +50,8 @@ log.highlight("TESTING: runCollector");

 await pp.init(true); // true forces run the collector

+console.log(pp.request.createProxyRequestPool().length, "proxies in the pool after init");
+
 log.highlight("TESTING: RequestClient");

 log.bold("----- pp.request.fetch")
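A few notes on the config changes: timeBetweenScrapes is now expressed in minutes rather than milliseconds (the old default was 45 * 60e3), timeoutPerProxy is documented in seconds and its default rises from 5 to 15, and the new method/wsUrl options select between WebSocket ('ws') and HTTP ('http') proxy checking, with wsUrl naming the WebSocket endpoint to test against (the wsUrl variable is defined elsewhere in the README). The test script now also logs the size of the pool returned by the new createProxyRequestPool() API, shown in the source hunks below.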
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "puppyproxy",
-  "version": "1.1.0",
+  "version": "1.1.2",
   "description": "Node.js library for managing, scraping, and using proxies with fetch and WebSocket support",
   "type": "module",
   "types": "./src/index.d.ts",
package/src/PuppyProxy.js CHANGED
@@ -97,6 +97,10 @@ export default class PuppyProxy {
     return this.request.createProxyRequest(url, options);
   }

+  createProxyRequestPool(url, options = {}) {
+    return this.request.createProxyRequestPool(url, options);
+  }
+
   /**
    * Trigger the proxy collection worker
    */
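The new method on PuppyProxy is a thin wrapper; the actual pool construction lives in RequestClient, shown in the next hunk.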
@@ -281,4 +281,28 @@ export default class RequestClient {
   createProxyRequest(url, options) {
     return new ProxyRequest(this, url, options);
   }
+
+  // return an array of ProxyRequest objects, one for each proxy
+  createProxyRequestPool(url, options = {}) {
+    const oldLOG = this.config.LOG;
+    this.config.LOG = {}; // suppress logs for this operation
+    let currentWithoutNew = 0;
+    const withoutNewLimit = 1000;
+    const proxyRequests = [];
+    const uniqueProxies = [];
+
+    while (currentWithoutNew < withoutNewLimit) {
+      const pr = this.createProxyRequest(url, options);
+      const proxy = pr.options.AGENT?.USINGSCRAPED;
+
+      if (proxy && !uniqueProxies.includes(proxy)) {
+        proxyRequests.push(pr);
+        uniqueProxies.push(proxy);
+        currentWithoutNew = 0;
+      } else currentWithoutNew++;
+    }
+
+    this.config.LOG = oldLOG;
+    return proxyRequests;
+  }
 }
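createProxyRequestPool builds one ProxyRequest per unique scraped proxy: it keeps drawing requests, records which proxy each draw landed on (options.AGENT?.USINGSCRAPED), and stops once 1000 consecutive draws yield nothing new. A minimal usage sketch, assuming the package's default export is the PuppyProxy class and treating the config object and target URL as placeholders:

import PuppyProxy from "puppyproxy";

const pp = new PuppyProxy(config); // config shaped as in the README example
await pp.init(true);               // true forces the collector run

// One ProxyRequest per unique scraped proxy; client logging is silenced
// while the pool is assembled and restored afterwards.
const pool = pp.createProxyRequestPool("https://example.com");
console.log(pool.length, "unique proxies in the pool");

One design note: uniqueness is tracked with an array and includes(), so assembling the pool is quadratic in the number of unique proxies; a Set would make the membership check constant-time.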
@@ -96,7 +96,7 @@ export default class Scraper {
   let lastSaved = (Date.now() - getLastSavedTimestamp(this.outputPath)) / 1000;
   if (lastSaved && !force) {
     console.log("lastSaved", lastSaved);
-    let timeBetweenScrapes = this.config.scraper.timeBetweenScrapes * 60; // 25 mins
+    let timeBetweenScrapes = this.config.scraper.timeBetweenScrapes * 60e3; // 25 mins
     if (lastSaved < timeBetweenScrapes) {
       console.log(`Proxies saved < ${timeBetweenScrapes}s ago, skipping.`);
       return;
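Worth flagging in this hunk: lastSaved is in seconds (the elapsed milliseconds are divided by 1000), but the new threshold multiplies minutes by 60e3, which yields milliseconds, so the comparison and the "${timeBetweenScrapes}s ago" log message label a millisecond value as seconds; the "// 25 mins" comment is also stale against the 45-minute README default. A consistent-units sketch of the same check (illustrative only, not the package's code):

// Inside the same Scraper method, keeping both sides of the comparison in seconds:
const lastSavedSec = (Date.now() - getLastSavedTimestamp(this.outputPath)) / 1000;
const thresholdSec = this.config.scraper.timeBetweenScrapes * 60; // minutes -> seconds
if (lastSavedSec < thresholdSec && !force) {
  console.log(`Proxies saved < ${thresholdSec}s ago, skipping.`);
  return;
}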