crawler-user-agents 1.0.83 → 1.0.87

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -1157,7 +1157,8 @@
1157
1157
  "Mozilla/5.0 (compatible; AhrefsBot/5.2; News; +http://ahrefs.com/robot/)",
1158
1158
  "Mozilla/5.0 (compatible; AhrefsBot/5.2; +http://ahrefs.com/robot/)",
1159
1159
  "Mozilla/5.0 (compatible; AhrefsSiteAudit/5.2; +http://ahrefs.com/robot/)",
1160
- "Mozilla/5.0 (compatible; AhrefsBot/6.1; News; +http://ahrefs.com/robot/)"
1160
+ "Mozilla/5.0 (compatible; AhrefsBot/6.1; News; +http://ahrefs.com/robot/)",
1161
+ "Mozilla/5.0 (compatible; AhrefsBot/7.0; +http://ahrefs.com/robot/)"
1161
1162
  ]
1162
1163
  }
1163
1164
  ,
@@ -4857,5 +4858,23 @@
4857
4858
  "Mozilla/5.0 (Linux; Android 7.0;) AppleWebKit/537.36 (KHTML, like Gecko) Mobile Safari/537.36 (compatible; PetalBot;+https://webmaster.petalsearch.com/site/petalbot)"
4858
4859
  ],
4859
4860
  "url": "https://webmaster.petalsearch.com/site/petalbot"
4861
+ },
4862
+ {
4863
+ "pattern": "virustotal",
4864
+ "addition_date": "2021/09/22",
4865
+ "instances": [
4866
+ "Mozilla\/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US) AppEngine-Google; (+http:\/\/code.google.com\/appengine; appid: s~virustotalcloud)",
4867
+ "AppEngine-Google; (+http:\/\/code.google.com\/appengine; appid: s~virustotalcloud)"
4868
+ ],
4869
+ "url": "https://www.virustotal.com/gui/home/url"
4870
+ },
4871
+ {
4872
+ "pattern": "(^| )PTST\\/",
4873
+ "addition_date": "2021/12/05",
4874
+ "instances": [
4875
+ "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.45 Safari/537.36 PTST/211202.211915",
4876
+ "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:94.0) Gecko/20100101 Firefox/94.0 PTST/211202.211915"
4877
+ ],
4878
+ "url": "https://www.webpagetest.org"
4860
4879
  }
4861
4880
  ]
package/index.d.ts ADDED
@@ -0,0 +1,16 @@
1
+ // Example:
2
+ // {
3
+ // "pattern": "rogerbot",
4
+ // "addition_date": "2014/02/28",
5
+ // "url": "http://moz.com/help/pro/what-is-rogerbot-",
6
+ // "instances" : ["rogerbot/2.3 example UA"]
7
+ // }
8
+
9
+ declare const crawlerUserAgents: {
10
+ pattern: string
11
+ addition_date?: string
12
+ url?: string
13
+ instances: string[]
14
+ }[]
15
+
16
+ export = crawlerUserAgents;
package/package.json CHANGED
@@ -1,7 +1,8 @@
1
1
  {
2
2
  "name": "crawler-user-agents",
3
- "version": "1.0.83",
3
+ "version": "1.0.87",
4
4
  "main": "crawler-user-agents.json",
5
+ "typings": "./index.d.ts",
5
6
  "author": "Martin Monperrus <martin.monperrus@gnieh.org>",
6
7
  "license": "MIT",
7
8
  "description": "This repository contains a list of of HTTP user-agents used by robots, crawlers, and spiders as in single JSON file.",