@crawlee/http 3.0.3-beta.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +201 -0
- package/README.md +158 -0
- package/index.d.ts +3 -0
- package/index.d.ts.map +1 -0
- package/index.js +6 -0
- package/index.js.map +1 -0
- package/index.mjs +73 -0
- package/internals/http-crawler.d.ts +344 -0
- package/internals/http-crawler.d.ts.map +1 -0
- package/internals/http-crawler.js +629 -0
- package/internals/http-crawler.js.map +1 -0
- package/package.json +67 -0
- package/tsconfig.build.tsbuildinfo +1 -0
package/LICENSE.md
ADDED
@@ -0,0 +1,201 @@
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction,
+and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by
+the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all
+other entities that control, are controlled by, or are under common
+control with that entity. For the purposes of this definition,
+"control" means (i) the power, direct or indirect, to cause the
+direction or management of such entity, whether by contract or
+otherwise, or (ii) ownership of fifty percent (50%) or more of the
+outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity
+exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications,
+including but not limited to software source code, documentation
+source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical
+transformation or translation of a Source form, including but
+not limited to compiled object code, generated documentation,
+and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or
+Object form, made available under the License, as indicated by a
+copyright notice that is included in or attached to the work
+(an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object
+form, that is based on (or derived from) the Work and for which the
+editorial revisions, annotations, elaborations, or other modifications
+represent, as a whole, an original work of authorship. For the purposes
+of this License, Derivative Works shall not include works that remain
+separable from, or merely link (or bind by name) to the interfaces of,
+the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including
+the original version of the Work and any modifications or additions
+to that Work or Derivative Works thereof, that is intentionally
+submitted to Licensor for inclusion in the Work by the copyright owner
+or by an individual or Legal Entity authorized to submit on behalf of
+the copyright owner. For the purposes of this definition, "submitted"
+means any form of electronic, verbal, or written communication sent
+to the Licensor or its representatives, including but not limited to
+communication on electronic mailing lists, source code control systems,
+and issue tracking systems that are managed by, or on behalf of, the
+Licensor for the purpose of discussing and improving the Work, but
+excluding communication that is conspicuously marked or otherwise
+designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity
+on behalf of whom a Contribution has been received by Licensor and
+subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+this License, each Contributor hereby grants to You a perpetual,
+worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+copyright license to reproduce, prepare Derivative Works of,
+publicly display, publicly perform, sublicense, and distribute the
+Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+this License, each Contributor hereby grants to You a perpetual,
+worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+(except as stated in this section) patent license to make, have made,
+use, offer to sell, sell, import, and otherwise transfer the Work,
+where such license applies only to those patent claims licensable
+by such Contributor that are necessarily infringed by their
+Contribution(s) alone or by combination of their Contribution(s)
+with the Work to which such Contribution(s) was submitted. If You
+institute patent litigation against any entity (including a
+cross-claim or counterclaim in a lawsuit) alleging that the Work
+or a Contribution incorporated within the Work constitutes direct
+or contributory patent infringement, then any patent licenses
+granted to You under this License for that Work shall terminate
+as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+Work or Derivative Works thereof in any medium, with or without
+modifications, and in Source or Object form, provided that You
+meet the following conditions:
+
+(a) You must give any other recipients of the Work or
+Derivative Works a copy of this License; and
+
+(b) You must cause any modified files to carry prominent notices
+stating that You changed the files; and
+
+(c) You must retain, in the Source form of any Derivative Works
+that You distribute, all copyright, patent, trademark, and
+attribution notices from the Source form of the Work,
+excluding those notices that do not pertain to any part of
+the Derivative Works; and
+
+(d) If the Work includes a "NOTICE" text file as part of its
+distribution, then any Derivative Works that You distribute must
+include a readable copy of the attribution notices contained
+within such NOTICE file, excluding those notices that do not
+pertain to any part of the Derivative Works, in at least one
+of the following places: within a NOTICE text file distributed
+as part of the Derivative Works; within the Source form or
+documentation, if provided along with the Derivative Works; or,
+within a display generated by the Derivative Works, if and
+wherever such third-party notices normally appear. The contents
+of the NOTICE file are for informational purposes only and
+do not modify the License. You may add Your own attribution
+notices within Derivative Works that You distribute, alongside
+or as an addendum to the NOTICE text from the Work, provided
+that such additional attribution notices cannot be construed
+as modifying the License.
+
+You may add Your own copyright statement to Your modifications and
+may provide additional or different license terms and conditions
+for use, reproduction, or distribution of Your modifications, or
+for any such Derivative Works as a whole, provided Your use,
+reproduction, and distribution of the Work otherwise complies with
+the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+any Contribution intentionally submitted for inclusion in the Work
+by You to the Licensor shall be under the terms and conditions of
+this License, without any additional terms or conditions.
+Notwithstanding the above, nothing herein shall supersede or modify
+the terms of any separate license agreement you may have executed
+with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+names, trademarks, service marks, or product names of the Licensor,
+except as required for reasonable and customary use in describing the
+origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+agreed to in writing, Licensor provides the Work (and each
+Contributor provides its Contributions) on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+implied, including, without limitation, any warranties or conditions
+of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+PARTICULAR PURPOSE. You are solely responsible for determining the
+appropriateness of using or redistributing the Work and assume any
+risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+whether in tort (including negligence), contract, or otherwise,
+unless required by applicable law (such as deliberate and grossly
+negligent acts) or agreed to in writing, shall any Contributor be
+liable to You for damages, including any direct, indirect, special,
+incidental, or consequential damages of any character arising as a
+result of this License or out of the use or inability to use the
+Work (including but not limited to damages for loss of goodwill,
+work stoppage, computer failure or malfunction, or any and all
+other commercial damages or losses), even if such Contributor
+has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+the Work or Derivative Works thereof, You may choose to offer,
+and charge a fee for, acceptance of support, warranty, indemnity,
+or other liability obligations and/or rights consistent with this
+License. However, in accepting such obligations, You may act only
+on Your own behalf and on Your sole responsibility, not on behalf
+of any other Contributor, and only if You agree to indemnify,
+defend, and hold each Contributor harmless for any liability
+incurred by, or claims asserted against, such Contributor by reason
+of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+To apply the Apache License to your work, attach the following
+boilerplate notice, with the fields enclosed by brackets "{}"
+replaced with your own identifying information. (Don't include
+the brackets!) The text should be enclosed in the appropriate
+comment syntax for the file format. We also recommend that a
+file or class name and description of purpose be included on the
+same "printed page" as the copyright notice for easier
+identification within third-party archives.
+
+Copyright 2018 Apify Technologies s.r.o.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
package/README.md
ADDED
@@ -0,0 +1,158 @@
+<h1 align="center">
+    <a href="https://crawlee.dev">
+        <picture>
+            <source media="(prefers-color-scheme: dark)" srcset="https://raw.githubusercontent.com/apify/crawlee/master/website/static/img/crawlee-dark.svg?sanitize=true">
+            <img alt="Crawlee" src="https://raw.githubusercontent.com/apify/crawlee/master/website/static/img/crawlee-light.svg?sanitize=true" width="500">
+        </picture>
+    </a>
+    <br>
+    <small>The scalable web crawling and scraping library for JavaScript</small>
+</h1>
+
+<p align=center>
+    <a href="https://www.npmjs.com/package/@crawlee/core" rel="nofollow"><img src="https://img.shields.io/npm/v/@crawlee/core.svg" alt="NPM latest version" data-canonical-src="https://img.shields.io/npm/v/@crawlee/core/next.svg" style="max-width: 100%;"></a>
+    <a href="https://www.npmjs.com/package/@crawlee/core" rel="nofollow"><img src="https://img.shields.io/npm/dm/@crawlee/core.svg" alt="Downloads" data-canonical-src="https://img.shields.io/npm/dm/@crawlee/core.svg" style="max-width: 100%;"></a>
+    <a href="https://discord.gg/jyEM2PRvMU" rel="nofollow"><img src="https://img.shields.io/discord/801163717915574323?label=discord" alt="Chat on discord" data-canonical-src="https://img.shields.io/discord/801163717915574323?label=discord" style="max-width: 100%;"></a>
+    <a href="https://github.com/apify/crawlee/actions/workflows/test-and-release.yml"><img src="https://github.com/apify/crawlee/actions/workflows/test-and-release.yml/badge.svg?branch=master" alt="Build Status" style="max-width: 100%;"></a>
+</p>
+
+>👉👉👉 Crawlee is the successor to [Apify SDK](https://sdk.apify.com). 🎉 Fully rewritten in **TypeScript** for a better developer experience, and with even more powerful anti-blocking features. The interface is almost the same as Apify SDK so upgrading is a breeze. Read [the upgrading guide](https://crawlee.dev/docs/upgrading/upgrading-to-v3) to learn about the changes. 👈👈👈
+
+Crawlee simplifies the development of web crawlers, scrapers, data extractors and web automation jobs. It provides tools to manage and automatically scale a pool of headless browsers, to maintain queues of URLs to crawl, store crawling results to a local filesystem or into the cloud, rotate proxies and much more. Crawlee is available as the [`crawlee`](https://www.npmjs.com/package/crawlee) NPM package. It can be used either stand-alone in your own applications or in [actors](https://docs.apify.com/actor) running on the [Apify Cloud](https://apify.com/).
+
+**View full documentation, guides and examples on the [Crawlee project website](https://crawlee.dev)**
+
+> Would you like to work with us on Crawlee or similar projects? [We are hiring!](https://apify.com/jobs#senior-node.js-engineer)
+
+## Motivation
+
+Thanks to tools like [Playwright](https://github.com/microsoft/playwright), [Puppeteer](https://github.com/puppeteer/puppeteer) or [Cheerio](https://www.npmjs.com/package/cheerio), it is easy to write Node.js code to extract data from web pages. But eventually things will get complicated. For example, when you try to:
+
+- Perform a deep crawl of an entire website using a persistent queue of URLs.
+- Run your scraping code on a list of 100k URLs in a CSV file, without losing any data when your code crashes.
+- Rotate proxies to hide your browser origin and keep user-like sessions.
+- Disable browser fingerprinting protections used by websites.
+
+Python has [Scrapy](https://scrapy.org/) for these tasks, but there was no such library for **JavaScript, the language of the web**. The use of JavaScript is natural, since the same language is used to write the scripts as well as the data extraction code running in a browser.
+
+The goal of Crawlee is to fill this gap and provide a toolbox for generic web scraping, crawling and automation tasks in JavaScript. So don't reinvent the wheel every time you need data from the web, and focus on writing code specific to the target website, rather than developing commonalities.
+
+## Overview
+
+Crawlee is available as the [`crawlee`](https://www.npmjs.com/package/crawlee) NPM package and is also available via `@crawlee/*` packages. It provides the following tools:
+
+- [`CheerioCrawler`](https://crawlee.dev/api/cheerio-crawler/class/CheerioCrawler) - Enables the parallel crawling of a large number of web pages using the [cheerio](https://www.npmjs.com/package/cheerio) HTML parser. This is the most efficient web crawler, but it does not work on websites that require JavaScript. Available also under `@crawlee/cheerio` package.
+
+- [`PuppeteerCrawler`](https://crawlee.dev/api/puppeteer-crawler/class/PuppeteerCrawler) - Enables the parallel crawling of a large number of web pages using the headless Chrome browser and [Puppeteer](https://github.com/puppeteer/puppeteer). The pool of Chrome browsers is automatically scaled up and down based on available system resources. Available also under `@crawlee/puppeteer` package.
+
+- [`PlaywrightCrawler`](https://crawlee.dev/api/playwright-crawler/class/PlaywrightCrawler) - Unlike `PuppeteerCrawler` you can use [Playwright](https://github.com/microsoft/playwright) to manage almost any headless browser. It also provides a cleaner and more mature interface while keeping the ease of use and advanced features. Available also under `@crawlee/playwright` package.
+
+- [`BasicCrawler`](https://crawlee.dev/api/basic-crawler/class/BasicCrawler) - Provides a simple framework for the parallel crawling of web pages whose URLs are fed either from a static list or from a dynamic queue of URLs. This class serves as a base for the more specialized crawlers above. Available also under `@crawlee/basic` package.
+
+- [`RequestList`](https://crawlee.dev/api/core/class/RequestList) - Represents a list of URLs to crawl. The URLs can be passed in code or in a text file hosted on the web. The list persists its state so that crawling can resume when the Node.js process restarts. Available also under `@crawlee/core` package.
+
+- [`RequestQueue`](https://crawlee.dev/api/core/class/RequestQueue) - Represents a queue of URLs to crawl, which is stored either in memory, on a local filesystem, or in the [Apify Cloud](https://apify.com). The queue is used for deep crawling of websites, where you start with several URLs and then recursively follow links to other pages. The data structure supports both breadth-first and depth-first crawling orders. Available also under `@crawlee/core` package.
+
+- [`Dataset`](https://crawlee.dev/api/core/class/Dataset) - Provides a store for structured data and enables their export to formats like JSON, JSONL, CSV, XML, Excel or HTML. The data is stored on a local filesystem or in the Apify Cloud. Datasets are useful for storing and sharing large tabular crawling results, such as a list of products or real estate offers. Available also under `@crawlee/core` package.
+
+- [`KeyValueStore`](https://crawlee.dev/api/core/class/KeyValueStore) - A simple key-value store for arbitrary data records or files, along with their MIME content type. It is ideal for saving screenshots of web pages, PDFs or to persist the state of your crawlers. The data is stored on a local filesystem or in the Apify Cloud. Available also under `@crawlee/core` package.
+
+- [`AutoscaledPool`](https://crawlee.dev/api/core/class/AutoscaledPool) - Runs asynchronous background tasks, while automatically adjusting the concurrency based on free system memory and CPU usage. This is useful for running web scraping tasks at the maximum capacity of the system. Available also under `@crawlee/core` package.
+
+Additionally, the package provides various helper functions to simplify running your code on the Apify Cloud and thus take advantage of its pool of proxies, job scheduler, data storage, etc. For more information, see the [Crawlee Programmer's Reference](https://crawlee.dev).
+
+## Quick Start
+
+This short tutorial will set you up to start using Crawlee in a minute or two. If you want to learn more, proceed to the [Getting Started](https://crawlee.dev/docs/guides/getting-started) tutorial that will take you step by step through creating your first scraper.
+
+### Local stand-alone usage
+
+Crawlee requires [Node.js](https://nodejs.org/en/) 16 or later. Add Crawlee to any Node.js project by running:
+
+```bash
+npm install crawlee playwright
+```
+
+> Neither `playwright` nor `puppeteer` are bundled with Crawlee to reduce install size and allow greater flexibility. That's why we install it with NPM. You can choose one, both, or neither.
+
+Run the following example to perform a recursive crawl of a website using Playwright. For more examples showcasing various features of Crawlee, [see the Examples section of the documentation](https://crawlee.dev/docs/examples/crawl-multiple-urls).
+
+```javascript
+import { PlaywrightCrawler, Dataset } from 'crawlee';
+
+const crawler = new PlaywrightCrawler();
+
+crawler.router.addDefaultHandler(async ({ request, page, enqueueLinks }) => {
+    const title = await page.title();
+    console.log(`Title of ${request.loadedUrl} is '${title}'`);
+
+    // save some results
+    await Dataset.pushData({ title, url: request.loadedUrl });
+
+    // enqueue all links targeting the same hostname
+    await enqueueLinks();
+});
+
+await crawler.run(['https://www.iana.org/']);
+```
+
+When you run the example, you should see Crawlee automating a Chrome browser.
+
+
+
+By default, Crawlee stores data to `./storage` in the current working directory. You can override this directory via `CRAWLEE_STORAGE_DIR` env var. For details, see [Environment variables](https://crawlee.dev/docs/guides/environment-variables), [Request storage](https://crawlee.dev/docs/guides/request-storage) and [Result storage](https://crawlee.dev/docs/guides/result-storage).
+
+### Local usage with Crawlee command-line interface (CLI)
+
+To create a boilerplate of your project we can use the [Crawlee command-line interface (CLI)](https://github.com/apify/apify-cli) tool.
+
+Let's create a boilerplate of your new web crawling project by running:
+
+```bash
+npx crawlee create my-hello-world
+```
+
+The CLI will prompt you to select a project boilerplate template - just pick "Hello world". The tool will create a directory called `my-hello-world` with a Node.js project files. You can run the project as follows:
+
+```bash
+cd my-hello-world
+npx crawlee run
+```
+
+By default, the crawling data will be stored in a local directory at `./storage`. For example, the input JSON file for the actor is expected to be in the default key-value store in `./storage/key_value_stores/default/INPUT.json`.
+
+### Usage on the Apify platform
+
+Now if we want to run our new crawler on Apify Platform, we first need to download the `apify-cli` and login with our token:
+
+> We could also use the Apify CLI to generate a new project, which can be better suited if we want to run it on the Apify Platform.
+
+```bash
+npm i -g apify-cli
+apify login
+```
+
+Finally, we can easily deploy our code to the Apify platform by running:
+
+```bash
+apify push
+```
+
+Your script will be uploaded to the Apify platform and built there so that it can be run. For more information, view the
+[Apify Actor](https://docs.apify.com/cli) documentation.
+
+You can also develop your web scraping project in an online code editor directly on the [Apify platform](https://crawlee.dev/docs/guides/apify-platform). You'll need to have an Apify Account. Go to [Actors](https://console.apify.com/actors), page in the Apify Console, click <i>Create new</i> and then go to the <i>Source</i> tab and start writing your code or paste one of the examples from the Examples section.
+
+For more information, view the [Apify actors quick start guide](https://docs.apify.com/actor/quick-start).
+
+## Support
+
+If you find any bug or issue with Crawlee, please [submit an issue on GitHub](https://github.com/apify/crawlee/issues). For questions, you can ask on [Stack Overflow](https://stackoverflow.com/questions/tagged/apify) or contact support@apify.com
+
+## Contributing
+
+Your code contributions are welcome, and you'll be praised to eternity! If you have any ideas for improvements, either submit an issue or create a pull request. For contribution guidelines and the code of conduct, see [CONTRIBUTING.md](https://github.com/apify/crawlee/blob/master/CONTRIBUTING.md).
+
+## License
+
+This project is licensed under the Apache License 2.0 - see the [LICENSE.md](https://github.com/apify/crawlee/blob/master/LICENSE.md) file for details.
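The README's Quick Start demonstrates `PlaywrightCrawler`, but the package under diff here is `@crawlee/http`, whose primary export is `HttpCrawler`. As a rough sketch of how this specific package is consumed, with handler context field names taken from the typings shipped in `internals/http-crawler.d.ts` and assumed to follow the v3 `requestHandler` convention shown above, usage might look like:

```javascript
import { HttpCrawler } from '@crawlee/http';

const crawler = new HttpCrawler({
    // Unlike the browser-based crawlers, the handler receives the raw
    // response body (plus parsed content type) instead of a live page.
    async requestHandler({ request, body, contentType }) {
        console.log(`Fetched ${request.url} (${contentType.type}, ${body.length} bytes)`);
    },
});

await crawler.run(['https://crawlee.dev']);
```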
package/index.d.ts
ADDED
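The diff viewer collapsed the body of `index.d.ts`, but the summary above counts it at 3 added lines. Given the compiled `index.js` below and the two mapping segments in `index.d.ts.map`, a plausible reconstruction (an assumption, not part of the published diff) is:

```javascript
export * from '@crawlee/basic';
export * from './internals/http-crawler';
//# sourceMappingURL=index.d.ts.map
```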
package/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,gBAAgB,CAAC;AAC/B,cAAc,0BAA0B,CAAC"}
package/index.js
ADDED
@@ -0,0 +1,6 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const tslib_1 = require("tslib");
+tslib_1.__exportStar(require("@crawlee/basic"), exports);
+tslib_1.__exportStar(require("./internals/http-crawler"), exports);
+//# sourceMappingURL=index.js.map
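The compiled CommonJS entry point is just two `tslib.__exportStar` calls: everything from `@crawlee/basic` is re-exported first, then the package's own `HttpCrawler` internals are layered on top. The TypeScript source is not shipped in the package, but judging from this output and the two mapping segments in `index.js.map` below, `src/index.ts` presumably reads:

```javascript
// Presumed src/index.ts (reconstructed from the compiled output; not
// itself part of the published diff):
export * from '@crawlee/basic';
export * from './internals/http-crawler';
```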
package/index.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,yDAA+B;AAC/B,mEAAyC"}
package/index.mjs
ADDED
@@ -0,0 +1,73 @@
+import mod from "./index.js";
+
+export default mod;
+export const API_PROCESSED_REQUESTS_DELAY_MILLIS = mod.API_PROCESSED_REQUESTS_DELAY_MILLIS;
+export const AutoscaledPool = mod.AutoscaledPool;
+export const BASIC_CRAWLER_TIMEOUT_BUFFER_SECS = mod.BASIC_CRAWLER_TIMEOUT_BUFFER_SECS;
+export const BasicCrawler = mod.BasicCrawler;
+export const Configuration = mod.Configuration;
+export const CookieParseError = mod.CookieParseError;
+export const CrawlerExtension = mod.CrawlerExtension;
+export const CriticalError = mod.CriticalError;
+export const DATASET_ITERATORS_DEFAULT_LIMIT = mod.DATASET_ITERATORS_DEFAULT_LIMIT;
+export const Dataset = mod.Dataset;
+export const EVENT_SESSION_RETIRED = mod.EVENT_SESSION_RETIRED;
+export const EnqueueStrategy = mod.EnqueueStrategy;
+export const EventManager = mod.EventManager;
+export const EventType = mod.EventType;
+export const HttpCrawler = mod.HttpCrawler;
+export const KeyValueStore = mod.KeyValueStore;
+export const LocalEventManager = mod.LocalEventManager;
+export const Log = mod.Log;
+export const LogLevel = mod.LogLevel;
+export const Logger = mod.Logger;
+export const LoggerJson = mod.LoggerJson;
+export const LoggerText = mod.LoggerText;
+export const MAX_QUERIES_FOR_CONSISTENCY = mod.MAX_QUERIES_FOR_CONSISTENCY;
+export const MissingRouteError = mod.MissingRouteError;
+export const NonRetryableError = mod.NonRetryableError;
+export const ProxyConfiguration = mod.ProxyConfiguration;
+export const PseudoUrl = mod.PseudoUrl;
+export const QUERY_HEAD_BUFFER = mod.QUERY_HEAD_BUFFER;
+export const QUERY_HEAD_MIN_LENGTH = mod.QUERY_HEAD_MIN_LENGTH;
+export const REQUESTS_PERSISTENCE_KEY = mod.REQUESTS_PERSISTENCE_KEY;
+export const Request = mod.Request;
+export const RequestList = mod.RequestList;
+export const RequestQueue = mod.RequestQueue;
+export const Router = mod.Router;
+export const STATE_PERSISTENCE_KEY = mod.STATE_PERSISTENCE_KEY;
+export const STORAGE_CONSISTENCY_DELAY_MILLIS = mod.STORAGE_CONSISTENCY_DELAY_MILLIS;
+export const Session = mod.Session;
+export const SessionPool = mod.SessionPool;
+export const Snapshotter = mod.Snapshotter;
+export const Statistics = mod.Statistics;
+export const StorageManager = mod.StorageManager;
+export const SystemStatus = mod.SystemStatus;
+export const browserPoolCookieToToughCookie = mod.browserPoolCookieToToughCookie;
+export const checkAndSerialize = mod.checkAndSerialize;
+export const chunkBySize = mod.chunkBySize;
+export const constructGlobObjectsFromGlobs = mod.constructGlobObjectsFromGlobs;
+export const constructRegExpObjectsFromPseudoUrls = mod.constructRegExpObjectsFromPseudoUrls;
+export const constructRegExpObjectsFromRegExps = mod.constructRegExpObjectsFromRegExps;
+export const cookieStringToToughCookie = mod.cookieStringToToughCookie;
+export const createBasicRouter = mod.createBasicRouter;
+export const createDeserialize = mod.createDeserialize;
+export const createHttpRouter = mod.createHttpRouter;
+export const createRequestOptions = mod.createRequestOptions;
+export const createRequests = mod.createRequests;
+export const deserializeArray = mod.deserializeArray;
+export const enqueueLinks = mod.enqueueLinks;
+export const getCookiesFromResponse = mod.getCookiesFromResponse;
+export const getDefaultCookieExpirationDate = mod.getDefaultCookieExpirationDate;
+export const getRequestId = mod.getRequestId;
+export const handleRequestTimeout = mod.handleRequestTimeout;
+export const log = mod.log;
+export const maybeStringify = mod.maybeStringify;
+export const mergeCookies = mod.mergeCookies;
+export const purgeDefaultStorages = mod.purgeDefaultStorages;
+export const resolveBaseUrlForEnqueueLinksFiltering = mod.resolveBaseUrlForEnqueueLinksFiltering;
+export const serializeArray = mod.serializeArray;
+export const toughCookieToBrowserPoolCookie = mod.toughCookieToBrowserPoolCookie;
+export const updateEnqueueLinksPatternCache = mod.updateEnqueueLinksPatternCache;
+export const validateGlobPattern = mod.validateGlobPattern;
+export const validators = mod.validators;