@crawlee/http 4.0.0-beta.2 → 4.0.0-beta.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -9,6 +9,10 @@
  <small>A web scraping and browser automation library</small>
  </h1>

+ <p align=center>
+ <a href="https://trendshift.io/repositories/5179" target="_blank"><img src="https://trendshift.io/api/badge/repositories/5179" alt="apify%2Fcrawlee | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
+ </p>
+
  <p align=center>
  <a href="https://www.npmjs.com/package/@crawlee/core" rel="nofollow"><img src="https://img.shields.io/npm/v/@crawlee/core.svg" alt="NPM latest version" data-canonical-src="https://img.shields.io/npm/v/@crawlee/core/next.svg" style="max-width: 100%;"></a>
  <a href="https://www.npmjs.com/package/@crawlee/core" rel="nofollow"><img src="https://img.shields.io/npm/dm/@crawlee/core.svg" alt="Downloads" data-canonical-src="https://img.shields.io/npm/dm/@crawlee/core.svg" style="max-width: 100%;"></a>
@@ -24,7 +28,7 @@ Crawlee is available as the [`crawlee`](https://www.npmjs.com/package/crawlee) N

  > 👉 **View full documentation, guides and examples on the [Crawlee project website](https://crawlee.dev)** 👈

- > Crawlee for Python is open for early adopters. 🐍 [👉 Checkout the source code 👈](https://github.com/apify/crawlee-python).
+ > Do you prefer 🐍 Python instead of JavaScript? [👉 Checkout Crawlee for Python 👈](https://github.com/apify/crawlee-python).

  ## Installation

package/dist/internals/file-download.d.ts CHANGED
@@ -1,29 +1,45 @@
+ import { Transform } from 'node:stream';
+ import type { BasicCrawlerOptions } from '@crawlee/basic';
+ import { BasicCrawler } from '@crawlee/basic';
+ import type { CrawlingContext, LoadedRequest, Request } from '@crawlee/core';
  import type { Dictionary } from '@crawlee/types';
- // @ts-ignore optional peer dependency or compatibility with es2022
- import type { Request } from 'got-scraping';
- import type { ErrorHandler, GetUserDataFromRequest, HttpCrawlerOptions, InternalHttpCrawlingContext, InternalHttpHook, RequestHandler, RouterRoutes } from '../index.js';
- import { HttpCrawler } from '../index.js';
- export type FileDownloadErrorHandler<UserData extends Dictionary = any, // with default to Dictionary we cant use a typed router in untyped crawler
- JSONData extends Dictionary = any> = ErrorHandler<FileDownloadCrawlingContext<UserData, JSONData>>;
- export type StreamHandlerContext = Omit<FileDownloadCrawlingContext, 'body' | 'parseWithCheerio' | 'json' | 'addRequests' | 'contentType'> & {
- stream: Request;
- };
- type StreamHandler = (context: StreamHandlerContext) => void | Promise<void>;
- export type FileDownloadOptions<UserData extends Dictionary = any, // with default to Dictionary we cant use a typed router in untyped crawler
- JSONData extends Dictionary = any> = (Omit<HttpCrawlerOptions<FileDownloadCrawlingContext<UserData, JSONData>>, 'requestHandler'> & {
- requestHandler?: never;
- streamHandler?: StreamHandler;
- }) | (Omit<HttpCrawlerOptions<FileDownloadCrawlingContext<UserData, JSONData>>, 'requestHandler'> & {
- requestHandler: FileDownloadRequestHandler;
- streamHandler?: never;
- });
- export type FileDownloadHook<UserData extends Dictionary = any, // with default to Dictionary we cant use a typed router in untyped crawler
- JSONData extends Dictionary = any> = InternalHttpHook<FileDownloadCrawlingContext<UserData, JSONData>>;
- export interface FileDownloadCrawlingContext<UserData extends Dictionary = any, // with default to Dictionary we cant use a typed router in untyped crawler
- JSONData extends Dictionary = any> extends InternalHttpCrawlingContext<UserData, JSONData, FileDownload> {
+ import type { ErrorHandler, GetUserDataFromRequest, InternalHttpHook, RequestHandler, RouterRoutes } from '../index.js';
+ export type FileDownloadErrorHandler<UserData extends Dictionary = any> = ErrorHandler<FileDownloadCrawlingContext<UserData>>;
+ export type FileDownloadHook<UserData extends Dictionary = any> = InternalHttpHook<FileDownloadCrawlingContext<UserData>>;
+ export interface FileDownloadCrawlingContext<UserData extends Dictionary = any> extends CrawlingContext<UserData> {
+ request: LoadedRequest<Request<UserData>>;
+ response: Response;
+ contentType: {
+ type: string;
+ encoding: BufferEncoding;
+ };
  }
- export type FileDownloadRequestHandler<UserData extends Dictionary = any, // with default to Dictionary we cant use a typed router in untyped crawler
- JSONData extends Dictionary = any> = RequestHandler<FileDownloadCrawlingContext<UserData, JSONData>>;
+ export type FileDownloadRequestHandler<UserData extends Dictionary = any> = RequestHandler<FileDownloadCrawlingContext<UserData>>;
+ /**
+ * Creates a transform stream that throws an error if the source data speed is below the specified minimum speed.
+ * This `Transform` checks the amount of data every `checkProgressInterval` milliseconds.
+ * If the stream has received less than `minSpeedKbps * historyLengthMs / 1000` bytes in the last `historyLengthMs` milliseconds,
+ * it will throw an error.
+ *
+ * Can be used e.g. to abort a download if the network speed is too slow.
+ * @returns Transform stream that monitors the speed of the incoming data.
+ */
+ export declare function MinimumSpeedStream({ minSpeedKbps, historyLengthMs, checkProgressInterval: checkProgressIntervalMs, }: {
+ minSpeedKbps: number;
+ historyLengthMs?: number;
+ checkProgressInterval?: number;
+ }): Transform;
+ /**
+ * Creates a transform stream that logs the progress of the incoming data.
+ * This `Transform` calls the `logProgress` function every `loggingInterval` milliseconds with the number of bytes received so far.
+ *
+ * Can be used e.g. to log the progress of a download.
+ * @returns Transform stream logging the progress of the incoming data.
+ */
+ export declare function ByteCounterStream({ logTransferredBytes, loggingInterval, }: {
+ logTransferredBytes: (transferredBytes: number) => void;
+ loggingInterval?: number;
+ }): Transform;
  /**
  * Provides a framework for downloading files in parallel using plain HTTP requests. The URLs to download are fed either from a static list of URLs or they can be added on the fly from another crawler.
  *
@@ -39,11 +55,11 @@ JSONData extends Dictionary = any> = RequestHandler<FileDownloadCrawlingContext<
  *
  * The crawler finishes when there are no more {@link Request} objects to crawl.
  *
- * We can use the `preNavigationHooks` to adjust `gotOptions`:
+ * We can use the `preNavigationHooks` to adjust the crawling context before the request is made:
  *
  * ```
  * preNavigationHooks: [
- * (crawlingContext, gotOptions) => {
+ * (crawlingContext) => {
  * // ...
  * },
  * ]
@@ -67,11 +83,9 @@ JSONData extends Dictionary = any> = RequestHandler<FileDownloadCrawlingContext<
  * ]);
  * ```
  */
- export declare class FileDownload extends HttpCrawler<FileDownloadCrawlingContext> {
- private streamHandler?;
- constructor(options?: FileDownloadOptions);
- protected _runRequestHandler(context: FileDownloadCrawlingContext): Promise<void>;
- private streamRequestHandler;
+ export declare class FileDownload extends BasicCrawler<FileDownloadCrawlingContext> {
+ constructor(options?: BasicCrawlerOptions<FileDownloadCrawlingContext>);
+ private initiateDownload;
  }
  /**
  * Creates new {@link Router} instance that works based on request labels.
@@ -98,6 +112,5 @@ export declare class FileDownload extends HttpCrawler<FileDownloadCrawlingContex
  * ```
  */
  // @ts-ignore optional peer dependency or compatibility with es2022
- export declare function createFileRouter<Context extends FileDownloadCrawlingContext = FileDownloadCrawlingContext, UserData extends Dictionary = GetUserDataFromRequest<Context['request']>>(routes?: RouterRoutes<Context, UserData>): import("packages/core/dist/router.js").RouterHandler<Context>;
- export {};
+ export declare function createFileRouter<Context extends FileDownloadCrawlingContext = FileDownloadCrawlingContext, UserData extends Dictionary = GetUserDataFromRequest<Context['request']>>(routes?: RouterRoutes<Context, UserData>): import("@crawlee/basic").RouterHandler<Context>;
  //# sourceMappingURL=file-download.d.ts.map
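
The declarations above cover the new streaming helpers and the reworked `FileDownload` context. As a hedged illustration only (not part of the package), the helpers could be wired into a request handler as follows; the `@crawlee/http` import path, the `Readable.fromWeb` adaptation of `response.body`, and the destination file are assumptions:

```ts
import { createWriteStream } from 'node:fs';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
// Assumption: the helpers and the crawler are re-exported from the package root.
import { ByteCounterStream, FileDownload, MinimumSpeedStream } from '@crawlee/http';

const crawler = new FileDownload({
    async requestHandler({ request, response, log }) {
        // Assumption: response.body is a web ReadableStream, so adapt it to a Node readable first.
        const body = Readable.fromWeb(response.body as any);

        await pipeline(
            body,
            // Error out when the measured throughput stays below 100 (in the helper's kbps unit).
            MinimumSpeedStream({ minSpeedKbps: 100 }),
            // Report progress through the crawler log every few seconds (default loggingInterval).
            ByteCounterStream({
                logTransferredBytes: (bytes) => log.info(`${request.url}: ${bytes} bytes received`),
            }),
            createWriteStream('download.bin'), // hypothetical destination path
        );
    },
});

await crawler.run(['https://example.com/file.bin']);
```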
package/dist/internals/file-download.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"file-download.d.ts","sourceRoot":"","sources":["../../src/internals/file-download.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AAE5C,OAAO,KAAK,EACR,YAAY,EACZ,sBAAsB,EACtB,kBAAkB,EAClB,2BAA2B,EAC3B,gBAAgB,EAChB,cAAc,EACd,YAAY,EACf,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,WAAW,EAAU,MAAM,aAAa,CAAC;AAElD,MAAM,MAAM,wBAAwB,CAChC,QAAQ,SAAS,UAAU,GAAG,GAAG,EAAE,2EAA2E;AAC9G,QAAQ,SAAS,UAAU,GAAG,GAAG,IACjC,YAAY,CAAC,2BAA2B,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC,CAAC;AAElE,MAAM,MAAM,oBAAoB,GAAG,IAAI,CACnC,2BAA2B,EAC3B,MAAM,GAAG,kBAAkB,GAAG,MAAM,GAAG,aAAa,GAAG,aAAa,CACvE,GAAG;IACA,MAAM,EAAE,OAAO,CAAC;CACnB,CAAC;AAEF,KAAK,aAAa,GAAG,CAAC,OAAO,EAAE,oBAAoB,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;AAE7E,MAAM,MAAM,mBAAmB,CAC3B,QAAQ,SAAS,UAAU,GAAG,GAAG,EAAE,2EAA2E;AAC9G,QAAQ,SAAS,UAAU,GAAG,GAAG,IAE/B,CAAC,IAAI,CAAC,kBAAkB,CAAC,2BAA2B,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC,EAAE,gBAAgB,CAAC,GAAG;IAC3F,cAAc,CAAC,EAAE,KAAK,CAAC;IACvB,aAAa,CAAC,EAAE,aAAa,CAAC;CACjC,CAAC,GACF,CAAC,IAAI,CAAC,kBAAkB,CAAC,2BAA2B,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC,EAAE,gBAAgB,CAAC,GAAG;IAC3F,cAAc,EAAE,0BAA0B,CAAC;IAC3C,aAAa,CAAC,EAAE,KAAK,CAAC;CACzB,CAAC,CAAC;AAET,MAAM,MAAM,gBAAgB,CACxB,QAAQ,SAAS,UAAU,GAAG,GAAG,EAAE,2EAA2E;AAC9G,QAAQ,SAAS,UAAU,GAAG,GAAG,IACjC,gBAAgB,CAAC,2BAA2B,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC,CAAC;AAEtE,MAAM,WAAW,2BAA2B,CACxC,QAAQ,SAAS,UAAU,GAAG,GAAG,EAAE,2EAA2E;AAC9G,QAAQ,SAAS,UAAU,GAAG,GAAG,CACnC,SAAQ,2BAA2B,CAAC,QAAQ,EAAE,QAAQ,EAAE,YAAY,CAAC;CAAG;AAE1E,MAAM,MAAM,0BAA0B,CAClC,QAAQ,SAAS,UAAU,GAAG,GAAG,EAAE,2EAA2E;AAC9G,QAAQ,SAAS,UAAU,GAAG,GAAG,IACjC,cAAc,CAAC,2BAA2B,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC,CAAC;AAEpE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA0CG;AACH,qBAAa,YAAa,SAAQ,WAAW,CAAC,2BAA2B,CAAC;IACtE,OAAO,CAAC,aAAa,CAAC,CAAgB;gBAE1B,OAAO,GAAE,mBAAwB;cAqBpB,kBAAkB,CAAC,OAAO,EAAE,2BAA2B;YAQlE,oBAAoB;CA8DrC;AAED;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,wBAAgB,gBAAgB,CAC5B,OAAO,SAAS,2BAA2B,GAAG,2BAA2B,EACzE,QAAQ,SAAS,UAAU,GAAG,sBAAsB,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,EAC1E,MAAM,CAAC,EAAE,YAAY,CAAC,OAAO,EAAE,QAAQ,CAAC,iEAEzC"}
+ {"version":3,"file":"file-download.d.ts","sourceRoot":"","sources":["../../src/internals/file-download.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAGxC,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,gBAAgB,CAAC;AAC1D,OAAO,EAAE,YAAY,EAAmB,MAAM,gBAAgB,CAAC;AAC/D,OAAO,KAAK,EAAE,eAAe,EAAE,aAAa,EAAE,OAAO,EAAE,MAAM,eAAe,CAAC;AAC7E,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAEjD,OAAO,KAAK,EAAE,YAAY,EAAE,sBAAsB,EAAE,gBAAgB,EAAE,cAAc,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAIxH,MAAM,MAAM,wBAAwB,CAChC,QAAQ,SAAS,UAAU,GAAG,GAAG,IACjC,YAAY,CAAC,2BAA2B,CAAC,QAAQ,CAAC,CAAC,CAAC;AAExD,MAAM,MAAM,gBAAgB,CACxB,QAAQ,SAAS,UAAU,GAAG,GAAG,IACjC,gBAAgB,CAAC,2BAA2B,CAAC,QAAQ,CAAC,CAAC,CAAC;AAE5D,MAAM,WAAW,2BAA2B,CACxC,QAAQ,SAAS,UAAU,GAAG,GAAG,CACnC,SAAQ,eAAe,CAAC,QAAQ,CAAC;IAC/B,OAAO,EAAE,aAAa,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC;IAC1C,QAAQ,EAAE,QAAQ,CAAC;IACnB,WAAW,EAAE;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,cAAc,CAAA;KAAE,CAAC;CAC3D;AAED,MAAM,MAAM,0BAA0B,CAClC,QAAQ,SAAS,UAAU,GAAG,GAAG,IACjC,cAAc,CAAC,2BAA2B,CAAC,QAAQ,CAAC,CAAC,CAAC;AAE1D;;;;;;;;GAQG;AACH,wBAAgB,kBAAkB,CAAC,EAC/B,YAAY,EACZ,eAAsB,EACtB,qBAAqB,EAAE,uBAA6B,GACvD,EAAE;IACC,YAAY,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,qBAAqB,CAAC,EAAE,MAAM,CAAC;CAClC,GAAG,SAAS,CA4BZ;AAED;;;;;;GAMG;AACH,wBAAgB,iBAAiB,CAAC,EAC9B,mBAAmB,EACnB,eAAsB,GACzB,EAAE;IACC,mBAAmB,EAAE,CAAC,gBAAgB,EAAE,MAAM,KAAK,IAAI,CAAC;IACxD,eAAe,CAAC,EAAE,MAAM,CAAC;CAC5B,GAAG,SAAS,CAoBZ;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA0CG;AACH,qBAAa,YAAa,SAAQ,YAAY,CAAC,2BAA2B,CAAC;gBAE3D,OAAO,GAAE,mBAAmB,CAAC,2BAA2B,CAAM;YAa5D,gBAAgB;CAiBjC;AAED;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,wBAAgB,gBAAgB,CAC5B,OAAO,SAAS,2BAA2B,GAAG,2BAA2B,EACzE,QAAQ,SAAS,UAAU,GAAG,sBAAsB,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,EAC1E,MAAM,CAAC,EAAE,YAAY,CAAC,OAAO,EAAE,QAAQ,CAAC,mDAEzC"}
package/dist/internals/file-download.js CHANGED
@@ -1,6 +1,66 @@
+ import { Transform } from 'node:stream';
  import { finished } from 'node:stream/promises';
- import { isPromise } from 'node:util/types';
- import { HttpCrawler, Router } from '../index.js';
+ import { BasicCrawler, ContextPipeline } from '@crawlee/basic';
+ import { Router } from '../index.js';
+ import { parseContentTypeFromResponse } from './utils.js';
+ /**
+ * Creates a transform stream that throws an error if the source data speed is below the specified minimum speed.
+ * This `Transform` checks the amount of data every `checkProgressInterval` milliseconds.
+ * If the stream has received less than `minSpeedKbps * historyLengthMs / 1000` bytes in the last `historyLengthMs` milliseconds,
+ * it will throw an error.
+ *
+ * Can be used e.g. to abort a download if the network speed is too slow.
+ * @returns Transform stream that monitors the speed of the incoming data.
+ */
+ export function MinimumSpeedStream({ minSpeedKbps, historyLengthMs = 10e3, checkProgressInterval: checkProgressIntervalMs = 5e3, }) {
+ let snapshots = [];
+ const checkInterval = setInterval(() => {
+ const now = Date.now();
+ snapshots = snapshots.filter((snapshot) => now - snapshot.timestamp < historyLengthMs);
+ const totalBytes = snapshots.reduce((acc, snapshot) => acc + snapshot.bytes, 0);
+ const elapsed = (now - (snapshots[0]?.timestamp ?? 0)) / 1000;
+ if (totalBytes / 1024 / elapsed < minSpeedKbps) {
+ clearInterval(checkInterval);
+ stream.emit('error', new Error(`Stream speed too slow, aborting...`));
+ }
+ }, checkProgressIntervalMs);
+ const stream = new Transform({
+ transform: (chunk, _, callback) => {
+ snapshots.push({ timestamp: Date.now(), bytes: chunk.length });
+ callback(null, chunk);
+ },
+ final: (callback) => {
+ clearInterval(checkInterval);
+ callback();
+ },
+ });
+ return stream;
+ }
+ /**
+ * Creates a transform stream that logs the progress of the incoming data.
+ * This `Transform` calls the `logProgress` function every `loggingInterval` milliseconds with the number of bytes received so far.
+ *
+ * Can be used e.g. to log the progress of a download.
+ * @returns Transform stream logging the progress of the incoming data.
+ */
+ export function ByteCounterStream({ logTransferredBytes, loggingInterval = 5000, }) {
+ let transferredBytes = 0;
+ let lastLogTime = Date.now();
+ return new Transform({
+ transform: (chunk, _, callback) => {
+ transferredBytes += chunk.length;
+ if (Date.now() - lastLogTime > loggingInterval) {
+ lastLogTime = Date.now();
+ logTransferredBytes(transferredBytes);
+ }
+ callback(null, chunk);
+ },
+ flush: (callback) => {
+ logTransferredBytes(transferredBytes);
+ callback();
+ },
+ });
+ }
  /**
  * Provides a framework for downloading files in parallel using plain HTTP requests. The URLs to download are fed either from a static list of URLs or they can be added on the fly from another crawler.
  *
@@ -16,11 +76,11 @@ import { HttpCrawler, Router } from '../index.js';
  *
  * The crawler finishes when there are no more {@link Request} objects to crawl.
  *
- * We can use the `preNavigationHooks` to adjust `gotOptions`:
+ * We can use the `preNavigationHooks` to adjust the crawling context before the request is made:
  *
  * ```
  * preNavigationHooks: [
- * (crawlingContext, gotOptions) => {
+ * (crawlingContext) => {
  * // ...
  * },
  * ]
@@ -44,79 +104,31 @@ import { HttpCrawler, Router } from '../index.js';
  * ]);
  * ```
  */
- export class FileDownload extends HttpCrawler {
- streamHandler;
+ export class FileDownload extends BasicCrawler {
+ // TODO hooks
  constructor(options = {}) {
- const { streamHandler } = options;
- delete options.streamHandler;
- if (streamHandler) {
- // For streams, the navigation is done in the request handler.
- options.requestHandlerTimeoutSecs = options.navigationTimeoutSecs ?? 120;
- }
- super(options);
- this.streamHandler = streamHandler;
- if (this.streamHandler) {
- this.requestHandler = this.streamRequestHandler;
- }
- // The base HttpCrawler class only supports a handful of text based mime types.
- // With the FileDownload crawler, we want to download any file type.
- this.supportedMimeTypes = new Set(['*/*']);
- }
- async _runRequestHandler(context) {
- if (this.streamHandler) {
- context.request.skipNavigation = true;
- }
- await super._runRequestHandler(context);
+ super({
+ ...options,
+ contextPipelineBuilder: () => ContextPipeline.create().compose({
+ action: async (context) => this.initiateDownload(context),
+ cleanup: async (context) => {
+ await (context.response.body ? finished(context.response.body) : Promise.resolve());
+ },
+ }),
+ });
  }
- async streamRequestHandler(context) {
- const { log, request: { url }, } = context;
- const response = await this.httpClient.stream({
- url,
- timeout: { request: undefined },
- proxyUrl: context.proxyInfo?.url,
+ async initiateDownload(context) {
+ const response = await this.httpClient.stream(context.request.intoFetchAPIRequest(), {
+ session: context.session,
  });
- let pollingInterval;
- const cleanUp = () => {
- clearInterval(pollingInterval);
- response.stream.destroy();
+ const { type, charset: encoding } = parseContentTypeFromResponse(response);
+ context.request.url = response.url;
+ const contextExtension = {
+ request: context.request,
+ response,
+ contentType: { type, encoding },
  };
- const downloadPromise = new Promise((resolve, reject) => {
- pollingInterval = setInterval(() => {
- const { total, transferred } = response.downloadProgress;
- if (transferred > 0) {
- log.debug(`Downloaded ${transferred} bytes of ${total ?? 0} bytes from ${url}.`);
- }
- }, 5000);
- response.stream.on('error', async (error) => {
- cleanUp();
- reject(error);
- });
- let streamHandlerResult;
- try {
- context.stream = response.stream;
- context.response = response;
- streamHandlerResult = this.streamHandler(context);
- }
- catch (e) {
- cleanUp();
- reject(e);
- }
- if (isPromise(streamHandlerResult)) {
- streamHandlerResult
- .then(() => {
- resolve();
- })
- .catch((e) => {
- cleanUp();
- reject(e);
- });
- }
- else {
- resolve();
- }
- });
- await Promise.all([downloadPromise, finished(response.stream)]);
- cleanUp();
+ return contextExtension;
  }
  }
  /**
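
One more hedged sketch, using the `response` and `contentType` fields that `initiateDownload` attaches to the crawling context to persist a download into a key-value store; the key sanitization and buffering of the whole body are illustrative assumptions, not package behavior:

```ts
import { KeyValueStore } from '@crawlee/core';
import { FileDownload } from '@crawlee/http';

const crawler = new FileDownload({
    async requestHandler({ request, response, contentType }) {
        // Buffer the whole body; for large files the streaming helpers above are a better fit.
        const body = Buffer.from(await response.arrayBuffer());

        // Key-value store keys allow only a limited character set, so sanitize the unique key.
        const key = request.uniqueKey.replace(/[^a-zA-Z0-9!\-_.'()]/g, '_');

        const store = await KeyValueStore.open();
        await store.setValue(key, body, { contentType: contentType.type });
    },
});

await crawler.run(['https://example.com/report.pdf']);
```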
package/dist/internals/file-download.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"file-download.js","sourceRoot":"","sources":["../../src/internals/file-download.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,sBAAsB,CAAC;AAChD,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAc5C,OAAO,EAAE,WAAW,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AA4ClD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA0CG;AACH,MAAM,OAAO,YAAa,SAAQ,WAAwC;IAC9D,aAAa,CAAiB;IAEtC,YAAY,UAA+B,EAAE;QACzC,MAAM,EAAE,aAAa,EAAE,GAAG,OAAO,CAAC;QAClC,OAAO,OAAO,CAAC,aAAa,CAAC;QAE7B,IAAI,aAAa,EAAE,CAAC;YAChB,8DAA8D;YAC7D,OAAe,CAAC,yBAAyB,GAAG,OAAO,CAAC,qBAAqB,IAAI,GAAG,CAAC;QACtF,CAAC;QAED,KAAK,CAAC,OAAO,CAAC,CAAC;QAEf,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,IAAI,CAAC,aAAa,EAAE,CAAC;YACrB,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,oBAA2B,CAAC;QAC3D,CAAC;QAED,+EAA+E;QAC/E,oEAAoE;QACnE,IAAY,CAAC,kBAAkB,GAAG,IAAI,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;IACxD,CAAC;IAEkB,KAAK,CAAC,kBAAkB,CAAC,OAAoC;QAC5E,IAAI,IAAI,CAAC,aAAa,EAAE,CAAC;YACrB,OAAO,CAAC,OAAO,CAAC,cAAc,GAAG,IAAI,CAAC;QAC1C,CAAC;QAED,MAAM,KAAK,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;IAC5C,CAAC;IAEO,KAAK,CAAC,oBAAoB,CAAC,OAAoC;QACnE,MAAM,EACF,GAAG,EACH,OAAO,EAAE,EAAE,GAAG,EAAE,GACnB,GAAG,OAAO,CAAC;QAEZ,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;YAC1C,GAAG;YACH,OAAO,EAAE,EAAE,OAAO,EAAE,SAAS,EAAE;YAC/B,QAAQ,EAAE,OAAO,CAAC,SAAS,EAAE,GAAG;SACnC,CAAC,CAAC;QAEH,IAAI,eAA2C,CAAC;QAEhD,MAAM,OAAO,GAAG,GAAG,EAAE;YACjB,aAAa,CAAC,eAAgB,CAAC,CAAC;YAChC,QAAQ,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;QAC9B,CAAC,CAAC;QAEF,MAAM,eAAe,GAAG,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC1D,eAAe,GAAG,WAAW,CAAC,GAAG,EAAE;gBAC/B,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,QAAQ,CAAC,gBAAgB,CAAC;gBAEzD,IAAI,WAAW,GAAG,CAAC,EAAE,CAAC;oBAClB,GAAG,CAAC,KAAK,CAAC,cAAc,WAAW,aAAa,KAAK,IAAI,CAAC,eAAe,GAAG,GAAG,CAAC,CAAC;gBACrF,CAAC;YACL,CAAC,EAAE,IAAI,CAAC,CAAC;YAET,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,KAAK,EAAE,KAAY,EAAE,EAAE;gBAC/C,OAAO,EAAE,CAAC;gBACV,MAAM,CAAC,KAAK,CAAC,CAAC;YAClB,CAAC,CAAC,CAAC;YAEH,IAAI,mBAAmB,CAAC;YAExB,IAAI,CAAC;gBACD,OAAO,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;gBACjC,OAAO,CAAC,QAAQ,GAAG,QAAe,CAAC;gBACnC,mBAAmB,GAAG,IAAI,CAAC,aAAc,CAAC,OAAc,CAAC,CAAC;YAC9D,CAAC;YAAC,OAAO,CAAC,EAAE,CAAC;gBACT,OAAO,EAAE,CAAC;gBACV,MAAM,CAAC,CAAC,CAAC,CAAC;YACd,CAAC;YAED,IAAI,SAAS,CAAC,mBAAmB,CAAC,EAAE,CAAC;gBACjC,mBAAmB;qBACd,IAAI,CAAC,GAAG,EAAE;oBACP,OAAO,EAAE,CAAC;gBACd,CAAC,CAAC;qBACD,KAAK,CAAC,CAAC,CAAQ,EAAE,EAAE;oBAChB,OAAO,EAAE,CAAC;oBACV,MAAM,CAAC,CAAC,CAAC,CAAC;gBACd,CAAC,CAAC,CAAC;YACX,CAAC;iBAAM,CAAC;gBACJ,OAAO,EAAE,CAAC;YACd,CAAC;QACL,CAAC,CAAC,CAAC;QAEH,MAAM,OAAO,CAAC,GAAG,CAAC,CAAC,eAAe,EAAE,QAAQ,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;QAEhE,OAAO,EAAE,CAAC;IACd,CAAC;CACJ;AAED;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,MAAM,UAAU,gBAAgB,CAG9B,MAAwC;IACtC,OAAO,MAAM,CAAC,MAAM,CAAU,MAAM,CAAC,CAAC;AAC1C,CAAC"}
+ {"version":3,"file":"file-download.js","sourceRoot":"","sources":["../../src/internals/file-download.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,QAAQ,EAAE,MAAM,sBAAsB,CAAC;AAGhD,OAAO,EAAE,YAAY,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAK/D,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AACrC,OAAO,EAAE,4BAA4B,EAAE,MAAM,YAAY,CAAC;AAsB1D;;;;;;;;GAQG;AACH,MAAM,UAAU,kBAAkB,CAAC,EAC/B,YAAY,EACZ,eAAe,GAAG,IAAI,EACtB,qBAAqB,EAAE,uBAAuB,GAAG,GAAG,GAKvD;IACG,IAAI,SAAS,GAA2C,EAAE,CAAC;IAE3D,MAAM,aAAa,GAAG,WAAW,CAAC,GAAG,EAAE;QACnC,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAEvB,SAAS,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,GAAG,GAAG,QAAQ,CAAC,SAAS,GAAG,eAAe,CAAC,CAAC;QACvF,MAAM,UAAU,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,CAAC,GAAG,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QAChF,MAAM,OAAO,GAAG,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;QAE9D,IAAI,UAAU,GAAG,IAAI,GAAG,OAAO,GAAG,YAAY,EAAE,CAAC;YAC7C,aAAa,CAAC,aAAa,CAAC,CAAC;YAC7B,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC,CAAC;QAC1E,CAAC;IACL,CAAC,EAAE,uBAAuB,CAAC,CAAC;IAE5B,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;QACzB,SAAS,EAAE,CAAC,KAAK,EAAE,CAAC,EAAE,QAAQ,EAAE,EAAE;YAC9B,SAAS,CAAC,IAAI,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE,EAAE,KAAK,EAAE,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC;YAC/D,QAAQ,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QAC1B,CAAC;QACD,KAAK,EAAE,CAAC,QAAQ,EAAE,EAAE;YAChB,aAAa,CAAC,aAAa,CAAC,CAAC;YAC7B,QAAQ,EAAE,CAAC;QACf,CAAC;KACJ,CAAC,CAAC;IAEH,OAAO,MAAM,CAAC;AAClB,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,iBAAiB,CAAC,EAC9B,mBAAmB,EACnB,eAAe,GAAG,IAAI,GAIzB;IACG,IAAI,gBAAgB,GAAG,CAAC,CAAC;IACzB,IAAI,WAAW,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;IAE7B,OAAO,IAAI,SAAS,CAAC;QACjB,SAAS,EAAE,CAAC,KAAK,EAAE,CAAC,EAAE,QAAQ,EAAE,EAAE;YAC9B,gBAAgB,IAAI,KAAK,CAAC,MAAM,CAAC;YAEjC,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,WAAW,GAAG,eAAe,EAAE,CAAC;gBAC7C,WAAW,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;gBACzB,mBAAmB,CAAC,gBAAgB,CAAC,CAAC;YAC1C,CAAC;YAED,QAAQ,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QAC1B,CAAC;QACD,KAAK,EAAE,CAAC,QAAQ,EAAE,EAAE;YAChB,mBAAmB,CAAC,gBAAgB,CAAC,CAAC;YACtC,QAAQ,EAAE,CAAC;QACf,CAAC;KACJ,CAAC,CAAC;AACP,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA0CG;AACH,MAAM,OAAO,YAAa,SAAQ,YAAyC;IACvE,aAAa;IACb,YAAY,UAA4D,EAAE;QACtE,KAAK,CAAC;YACF,GAAG,OAAO;YACV,sBAAsB,EAAE,GAAG,EAAE,CACzB,eAAe,CAAC,MAAM,EAAmB,CAAC,OAAO,CAAC;gBAC9C,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;gBACzD,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE;oBACvB,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAW,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC;gBAC/F,CAAC;aACJ,CAAC;SACT,CAAC,CAAC;IACP,CAAC;IAEO,KAAK,CAAC,gBAAgB,CAAC,OAAwB;QACnD,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,mBAAmB,EAAE,EAAE;YACjF,OAAO,EAAE,OAAO,CAAC,OAAO;SAC3B,CAAC,CAAC;QAEH,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,4BAA4B,CAAC,QAAQ,CAAC,CAAC;QAE3E,OAAO,CAAC,OAAO,CAAC,GAAG,GAAG,QAAQ,CAAC,GAAG,CAAC;QAEnC,MAAM,gBAAgB,GAAG;YACrB,OAAO,EAAE,OAAO,CAAC,OAAiC;YAClD,QAAQ;YACR,WAAW,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;SAClC,CAAC;QAEF,OAAO,gBAAgB,CAAC;IAC5B,CAAC;CACJ;AAED;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,MAAM,UAAU,gBAAgB,CAG9B,MAAwC;IACtC,OAAO,MAAM,CAAC,MAAM,CAAU,MAAM,CAAC,CAAC;AAC1C,CAAC"}