@crawlee/http 4.0.0-beta.2 → 4.0.0-beta.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -1
- package/internals/file-download.d.ts +46 -33
- package/internals/file-download.d.ts.map +1 -1
- package/internals/file-download.js +85 -73
- package/internals/file-download.js.map +1 -1
- package/internals/http-crawler.d.ts +70 -149
- package/internals/http-crawler.d.ts.map +1 -1
- package/internals/http-crawler.js +178 -287
- package/internals/http-crawler.js.map +1 -1
- package/internals/utils.d.ts +9 -0
- package/internals/utils.d.ts.map +1 -0
- package/internals/utils.js +35 -0
- package/internals/utils.js.map +1 -0
- package/package.json +6 -6
- package/tsconfig.build.tsbuildinfo +0 -1
package/README.md
CHANGED
```diff
@@ -9,6 +9,10 @@
     <small>A web scraping and browser automation library</small>
 </h1>
 
+<p align=center>
+    <a href="https://trendshift.io/repositories/5179" target="_blank"><img src="https://trendshift.io/api/badge/repositories/5179" alt="apify%2Fcrawlee | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
+</p>
+
 <p align=center>
     <a href="https://www.npmjs.com/package/@crawlee/core" rel="nofollow"><img src="https://img.shields.io/npm/v/@crawlee/core.svg" alt="NPM latest version" data-canonical-src="https://img.shields.io/npm/v/@crawlee/core/next.svg" style="max-width: 100%;"></a>
     <a href="https://www.npmjs.com/package/@crawlee/core" rel="nofollow"><img src="https://img.shields.io/npm/dm/@crawlee/core.svg" alt="Downloads" data-canonical-src="https://img.shields.io/npm/dm/@crawlee/core.svg" style="max-width: 100%;"></a>
@@ -24,7 +28,7 @@ Crawlee is available as the [`crawlee`](https://www.npmjs.com/package/crawlee) N
 
 > 👉 **View full documentation, guides and examples on the [Crawlee project website](https://crawlee.dev)** 👈
 
->
+> Do you prefer 🐍 Python instead of JavaScript? [👉 Checkout Crawlee for Python 👈](https://github.com/apify/crawlee-python).
 
 ## Installation
 
```
package/internals/file-download.d.ts
CHANGED
```diff
@@ -1,29 +1,45 @@
+import { Transform } from 'node:stream';
+import type { BasicCrawlerOptions } from '@crawlee/basic';
+import { BasicCrawler } from '@crawlee/basic';
+import type { CrawlingContext, LoadedRequest, Request } from '@crawlee/core';
 import type { Dictionary } from '@crawlee/types';
-
-
-
-
-
-
-
-
-
-
-export type FileDownloadOptions<UserData extends Dictionary = any, // with default to Dictionary we cant use a typed router in untyped crawler
-JSONData extends Dictionary = any> = (Omit<HttpCrawlerOptions<FileDownloadCrawlingContext<UserData, JSONData>>, 'requestHandler'> & {
-    requestHandler?: never;
-    streamHandler?: StreamHandler;
-}) | (Omit<HttpCrawlerOptions<FileDownloadCrawlingContext<UserData, JSONData>>, 'requestHandler'> & {
-    requestHandler: FileDownloadRequestHandler;
-    streamHandler?: never;
-});
-export type FileDownloadHook<UserData extends Dictionary = any, // with default to Dictionary we cant use a typed router in untyped crawler
-JSONData extends Dictionary = any> = InternalHttpHook<FileDownloadCrawlingContext<UserData, JSONData>>;
-export interface FileDownloadCrawlingContext<UserData extends Dictionary = any, // with default to Dictionary we cant use a typed router in untyped crawler
-JSONData extends Dictionary = any> extends InternalHttpCrawlingContext<UserData, JSONData, FileDownload> {
+import type { ErrorHandler, GetUserDataFromRequest, InternalHttpHook, RequestHandler, RouterRoutes } from '../index.js';
+export type FileDownloadErrorHandler<UserData extends Dictionary = any> = ErrorHandler<FileDownloadCrawlingContext<UserData>>;
+export type FileDownloadHook<UserData extends Dictionary = any> = InternalHttpHook<FileDownloadCrawlingContext<UserData>>;
+export interface FileDownloadCrawlingContext<UserData extends Dictionary = any> extends CrawlingContext<UserData> {
+    request: LoadedRequest<Request<UserData>>;
+    response: Response;
+    contentType: {
+        type: string;
+        encoding: BufferEncoding;
+    };
 }
-export type FileDownloadRequestHandler<UserData extends Dictionary = any
-
+export type FileDownloadRequestHandler<UserData extends Dictionary = any> = RequestHandler<FileDownloadCrawlingContext<UserData>>;
+/**
+ * Creates a transform stream that throws an error if the source data speed is below the specified minimum speed.
+ * This `Transform` checks the amount of data every `checkProgressInterval` milliseconds.
+ * If the stream has received less than `minSpeedKbps * historyLengthMs / 1000` bytes in the last `historyLengthMs` milliseconds,
+ * it will throw an error.
+ *
+ * Can be used e.g. to abort a download if the network speed is too slow.
+ * @returns Transform stream that monitors the speed of the incoming data.
+ */
+export declare function MinimumSpeedStream({ minSpeedKbps, historyLengthMs, checkProgressInterval: checkProgressIntervalMs, }: {
+    minSpeedKbps: number;
+    historyLengthMs?: number;
+    checkProgressInterval?: number;
+}): Transform;
+/**
+ * Creates a transform stream that logs the progress of the incoming data.
+ * This `Transform` calls the `logProgress` function every `loggingInterval` milliseconds with the number of bytes received so far.
+ *
+ * Can be used e.g. to log the progress of a download.
+ * @returns Transform stream logging the progress of the incoming data.
+ */
+export declare function ByteCounterStream({ logTransferredBytes, loggingInterval, }: {
+    logTransferredBytes: (transferredBytes: number) => void;
+    loggingInterval?: number;
+}): Transform;
 /**
  * Provides a framework for downloading files in parallel using plain HTTP requests. The URLs to download are fed either from a static list of URLs or they can be added on the fly from another crawler.
  *
@@ -39,11 +55,11 @@ JSONData extends Dictionary = any> = RequestHandler<FileDownloadCrawlingContext<
  *
  * The crawler finishes when there are no more {@link Request} objects to crawl.
  *
- * We can use the `preNavigationHooks` to adjust
+ * We can use the `preNavigationHooks` to adjust the crawling context before the request is made:
  *
  * ```
  * preNavigationHooks: [
- *     (crawlingContext
+ *     (crawlingContext) => {
  *         // ...
  *     },
  * ]
@@ -67,11 +83,9 @@ JSONData extends Dictionary = any> = RequestHandler<FileDownloadCrawlingContext<
  * ]);
  * ```
  */
-export declare class FileDownload extends
-
-
-    protected _runRequestHandler(context: FileDownloadCrawlingContext): Promise<void>;
-    private streamRequestHandler;
+export declare class FileDownload extends BasicCrawler<FileDownloadCrawlingContext> {
+    constructor(options?: BasicCrawlerOptions<FileDownloadCrawlingContext>);
+    private initiateDownload;
 }
 /**
  * Creates new {@link Router} instance that works based on request labels.
@@ -98,6 +112,5 @@ export declare class FileDownload extends HttpCrawler<FileDownloadCrawlingContex
  * ```
  */
 // @ts-ignore optional peer dependency or compatibility with es2022
-export declare function createFileRouter<Context extends FileDownloadCrawlingContext = FileDownloadCrawlingContext, UserData extends Dictionary = GetUserDataFromRequest<Context['request']>>(routes?: RouterRoutes<Context, UserData>): import("
-export {};
+export declare function createFileRouter<Context extends FileDownloadCrawlingContext = FileDownloadCrawlingContext, UserData extends Dictionary = GetUserDataFromRequest<Context['request']>>(routes?: RouterRoutes<Context, UserData>): import("@crawlee/basic").RouterHandler<Context>;
 //# sourceMappingURL=file-download.d.ts.map
```
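The two new helpers declared above are plain Node.js `Transform` streams, so they can sit between any readable source and writable sink. A minimal usage sketch, assuming the helpers are reachable from the package root export (the URL, file name, and speed threshold below are illustrative, not from the package):

```ts
import { createWriteStream } from 'node:fs';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';

// Assumption: the helpers are re-exported from the package root;
// adjust the import path if they are only exposed internally.
import { ByteCounterStream, MinimumSpeedStream } from '@crawlee/http';

const response = await fetch('https://example.com/large-file.bin');
if (!response.body) throw new Error('Response has no body');

await pipeline(
    // Bridge the Fetch API web stream into a Node.js Readable
    // (cast papers over the DOM vs node:stream/web ReadableStream types).
    Readable.fromWeb(response.body as any),
    // Error out if throughput falls below 128 kB/s over the default 10 s window.
    MinimumSpeedStream({ minSpeedKbps: 128 }),
    // Report progress every 5 s (the default) and once more when the stream ends.
    ByteCounterStream({ logTransferredBytes: (bytes) => console.log(`${bytes} bytes transferred`) }),
    createWriteStream('large-file.bin'),
);
```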
package/internals/file-download.d.ts.map
CHANGED
```diff
@@ -1 +1 @@
-{"version":3,"file":"file-download.d.ts","sourceRoot":"","sources":["../../src/internals/file-download.ts"],"names":[],"mappings":"
+{"version":3,"file":"file-download.d.ts","sourceRoot":"","sources":["../../src/internals/file-download.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAGxC,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,gBAAgB,CAAC;AAC1D,OAAO,EAAE,YAAY,EAAmB,MAAM,gBAAgB,CAAC;AAC/D,OAAO,KAAK,EAAE,eAAe,EAAE,aAAa,EAAE,OAAO,EAAE,MAAM,eAAe,CAAC;AAC7E,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAEjD,OAAO,KAAK,EAAE,YAAY,EAAE,sBAAsB,EAAE,gBAAgB,EAAE,cAAc,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAIxH,MAAM,MAAM,wBAAwB,CAChC,QAAQ,SAAS,UAAU,GAAG,GAAG,IACjC,YAAY,CAAC,2BAA2B,CAAC,QAAQ,CAAC,CAAC,CAAC;AAExD,MAAM,MAAM,gBAAgB,CACxB,QAAQ,SAAS,UAAU,GAAG,GAAG,IACjC,gBAAgB,CAAC,2BAA2B,CAAC,QAAQ,CAAC,CAAC,CAAC;AAE5D,MAAM,WAAW,2BAA2B,CACxC,QAAQ,SAAS,UAAU,GAAG,GAAG,CACnC,SAAQ,eAAe,CAAC,QAAQ,CAAC;IAC/B,OAAO,EAAE,aAAa,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC;IAC1C,QAAQ,EAAE,QAAQ,CAAC;IACnB,WAAW,EAAE;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,cAAc,CAAA;KAAE,CAAC;CAC3D;AAED,MAAM,MAAM,0BAA0B,CAClC,QAAQ,SAAS,UAAU,GAAG,GAAG,IACjC,cAAc,CAAC,2BAA2B,CAAC,QAAQ,CAAC,CAAC,CAAC;AAE1D;;;;;;;;GAQG;AACH,wBAAgB,kBAAkB,CAAC,EAC/B,YAAY,EACZ,eAAsB,EACtB,qBAAqB,EAAE,uBAA6B,GACvD,EAAE;IACC,YAAY,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,qBAAqB,CAAC,EAAE,MAAM,CAAC;CAClC,GAAG,SAAS,CA4BZ;AAED;;;;;;GAMG;AACH,wBAAgB,iBAAiB,CAAC,EAC9B,mBAAmB,EACnB,eAAsB,GACzB,EAAE;IACC,mBAAmB,EAAE,CAAC,gBAAgB,EAAE,MAAM,KAAK,IAAI,CAAC;IACxD,eAAe,CAAC,EAAE,MAAM,CAAC;CAC5B,GAAG,SAAS,CAoBZ;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA0CG;AACH,qBAAa,YAAa,SAAQ,YAAY,CAAC,2BAA2B,CAAC;gBAE3D,OAAO,GAAE,mBAAmB,CAAC,2BAA2B,CAAM;YAa5D,gBAAgB;CAiBjC;AAED;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,wBAAgB,gBAAgB,CAC5B,OAAO,SAAS,2BAA2B,GAAG,2BAA2B,EACzE,QAAQ,SAAS,UAAU,GAAG,sBAAsB,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,EAC1E,MAAM,CAAC,EAAE,YAAY,CAAC,OAAO,EAAE,QAAQ,CAAC,mDAEzC"}
```
package/internals/file-download.js
CHANGED
```diff
@@ -1,6 +1,66 @@
+import { Transform } from 'node:stream';
 import { finished } from 'node:stream/promises';
-import {
-import {
+import { BasicCrawler, ContextPipeline } from '@crawlee/basic';
+import { Router } from '../index.js';
+import { parseContentTypeFromResponse } from './utils.js';
+/**
+ * Creates a transform stream that throws an error if the source data speed is below the specified minimum speed.
+ * This `Transform` checks the amount of data every `checkProgressInterval` milliseconds.
+ * If the stream has received less than `minSpeedKbps * historyLengthMs / 1000` bytes in the last `historyLengthMs` milliseconds,
+ * it will throw an error.
+ *
+ * Can be used e.g. to abort a download if the network speed is too slow.
+ * @returns Transform stream that monitors the speed of the incoming data.
+ */
+export function MinimumSpeedStream({ minSpeedKbps, historyLengthMs = 10e3, checkProgressInterval: checkProgressIntervalMs = 5e3, }) {
+    let snapshots = [];
+    const checkInterval = setInterval(() => {
+        const now = Date.now();
+        snapshots = snapshots.filter((snapshot) => now - snapshot.timestamp < historyLengthMs);
+        const totalBytes = snapshots.reduce((acc, snapshot) => acc + snapshot.bytes, 0);
+        const elapsed = (now - (snapshots[0]?.timestamp ?? 0)) / 1000;
+        if (totalBytes / 1024 / elapsed < minSpeedKbps) {
+            clearInterval(checkInterval);
+            stream.emit('error', new Error(`Stream speed too slow, aborting...`));
+        }
+    }, checkProgressIntervalMs);
+    const stream = new Transform({
+        transform: (chunk, _, callback) => {
+            snapshots.push({ timestamp: Date.now(), bytes: chunk.length });
+            callback(null, chunk);
+        },
+        final: (callback) => {
+            clearInterval(checkInterval);
+            callback();
+        },
+    });
+    return stream;
+}
+/**
+ * Creates a transform stream that logs the progress of the incoming data.
+ * This `Transform` calls the `logProgress` function every `loggingInterval` milliseconds with the number of bytes received so far.
+ *
+ * Can be used e.g. to log the progress of a download.
+ * @returns Transform stream logging the progress of the incoming data.
+ */
+export function ByteCounterStream({ logTransferredBytes, loggingInterval = 5000, }) {
+    let transferredBytes = 0;
+    let lastLogTime = Date.now();
+    return new Transform({
+        transform: (chunk, _, callback) => {
+            transferredBytes += chunk.length;
+            if (Date.now() - lastLogTime > loggingInterval) {
+                lastLogTime = Date.now();
+                logTransferredBytes(transferredBytes);
+            }
+            callback(null, chunk);
+        },
+        flush: (callback) => {
+            logTransferredBytes(transferredBytes);
+            callback();
+        },
+    });
+}
 /**
  * Provides a framework for downloading files in parallel using plain HTTP requests. The URLs to download are fed either from a static list of URLs or they can be added on the fly from another crawler.
  *
@@ -16,11 +76,11 @@ import { HttpCrawler, Router } from '../index.js';
  *
  * The crawler finishes when there are no more {@link Request} objects to crawl.
  *
- * We can use the `preNavigationHooks` to adjust
+ * We can use the `preNavigationHooks` to adjust the crawling context before the request is made:
  *
  * ```
  * preNavigationHooks: [
- *     (crawlingContext
+ *     (crawlingContext) => {
  *         // ...
  *     },
  * ]
@@ -44,79 +104,31 @@ import { HttpCrawler, Router } from '../index.js';
  * ]);
  * ```
  */
-export class FileDownload extends
-
+export class FileDownload extends BasicCrawler {
+    // TODO hooks
     constructor(options = {}) {
-
-
-
-
-
-
-
-
-
-        this.requestHandler = this.streamRequestHandler;
-        }
-        // The base HttpCrawler class only supports a handful of text based mime types.
-        // With the FileDownload crawler, we want to download any file type.
-        this.supportedMimeTypes = new Set(['*/*']);
-    }
-    async _runRequestHandler(context) {
-        if (this.streamHandler) {
-            context.request.skipNavigation = true;
-        }
-        await super._runRequestHandler(context);
+        super({
+            ...options,
+            contextPipelineBuilder: () => ContextPipeline.create().compose({
+                action: async (context) => this.initiateDownload(context),
+                cleanup: async (context) => {
+                    await (context.response.body ? finished(context.response.body) : Promise.resolve());
+                },
+            }),
+        });
     }
-    async
-    const
-
-            url,
-            timeout: { request: undefined },
-            proxyUrl: context.proxyInfo?.url,
+    async initiateDownload(context) {
+        const response = await this.httpClient.stream(context.request.intoFetchAPIRequest(), {
+            session: context.session,
         });
-
-
-
-
+        const { type, charset: encoding } = parseContentTypeFromResponse(response);
+        context.request.url = response.url;
+        const contextExtension = {
+            request: context.request,
+            response,
+            contentType: { type, encoding },
         };
-
-        pollingInterval = setInterval(() => {
-            const { total, transferred } = response.downloadProgress;
-            if (transferred > 0) {
-                log.debug(`Downloaded ${transferred} bytes of ${total ?? 0} bytes from ${url}.`);
-            }
-        }, 5000);
-        response.stream.on('error', async (error) => {
-            cleanUp();
-            reject(error);
-        });
-        let streamHandlerResult;
-        try {
-            context.stream = response.stream;
-            context.response = response;
-            streamHandlerResult = this.streamHandler(context);
-        }
-        catch (e) {
-            cleanUp();
-            reject(e);
-        }
-        if (isPromise(streamHandlerResult)) {
-            streamHandlerResult
-                .then(() => {
-                    resolve();
-                })
-                .catch((e) => {
-                    cleanUp();
-                    reject(e);
-                });
-        }
-        else {
-            resolve();
-        }
-        });
-        await Promise.all([downloadPromise, finished(response.stream)]);
-        cleanUp();
+        return contextExtension;
     }
 }
 /**
```
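Compared with the removed got-scraping plumbing (download-progress polling, manual `streamHandler` resolution), the new class builds its crawling context through `ContextPipeline`: `initiateDownload` opens a streamed response via the crawler's `httpClient` and extends the context with the Fetch-style `response` and parsed `contentType`, while the `cleanup` step drains the body after the handler returns. A sketch of a request handler written against this beta context shape (the file-naming logic is illustrative, not from the package):

```ts
import { createWriteStream } from 'node:fs';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';

import { FileDownload } from '@crawlee/http';

const crawler = new FileDownload({
    async requestHandler({ request, response, contentType }) {
        // `response` follows the Fetch API shape, so the body is a web ReadableStream.
        if (!response.body) return;
        // Derive a local file name from the URL path (illustrative only).
        const fileName = new URL(request.url).pathname.split('/').pop() || 'download.bin';
        console.log(`Saving ${request.url} (${contentType.type}) to ${fileName}`);
        await pipeline(Readable.fromWeb(response.body as any), createWriteStream(fileName));
    },
});

await crawler.run(['https://example.com/file.pdf']);
```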
package/internals/file-download.js.map
CHANGED
```diff
@@ -1 +1 @@
-{"version":3,"file":"file-download.js","sourceRoot":"","sources":["../../src/internals/file-download.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,
+{"version":3,"file":"file-download.js","sourceRoot":"","sources":["../../src/internals/file-download.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,QAAQ,EAAE,MAAM,sBAAsB,CAAC;AAGhD,OAAO,EAAE,YAAY,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAK/D,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AACrC,OAAO,EAAE,4BAA4B,EAAE,MAAM,YAAY,CAAC;AAsB1D;;;;;;;;GAQG;AACH,MAAM,UAAU,kBAAkB,CAAC,EAC/B,YAAY,EACZ,eAAe,GAAG,IAAI,EACtB,qBAAqB,EAAE,uBAAuB,GAAG,GAAG,GAKvD;IACG,IAAI,SAAS,GAA2C,EAAE,CAAC;IAE3D,MAAM,aAAa,GAAG,WAAW,CAAC,GAAG,EAAE;QACnC,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAEvB,SAAS,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,GAAG,GAAG,QAAQ,CAAC,SAAS,GAAG,eAAe,CAAC,CAAC;QACvF,MAAM,UAAU,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,QAAQ,EAAE,EAAE,CAAC,GAAG,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QAChF,MAAM,OAAO,GAAG,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;QAE9D,IAAI,UAAU,GAAG,IAAI,GAAG,OAAO,GAAG,YAAY,EAAE,CAAC;YAC7C,aAAa,CAAC,aAAa,CAAC,CAAC;YAC7B,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC,CAAC;QAC1E,CAAC;IACL,CAAC,EAAE,uBAAuB,CAAC,CAAC;IAE5B,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;QACzB,SAAS,EAAE,CAAC,KAAK,EAAE,CAAC,EAAE,QAAQ,EAAE,EAAE;YAC9B,SAAS,CAAC,IAAI,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE,EAAE,KAAK,EAAE,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC;YAC/D,QAAQ,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QAC1B,CAAC;QACD,KAAK,EAAE,CAAC,QAAQ,EAAE,EAAE;YAChB,aAAa,CAAC,aAAa,CAAC,CAAC;YAC7B,QAAQ,EAAE,CAAC;QACf,CAAC;KACJ,CAAC,CAAC;IAEH,OAAO,MAAM,CAAC;AAClB,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,iBAAiB,CAAC,EAC9B,mBAAmB,EACnB,eAAe,GAAG,IAAI,GAIzB;IACG,IAAI,gBAAgB,GAAG,CAAC,CAAC;IACzB,IAAI,WAAW,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;IAE7B,OAAO,IAAI,SAAS,CAAC;QACjB,SAAS,EAAE,CAAC,KAAK,EAAE,CAAC,EAAE,QAAQ,EAAE,EAAE;YAC9B,gBAAgB,IAAI,KAAK,CAAC,MAAM,CAAC;YAEjC,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,WAAW,GAAG,eAAe,EAAE,CAAC;gBAC7C,WAAW,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;gBACzB,mBAAmB,CAAC,gBAAgB,CAAC,CAAC;YAC1C,CAAC;YAED,QAAQ,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QAC1B,CAAC;QACD,KAAK,EAAE,CAAC,QAAQ,EAAE,EAAE;YAChB,mBAAmB,CAAC,gBAAgB,CAAC,CAAC;YACtC,QAAQ,EAAE,CAAC;QACf,CAAC;KACJ,CAAC,CAAC;AACP,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA0CG;AACH,MAAM,OAAO,YAAa,SAAQ,YAAyC;IACvE,aAAa;IACb,YAAY,UAA4D,EAAE;QACtE,KAAK,CAAC;YACF,GAAG,OAAO;YACV,sBAAsB,EAAE,GAAG,EAAE,CACzB,eAAe,CAAC,MAAM,EAAmB,CAAC,OAAO,CAAC;gBAC9C,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;gBACzD,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE;oBACvB,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAW,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC;gBAC/F,CAAC;aACJ,CAAC;SACT,CAAC,CAAC;IACP,CAAC;IAEO,KAAK,CAAC,gBAAgB,CAAC,OAAwB;QACnD,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,mBAAmB,EAAE,EAAE;YACjF,OAAO,EAAE,OAAO,CAAC,OAAO;SAC3B,CAAC,CAAC;QAEH,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,4BAA4B,CAAC,QAAQ,CAAC,CAAC;QAE3E,OAAO,CAAC,OAAO,CAAC,GAAG,GAAG,QAAQ,CAAC,GAAG,CAAC;QAEnC,MAAM,gBAAgB,GAAG;YACrB,OAAO,EAAE,OAAO,CAAC,OAAiC;YAClD,QAAQ;YACR,WAAW,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;SAClC,CAAC;QAEF,OAAO,gBAAgB,CAAC;IAC5B,CAAC;CACJ;AAED;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,MAAM,UAAU,gBAAgB,CAG9B,MAAwC;IACtC,OAAO,MAAM,CAAC,MAAM,CAAU,MAAM,CAAC,CAAC;AAC1C,CAAC"}
```