@hono/node-server 0.6.0 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -7
- package/dist/globals.d.ts +1 -30
- package/dist/globals.js +13 -37
- package/dist/listener.d.ts +1 -0
- package/dist/listener.js +13 -14
- package/dist/serve-static.js +1 -1
- package/dist/server.js +0 -2
- package/dist/types.d.ts +0 -5
- package/dist/vercel.d.ts +3 -0
- package/dist/vercel.js +8 -0
- package/package.json +9 -11
- package/dist/base64.d.ts +0 -2
- package/dist/base64.js +0 -11
- package/dist/fetch.d.ts +0 -27
- package/dist/fetch.js +0 -46
- package/dist/nextjs.d.ts +0 -2
- package/dist/nextjs.js +0 -10
- package/dist/stream.d.ts +0 -7
- package/dist/stream.js +0 -107
package/README.md
CHANGED
@@ -1,13 +1,14 @@
-#
+# Node.js Adapter for Hono
 
-This
-Hono is ultrafast web framework for Cloudflare Workers, Deno, and Bun.
-**It's not for Node.js**.
-**BUT**, there may be a case that you really want to run on Node.js. This library is an adapter server that connects Hono and Node.js.
+This adapter allows you to run your Hono application on Node.js. Initially, Hono wasn't designed for Node.js, but with this adapter, it can now be used with Node.js. It utilizes web standard APIs implemented in Node.js version 18 or higher.
 
-Hono is ultra
+While Hono is ultra-fast, it may not be as fast on Node.js due to the overhead involved in adapting Hono's API to Node.js.
 
-
+However, it's worth noting that it is still faster than Express.
+
+## Requirement
+
+- Node.js version 18 or higher.
 
 ## Install
 
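The rewritten README above describes the adapter in prose; for orientation, basic usage of the package's `serve` export (implemented in dist/server.js and exposed through the package entry point) looks roughly like the sketch below. The route and port are illustrative only, not part of this diff.

```ts
// Minimal usage sketch for @hono/node-server 1.0.0 (route and port are arbitrary examples).
import { serve } from '@hono/node-server'
import { Hono } from 'hono'

const app = new Hono()
app.get('/', (c) => c.text('Hello from Node.js!'))

// serve() builds a node:http server around app.fetch via getRequestListener.
serve({ fetch: app.fetch, port: 3000 })
```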
package/dist/globals.d.ts
CHANGED
@@ -1,30 +1 @@
-
-declare global {
-    namespace NodeJS {
-        interface ProcessEnv {
-            NODE_ENV: 'development' | 'production' | 'test';
-        }
-        interface Global {
-            atob: typeof atob;
-            btoa: typeof btoa;
-            Blob: typeof Blob;
-            File: typeof File;
-            Headers: typeof Headers;
-            Request: typeof Request;
-            Response: typeof Response;
-            fetch: typeof fetch;
-            FormData: typeof FormData;
-            ReadableStream: typeof ReadableStream;
-            WritableStream: typeof WritableStream;
-            TransformStream: typeof TransformStream;
-            TextDecoderStream: typeof TextDecoderStream;
-            TextEncoderStream: typeof TextEncoderStream;
-            crypto: Crypto;
-        }
-    }
-}
-export declare function installGlobals(): void;
-/**
- * Credits:
- * - https://github.com/remix-run/remix/blob/e77e2eb/packages/remix-node/globals.ts
- */
+export {};
package/dist/globals.js
CHANGED
@@ -3,42 +3,18 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.installGlobals = void 0;
 const node_crypto_1 = __importDefault(require("node:crypto"));
-const
-
-
-
-if (typeof base64_1.atob === 'undefined')
-    global.atob = base64_1.atob;
-if (typeof base64_1.btoa === 'undefined')
-    global.btoa = base64_1.btoa;
-if (typeof Blob === 'undefined')
-    global.Blob = fetch_1.Blob;
-global.File = fetch_1.File;
-global.Headers = fetch_1.Headers;
-global.Request = fetch_1.Request;
-global.Response = fetch_1.Response;
-global.fetch = fetch_1.fetch;
-global.FormData = fetch_1.FormData;
-global.ReadableStream = web_stream_1.ReadableStream;
-global.WritableStream = web_stream_1.WritableStream;
-global.TransformStream = web_stream_1.TransformStream;
-global.TextDecoderStream = web_stream_1.TextDecoderStream;
-global.TextEncoderStream = web_stream_1.TextEncoderStream;
-if (typeof global.crypto === 'undefined') {
-    // If crypto.subtle is undefined, we're in a Node.js v16 environment
-    if (typeof node_crypto_1.default.subtle === 'undefined') {
-        // We can use the webcrypto polyfill
-        global.crypto = require('crypto').webcrypto;
-    }
-    else {
-        global.crypto = node_crypto_1.default;
-    }
-}
+const webFetch = global.fetch;
+/** jest dose not use crypto in the global, but this is OK for node 18 */
+if (typeof global.crypto === 'undefined') {
+    global.crypto = node_crypto_1.default;
 }
-
-
-
-
-
+global.fetch = (info, init) => {
+    init = {
+        // Disable compression handling so people can return the result of a fetch
+        // directly in the loader without messing with the Content-Encoding header.
+        compress: false,
+        ...init,
+    };
+    return webFetch(info, init);
+};
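The new globals.js no longer installs the Remix polyfills: it keeps Node's built-in fetch, assigns node:crypto when global.crypto is missing, and wraps fetch so that compress defaults to false. One practical consequence, sketched below with a made-up route and upstream URL, is that a handler can return an upstream fetch response unchanged without the already-decompressed body contradicting its Content-Encoding header.

```ts
// Illustration only: the route and upstream URL are hypothetical.
import { Hono } from 'hono'

const app = new Hono()

// Because the adapter's patched global fetch defaults to compress: false,
// the proxied Response body and its Content-Encoding header stay consistent.
app.get('/proxy', () => fetch('https://example.com/data.json'))

export default app
```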
package/dist/listener.d.ts
CHANGED
@@ -1,3 +1,4 @@
 import { IncomingMessage, ServerResponse } from 'node:http';
 import { FetchCallback } from './types';
+import './globals';
 export declare const getRequestListener: (fetchCallback: FetchCallback) => (incoming: IncomingMessage, outgoing: ServerResponse) => Promise<void>;
package/dist/listener.js
CHANGED
@@ -1,39 +1,37 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.getRequestListener = void 0;
-const
-const
+const node_stream_1 = require("node:stream");
+const promises_1 = require("node:stream/promises");
+require("./globals");
 const getRequestListener = (fetchCallback) => {
     return async (incoming, outgoing) => {
         const method = incoming.method || 'GET';
         const url = `http://${incoming.headers.host}${incoming.url}`;
-        const headerRecord =
+        const headerRecord = [];
         const len = incoming.rawHeaders.length;
-        for (let i = 0; i < len; i
-
-            const key = incoming.rawHeaders[i];
-            headerRecord[key] = incoming.rawHeaders[i + 1];
-        }
+        for (let i = 0; i < len; i += 2) {
+            headerRecord.push([incoming.rawHeaders[i], incoming.rawHeaders[i + 1]]);
         }
         const init = {
             method: method,
             headers: headerRecord,
-            // duplex: 'half', should used in nodejs 18
         };
         if (!(method === 'GET' || method === 'HEAD')) {
             // lazy-consume request body
-            init.body =
+            init.body = node_stream_1.Readable.toWeb(incoming);
+            init.duplex = 'half';
         }
         let res;
         try {
             res = (await fetchCallback(new Request(url.toString(), init)));
         }
         catch (e) {
-            res = new
+            res = new Response(null, { status: 500 });
             if (e instanceof Error) {
                 // timeout error emits 504 timeout
                 if (e.name === 'TimeoutError' || e.constructor.name === 'TimeoutError') {
-                    res = new
+                    res = new Response(null, { status: 504 });
                 }
             }
         }
@@ -45,7 +43,8 @@ const getRequestListener = (fetchCallback) => {
         const transferEncoding = res.headers.get('transfer-encoding');
         for (const [k, v] of res.headers) {
             if (k === 'set-cookie') {
-
+                // node native Headers.prototype has getSetCookie method
+                outgoing.setHeader(k, res.headers.getSetCookie(k));
             }
             else {
                 outgoing.setHeader(k, v);
@@ -67,7 +66,7 @@ const getRequestListener = (fetchCallback) => {
             contentLength ||
             /^no$/i.test(buffering) ||
             !/^(application\/json\b|text\/(?!event-stream\b))/i.test(contentType)) {
-            await (0,
+            await (0, promises_1.pipeline)(node_stream_1.Readable.fromWeb(res.body), outgoing);
         }
         else {
             const text = await res.text();
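Taken in isolation, the request conversion the updated listener performs can be sketched as follows. This is a simplified illustration of the approach visible in the diff (raw header pairs, Readable.toWeb, half-duplex body), not the package's actual code; the response path and error handling are omitted.

```ts
// Simplified sketch of how listener.js now builds a web-standard Request
// from a node:http IncomingMessage (Node.js 18+).
import { IncomingMessage } from 'node:http'
import { Readable } from 'node:stream'

function toRequest(incoming: IncomingMessage): Request {
  const url = `http://${incoming.headers.host}${incoming.url}`
  // rawHeaders is a flat [name, value, name, value, ...] list, so duplicate
  // headers (e.g. repeated Set-Cookie/Cookie entries) are preserved as pairs.
  const headers: [string, string][] = []
  for (let i = 0; i < incoming.rawHeaders.length; i += 2) {
    headers.push([incoming.rawHeaders[i], incoming.rawHeaders[i + 1]])
  }
  const init: RequestInit & { duplex?: 'half' } = { method: incoming.method, headers }
  if (incoming.method !== 'GET' && incoming.method !== 'HEAD') {
    // Stream the body lazily; 'half' duplex is required when the body is a stream.
    init.body = Readable.toWeb(incoming) as ReadableStream
    init.duplex = 'half'
  }
  return new Request(url, init)
}
```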
package/dist/serve-static.js
CHANGED
@@ -11,7 +11,7 @@ const serveStatic = (options = { root: '' }) => {
             await next();
         }
         const url = new URL(c.req.url);
-        const filename = options.path ?? url.pathname;
+        const filename = options.path ?? decodeURI(url.pathname);
         let path = (0, filepath_1.getFilePath)({
             filename: options.rewriteRequestPath ? options.rewriteRequestPath(filename) : filename,
             root: options.root,
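The single changed line means percent-encoded request paths are now decoded before the file lookup; a hypothetical example:

```ts
// Hypothetical request: GET /static/hello%20world.txt
const url = new URL('http://localhost/static/hello%20world.txt')
url.pathname            // '/static/hello%20world.txt' — what 0.6.0 passed to getFilePath
decodeURI(url.pathname) // '/static/hello world.txt'   — what 1.0.0 passes, matching the file on disk
```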
package/dist/server.js
CHANGED
@@ -2,9 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.serve = exports.createAdaptorServer = void 0;
 const node_http_1 = require("node:http");
-const globals_1 = require("./globals");
 const listener_1 = require("./listener");
-(0, globals_1.installGlobals)();
 const createAdaptorServer = (options) => {
     const fetchCallback = options.fetch;
     const requestListener = (0, listener_1.getRequestListener)(fetchCallback);
package/dist/types.d.ts
CHANGED
@@ -1,6 +1,4 @@
 /// <reference types="node" />
-import type { Hono } from 'hono';
-import type { NextApiHandler } from 'next/types';
 import type { createServer } from 'node:http';
 export declare type FetchCallback = (request: Request) => Promise<unknown> | unknown;
 export declare type NextHandlerOption = {
@@ -13,6 +11,3 @@ export declare type Options = {
     serverOptions?: Object;
     createServer?: typeof createServer;
 };
-export interface HandleInterface {
-    <E extends Hono<any, any>>(subApp: E, path?: string): NextApiHandler;
-}
package/dist/vercel.d.ts
ADDED
@@ -0,0 +1,3 @@
+/// <reference types="node" />
+import type { Env, Hono } from 'hono';
+export declare const handle: <E extends Env, S extends {}, BasePath extends string>(app: Hono<E, S, BasePath>) => (incoming: import("http").IncomingMessage, outgoing: import("http").ServerResponse<import("http").IncomingMessage>) => Promise<void>;
package/dist/vercel.js
ADDED
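The body of dist/vercel.js is not shown here, but the declaration above, together with the new "./vercel" export in package.json, suggests usage along these lines in a Vercel Node.js function. The api/ file name and route below are assumptions based on Vercel conventions, not part of this diff.

```ts
// api/index.ts (hypothetical file; Vercel's conventional functions directory)
import { Hono } from 'hono'
import { handle } from '@hono/node-server/vercel'

const app = new Hono()
app.get('/api/hello', (c) => c.json({ message: 'Hello from Vercel!' }))

// handle(app) returns an (incoming, outgoing) listener compatible with Vercel's Node runtime.
export default handle(app)
```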
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@hono/node-server",
-  "version": "0.6.0",
-  "description": "
+  "version": "1.0.0",
+  "description": "Node.js Adapter for Hono",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "files": [
@@ -10,15 +10,15 @@
   "exports": {
     ".": "./dist/index.js",
     "./serve-static": "./dist/serve-static.js",
-    "./
+    "./vercel": "./dist/vercel.js"
   },
   "typesVersions": {
     "*": {
       "serve-static": [
         "./dist/serve-static.d.ts"
       ],
-      "
-      "./dist/
+      "vercel": [
+        "./dist/vercel.d.ts"
       ]
     }
   },
@@ -39,19 +39,17 @@
     "registry": "https://registry.npmjs.org",
     "access": "public"
   },
-  "
-  "
-    "@remix-run/web-file": "^3.0.2",
-    "@remix-run/web-stream": "^1.0.3"
+  "engines": {
+    "node": ">=18.0.0"
   },
+  "dependencies": {},
   "devDependencies": {
     "@types/jest": "^29.0.1",
     "@types/node": "^18.7.16",
     "@types/supertest": "^2.0.12",
     "hono": "^3.1.5",
     "jest": "^29.0.3",
-    "
-    "np": "^7.6.2",
+    "np": "^7.7.0",
     "rimraf": "^3.0.2",
     "supertest": "^6.2.4",
     "ts-jest": "^29.0.0",
package/dist/base64.d.ts
DELETED
package/dist/base64.js
DELETED
@@ -1,11 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.btoa = exports.atob = void 0;
-function atob(a) {
-    return Buffer.from(a, 'base64').toString('binary');
-}
-exports.atob = atob;
-function btoa(b) {
-    return Buffer.from(b, 'binary').toString('base64');
-}
-exports.btoa = btoa;
package/dist/fetch.d.ts
DELETED
@@ -1,27 +0,0 @@
-/// <reference types="node" />
-import type { Readable } from 'node:stream';
-import { fetch as webFetch, Headers as WebHeaders, Request as WebRequest, Response as WebResponse } from '@remix-run/web-fetch';
-export { FormData } from '@remix-run/web-fetch';
-export { File, Blob } from '@remix-run/web-file';
-declare type NodeHeadersInit = ConstructorParameters<typeof WebHeaders>[0];
-declare type NodeResponseInit = NonNullable<ConstructorParameters<typeof WebResponse>[1]>;
-declare type NodeRequestInfo = ConstructorParameters<typeof WebRequest>[0] | NodeRequest;
-declare type NodeRequestInit = Omit<NonNullable<ConstructorParameters<typeof WebRequest>[1]>, 'body'> & {
-    body?: NonNullable<ConstructorParameters<typeof WebRequest>[1]>['body'] | Readable;
-};
-export type { NodeHeadersInit as HeadersInit, NodeRequestInfo as RequestInfo, NodeRequestInit as RequestInit, NodeResponseInit as ResponseInit, };
-declare class NodeRequest extends WebRequest {
-    constructor(info: NodeRequestInfo, init?: NodeRequestInit);
-    get headers(): WebHeaders;
-    clone(): NodeRequest;
-}
-declare class NodeResponse extends WebResponse {
-    get headers(): WebHeaders;
-    clone(): NodeResponse;
-}
-export { WebHeaders as Headers, NodeRequest as Request, NodeResponse as Response };
-export declare const fetch: typeof webFetch;
-/**
- * Credits:
- * - https://github.com/remix-run/remix/blob/e77e2eb/packages/remix-node/fetch.ts
- */
package/dist/fetch.js
DELETED
@@ -1,46 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.fetch = exports.Response = exports.Request = exports.Headers = exports.Blob = exports.File = exports.FormData = void 0;
-const web_fetch_1 = require("@remix-run/web-fetch");
-Object.defineProperty(exports, "Headers", { enumerable: true, get: function () { return web_fetch_1.Headers; } });
-var web_fetch_2 = require("@remix-run/web-fetch");
-Object.defineProperty(exports, "FormData", { enumerable: true, get: function () { return web_fetch_2.FormData; } });
-var web_file_1 = require("@remix-run/web-file");
-Object.defineProperty(exports, "File", { enumerable: true, get: function () { return web_file_1.File; } });
-Object.defineProperty(exports, "Blob", { enumerable: true, get: function () { return web_file_1.Blob; } });
-class NodeRequest extends web_fetch_1.Request {
-    constructor(info, init) {
-        super(info, init);
-    }
-    get headers() {
-        return super.headers;
-    }
-    // @ts-ignore
-    clone() {
-        return new NodeRequest(this);
-    }
-}
-exports.Request = NodeRequest;
-class NodeResponse extends web_fetch_1.Response {
-    get headers() {
-        return super.headers;
-    }
-    clone() {
-        return super.clone();
-    }
-}
-exports.Response = NodeResponse;
-const fetch = (info, init) => {
-    init = {
-        // Disable compression handling so people can return the result of a fetch
-        // directly in the loader without messing with the Content-Encoding header.
-        compress: false,
-        ...init,
-    };
-    return (0, web_fetch_1.fetch)(info, init);
-};
-exports.fetch = fetch;
-/**
- * Credits:
- * - https://github.com/remix-run/remix/blob/e77e2eb/packages/remix-node/fetch.ts
- */
package/dist/nextjs.d.ts
DELETED
package/dist/nextjs.js
DELETED
@@ -1,10 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.handle = void 0;
-const hono_1 = require("hono");
-const listener_1 = require("./listener");
-// <E extends Hono<any, any>
-const handle = (subApp, path = '/') => {
-    return (0, listener_1.getRequestListener)(new hono_1.Hono().route(path, subApp).fetch);
-};
-exports.handle = handle;
package/dist/stream.d.ts
DELETED
@@ -1,7 +0,0 @@
-/// <reference types="node" />
-import { Writable, Readable } from 'node:stream';
-/** pipeline will assure the backpressure and reduce huge memory usage */
-export declare function writeReadableStreamToWritable(stream: ReadableStream, writable: Writable): Promise<void>;
-/** This implementation use nodejs Readable::fromWeb as references */
-export declare function webReadableStreamToNodeReadable(stream: ReadableStream): Readable;
-export declare function nodeReadableToWebReadableStream(readable: Readable): ReadableStream<Uint8Array>;
package/dist/stream.js
DELETED
@@ -1,107 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.nodeReadableToWebReadableStream = exports.webReadableStreamToNodeReadable = exports.writeReadableStreamToWritable = void 0;
-const node_stream_1 = require("node:stream");
-const node_util_1 = require("node:util");
-const pipelinePromise = (0, node_util_1.promisify)(node_stream_1.pipeline);
-/** pipeline will assure the backpressure and reduce huge memory usage */
-async function writeReadableStreamToWritable(stream, writable) {
-    const readable = webReadableStreamToNodeReadable(stream);
-    return pipelinePromise(readable, writable);
-}
-exports.writeReadableStreamToWritable = writeReadableStreamToWritable;
-/** This implementation use nodejs Readable::fromWeb as references */
-function webReadableStreamToNodeReadable(stream) {
-    const reader = stream.getReader();
-    let closed = false;
-    const readable = new node_stream_1.Readable({
-        read() {
-            reader
-                .read()
-                .then(({ done, value }) => {
-                if (done) {
-                    this.push(null);
-                }
-                else {
-                    this.push(value);
-                }
-            })
-                .catch(e => {
-                readable.destroy(e);
-            });
-        },
-        destroy(error, callback) {
-            const done = () => {
-                try {
-                    callback(error);
-                }
-                catch (err) {
-                    process.nextTick(() => {
-                        throw err;
-                    });
-                }
-            };
-            if (!closed) {
-                reader.cancel(error).then(done, done);
-                return;
-            }
-            done();
-        },
-    });
-    reader.closed.then(() => {
-        closed = true;
-    }, error => {
-        readable.destroy(error);
-    });
-    return readable;
-}
-exports.webReadableStreamToNodeReadable = webReadableStreamToNodeReadable;
-function nodeReadableToWebReadableStream(readable) {
-    if (readable.destroyed) {
-        const stream = new ReadableStream();
-        stream.cancel();
-        return stream;
-    }
-    const highWaterMark = readable.readableHighWaterMark;
-    const strategy = { highWaterMark };
-    let controller;
-    const onData = (chunk) => {
-        // Copy the Buffer to detach it from the pool.
-        if (Buffer.isBuffer(chunk)) {
-            chunk = new Uint8Array(chunk);
-        }
-        controller.enqueue(chunk);
-        if (controller.desiredSize !== null && controller.desiredSize <= 0) {
-            readable.pause();
-        }
-    };
-    readable.pause();
-    const cleanup = (0, node_stream_1.finished)(readable, error => {
-        if (error?.code === 'ERR_STREAM_PREMATURE_CLOSE') {
-            const err = new Error(undefined, { cause: error });
-            Object.defineProperty(err, 'name', 'AbortError');
-            error = err;
-        }
-        cleanup();
-        // This is a protection against non-standard, legacy streams
-        // that happen to emit an error event again after finished is called.
-        readable.on('error', () => { });
-        if (error) {
-            return controller.error(error);
-        }
-        controller.close();
-    });
-    readable.on('data', onData);
-    return new ReadableStream({
-        start(c) {
-            controller = c;
-        },
-        pull() {
-            readable.resume();
-        },
-        cancel(reason) {
-            readable.destroy(reason);
-        },
-    }, strategy);
-}
-exports.nodeReadableToWebReadableStream = nodeReadableToWebReadableStream;