@restatedev/restate-sdk 1.10.3 → 1.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/_virtual/rolldown_runtime.js +13 -0
- package/dist/common_api.cjs +1 -0
- package/dist/common_api.d.cts +1 -0
- package/dist/common_api.d.cts.map +1 -1
- package/dist/common_api.d.ts +1 -0
- package/dist/common_api.d.ts.map +1 -1
- package/dist/common_api.js +1 -0
- package/dist/common_api.js.map +1 -1
- package/dist/context_impl.cjs +20 -6
- package/dist/context_impl.d.ts +7 -5
- package/dist/context_impl.d.ts.map +1 -1
- package/dist/context_impl.js +20 -6
- package/dist/context_impl.js.map +1 -1
- package/dist/endpoint/endpoint.cjs +2 -2
- package/dist/endpoint/endpoint.js +2 -2
- package/dist/endpoint/fetch_endpoint.cjs +2 -2
- package/dist/endpoint/fetch_endpoint.js +2 -2
- package/dist/endpoint/fetch_endpoint.js.map +1 -1
- package/dist/endpoint/handlers/core_logging.cjs +52 -0
- package/dist/endpoint/handlers/core_logging.d.ts +10 -0
- package/dist/endpoint/handlers/core_logging.d.ts.map +1 -0
- package/dist/endpoint/handlers/core_logging.js +51 -0
- package/dist/endpoint/handlers/core_logging.js.map +1 -0
- package/dist/endpoint/handlers/discovery.cjs +58 -0
- package/dist/endpoint/handlers/discovery.d.ts +5 -0
- package/dist/endpoint/handlers/discovery.d.ts.map +1 -0
- package/dist/endpoint/handlers/discovery.js +59 -0
- package/dist/endpoint/handlers/discovery.js.map +1 -0
- package/dist/endpoint/handlers/fetch.cjs +23 -11
- package/dist/endpoint/handlers/fetch.d.ts +2 -2
- package/dist/endpoint/handlers/fetch.d.ts.map +1 -1
- package/dist/endpoint/handlers/fetch.js +24 -11
- package/dist/endpoint/handlers/fetch.js.map +1 -1
- package/dist/endpoint/handlers/generic.cjs +167 -248
- package/dist/endpoint/handlers/generic.d.ts +2 -58
- package/dist/endpoint/handlers/generic.d.ts.map +1 -1
- package/dist/endpoint/handlers/generic.js +166 -244
- package/dist/endpoint/handlers/generic.js.map +1 -1
- package/dist/endpoint/handlers/lambda.cjs +64 -61
- package/dist/endpoint/handlers/lambda.d.ts +2 -2
- package/dist/endpoint/handlers/lambda.d.ts.map +1 -1
- package/dist/endpoint/handlers/lambda.js +64 -60
- package/dist/endpoint/handlers/lambda.js.map +1 -1
- package/dist/endpoint/handlers/types.d.ts +41 -0
- package/dist/endpoint/handlers/types.d.ts.map +1 -0
- package/dist/endpoint/handlers/types.js +2 -0
- package/dist/endpoint/handlers/types.js.map +1 -0
- package/dist/endpoint/handlers/utils.cjs +51 -0
- package/dist/endpoint/handlers/utils.d.ts +11 -0
- package/dist/endpoint/handlers/utils.d.ts.map +1 -0
- package/dist/endpoint/handlers/utils.js +48 -0
- package/dist/endpoint/handlers/utils.js.map +1 -0
- package/dist/endpoint/handlers/vm/sdk_shared_core_wasm_bindings.cjs +283 -283
- package/dist/endpoint/handlers/vm/sdk_shared_core_wasm_bindings.d.ts +107 -107
- package/dist/endpoint/handlers/vm/sdk_shared_core_wasm_bindings.d.ts.map +1 -1
- package/dist/endpoint/handlers/vm/sdk_shared_core_wasm_bindings.js +282 -282
- package/dist/endpoint/handlers/vm/sdk_shared_core_wasm_bindings.js.map +1 -1
- package/dist/endpoint/lambda_endpoint.cjs +2 -2
- package/dist/endpoint/lambda_endpoint.js +2 -2
- package/dist/endpoint/lambda_endpoint.js.map +1 -1
- package/dist/endpoint/node_endpoint.cjs +41 -41
- package/dist/endpoint/node_endpoint.d.ts +1 -1
- package/dist/endpoint/node_endpoint.d.ts.map +1 -1
- package/dist/endpoint/node_endpoint.js +41 -40
- package/dist/endpoint/node_endpoint.js.map +1 -1
- package/dist/fetch.cjs +7 -0
- package/dist/fetch.d.cts +2 -1
- package/dist/fetch.d.cts.map +1 -1
- package/dist/fetch.d.ts +2 -1
- package/dist/fetch.d.ts.map +1 -1
- package/dist/fetch.js +2 -1
- package/dist/fetch.js.map +1 -1
- package/dist/index.cjs +7 -0
- package/dist/index.d.cts +2 -1
- package/dist/index.d.ts +2 -1
- package/dist/index.js +2 -1
- package/dist/internal.cjs +11 -0
- package/dist/internal.d.cts +27 -0
- package/dist/internal.d.cts.map +1 -0
- package/dist/internal.d.ts +27 -0
- package/dist/internal.d.ts.map +1 -0
- package/dist/internal.js +6 -0
- package/dist/internal.js.map +1 -0
- package/dist/io.cjs +2 -2
- package/dist/io.d.ts +3 -3
- package/dist/io.d.ts.map +1 -1
- package/dist/io.js +2 -2
- package/dist/io.js.map +1 -1
- package/dist/lambda.cjs +7 -0
- package/dist/lambda.d.cts +2 -1
- package/dist/lambda.d.cts.map +1 -1
- package/dist/lambda.d.ts +2 -1
- package/dist/lambda.d.ts.map +1 -1
- package/dist/lambda.js +2 -1
- package/dist/lambda.js.map +1 -1
- package/dist/node.cjs +7 -0
- package/dist/node.d.cts +2 -1
- package/dist/node.d.cts.map +1 -1
- package/dist/node.d.ts +2 -1
- package/dist/node.d.ts.map +1 -1
- package/dist/node.js +2 -1
- package/dist/node.js.map +1 -1
- package/dist/package.cjs +1 -1
- package/dist/package.js +1 -1
- package/dist/package.js.map +1 -1
- package/dist/types/errors.cjs +2 -0
- package/dist/types/errors.d.cts +8 -0
- package/dist/types/errors.d.cts.map +1 -1
- package/dist/types/errors.d.ts +8 -0
- package/dist/types/errors.d.ts.map +1 -1
- package/dist/types/errors.js +2 -0
- package/dist/types/errors.js.map +1 -1
- package/package.json +2 -2
- package/dist/utils/streams.cjs +0 -14
- package/dist/utils/streams.d.ts +0 -3
- package/dist/utils/streams.d.ts.map +0 -1
- package/dist/utils/streams.js +0 -13
- package/dist/utils/streams.js.map +0 -1
|
@@ -1,10 +1,7 @@
|
|
|
1
1
|
const require_rolldown_runtime = require('../../_virtual/rolldown_runtime.cjs');
|
|
2
2
|
const require_errors = require('../../types/errors.cjs');
|
|
3
3
|
const require_user_agent = require('../../user_agent.cjs');
|
|
4
|
-
const
|
|
5
|
-
const require_generic = require('./generic.cjs');
|
|
6
|
-
let node_stream_web = require("node:stream/web");
|
|
7
|
-
node_stream_web = require_rolldown_runtime.__toESM(node_stream_web);
|
|
4
|
+
const require_utils = require('./utils.cjs');
|
|
8
5
|
let node_buffer = require("node:buffer");
|
|
9
6
|
node_buffer = require_rolldown_runtime.__toESM(node_buffer);
|
|
10
7
|
let node_zlib = require("node:zlib");
|
|
@@ -18,70 +15,76 @@ var LambdaHandler = class {
|
|
|
18
15
|
this.compressionSupported = compressionSupported;
|
|
19
16
|
}
|
|
20
17
|
async handleRequest(event, context) {
|
|
21
|
-
const path = "path" in event ? event.path : event.rawPath;
|
|
22
|
-
let requestContentEncoding;
|
|
23
|
-
let requestAcceptEncoding;
|
|
24
|
-
for (const [key, value] of Object.entries(event.headers)) if (key.localeCompare("content-encoding", void 0, { sensitivity: "accent" }) === 0) requestContentEncoding = value;
|
|
25
|
-
else if (key.localeCompare("accept-encoding", void 0, { sensitivity: "accent" }) === 0) requestAcceptEncoding = value;
|
|
26
|
-
let bodyStream;
|
|
27
|
-
if (!event.body) bodyStream = null;
|
|
28
|
-
else {
|
|
29
|
-
let bodyBuffer;
|
|
30
|
-
if (event.isBase64Encoded) bodyBuffer = node_buffer.Buffer.from(event.body, "base64");
|
|
31
|
-
else bodyBuffer = node_buffer.Buffer.from(new TextEncoder().encode(event.body));
|
|
32
|
-
if (requestContentEncoding && requestContentEncoding.includes("zstd")) {
|
|
33
|
-
if (!this.compressionSupported) throw new Error("The input is compressed using zstd, but this lambda deployment doesn't support compression. Make sure to deploy the Lambda using Node > 22");
|
|
34
|
-
bodyBuffer = node_zlib.zstdDecompressSync(bodyBuffer);
|
|
35
|
-
}
|
|
36
|
-
bodyStream = require_streams.OnceStream(bodyBuffer);
|
|
37
|
-
}
|
|
38
18
|
const abortController = new AbortController();
|
|
39
|
-
const request = {
|
|
40
|
-
body: bodyStream,
|
|
41
|
-
headers: event.headers,
|
|
42
|
-
url: path,
|
|
43
|
-
extraArgs: [context],
|
|
44
|
-
abortSignal: abortController.signal
|
|
45
|
-
};
|
|
46
|
-
let response;
|
|
47
|
-
try {
|
|
48
|
-
response = await this.handler.handle(request, { AWSRequestId: context.awsRequestId });
|
|
49
|
-
} catch (e) {
|
|
50
|
-
abortController.abort();
|
|
51
|
-
throw e;
|
|
52
|
-
}
|
|
53
|
-
const chunks = [];
|
|
54
19
|
try {
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
20
|
+
const path = "path" in event ? event.path : event.rawPath;
|
|
21
|
+
let requestContentEncoding;
|
|
22
|
+
let requestAcceptEncoding;
|
|
23
|
+
for (const [key, value] of Object.entries(event.headers)) if (key.localeCompare("content-encoding", void 0, { sensitivity: "accent" }) === 0) requestContentEncoding = value;
|
|
24
|
+
else if (key.localeCompare("accept-encoding", void 0, { sensitivity: "accent" }) === 0) requestAcceptEncoding = value;
|
|
25
|
+
let inputReader;
|
|
26
|
+
if (!event.body) inputReader = require_utils.emptyInputReader();
|
|
27
|
+
else {
|
|
28
|
+
let bodyBuffer;
|
|
29
|
+
if (event.isBase64Encoded) bodyBuffer = node_buffer.Buffer.from(event.body, "base64");
|
|
30
|
+
else bodyBuffer = node_buffer.Buffer.from(new TextEncoder().encode(event.body));
|
|
31
|
+
if (requestContentEncoding && requestContentEncoding.includes("zstd")) {
|
|
32
|
+
if (!this.compressionSupported) throw new Error("The input is compressed using zstd, but this lambda deployment doesn't support compression. Make sure to deploy the Lambda using Node > 22");
|
|
33
|
+
bodyBuffer = node_zlib.zstdDecompressSync(bodyBuffer);
|
|
34
|
+
}
|
|
35
|
+
inputReader = (async function* () {
|
|
36
|
+
yield bodyBuffer;
|
|
37
|
+
})()[Symbol.asyncIterator]();
|
|
38
|
+
}
|
|
39
|
+
const chunks = [];
|
|
40
|
+
const outputWriter = {
|
|
41
|
+
write: function(value) {
|
|
42
|
+
chunks.push(value);
|
|
43
|
+
return Promise.resolve();
|
|
65
44
|
},
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
45
|
+
close: function() {
|
|
46
|
+
return Promise.resolve();
|
|
47
|
+
}
|
|
48
|
+
};
|
|
49
|
+
const response = this.handler.handle({
|
|
50
|
+
headers: event.headers,
|
|
51
|
+
url: path,
|
|
52
|
+
extraArgs: [context]
|
|
53
|
+
}, { AWSRequestId: context.awsRequestId });
|
|
54
|
+
try {
|
|
55
|
+
await response.process({
|
|
56
|
+
inputReader,
|
|
57
|
+
outputWriter,
|
|
58
|
+
abortSignal: abortController.signal
|
|
59
|
+
});
|
|
60
|
+
} catch (e) {
|
|
61
|
+
const error = require_errors.ensureError(e);
|
|
62
|
+
(require_utils.tryCreateContextualLogger(this.handler.endpoint.loggerTransport, path, event.headers) ?? this.handler.endpoint.rlog).error("Unexpected error: " + (error.stack ?? error.message));
|
|
63
|
+
return {
|
|
64
|
+
headers: {
|
|
65
|
+
"content-type": "application/json",
|
|
66
|
+
"x-restate-server": require_user_agent.X_RESTATE_SERVER
|
|
67
|
+
},
|
|
68
|
+
statusCode: 500,
|
|
69
|
+
isBase64Encoded: false,
|
|
70
|
+
body: JSON.stringify({ message: error.message })
|
|
71
|
+
};
|
|
72
|
+
}
|
|
73
|
+
const responseBodyBuffer = node_buffer.Buffer.concat(chunks);
|
|
74
|
+
let responseBody;
|
|
75
|
+
if (this.compressionSupported && responseBodyBuffer.length > RESPONSE_COMPRESSION_THRESHOLD && requestAcceptEncoding && requestAcceptEncoding.includes("zstd")) {
|
|
76
|
+
response.headers["content-encoding"] = "zstd";
|
|
77
|
+
responseBody = node_zlib.zstdCompressSync(responseBodyBuffer).toString("base64");
|
|
78
|
+
} else responseBody = responseBodyBuffer.toString("base64");
|
|
79
|
+
return {
|
|
80
|
+
headers: response.headers,
|
|
81
|
+
statusCode: response.statusCode,
|
|
82
|
+
isBase64Encoded: true,
|
|
83
|
+
body: responseBody
|
|
69
84
|
};
|
|
70
85
|
} finally {
|
|
71
86
|
abortController.abort();
|
|
72
87
|
}
|
|
73
|
-
const responseBodyBuffer = node_buffer.Buffer.concat(chunks);
|
|
74
|
-
let responseBody;
|
|
75
|
-
if (this.compressionSupported && responseBodyBuffer.length > RESPONSE_COMPRESSION_THRESHOLD && requestAcceptEncoding && requestAcceptEncoding.includes("zstd")) {
|
|
76
|
-
response.headers["content-encoding"] = "zstd";
|
|
77
|
-
responseBody = node_zlib.zstdCompressSync(responseBodyBuffer).toString("base64");
|
|
78
|
-
} else responseBody = responseBodyBuffer.toString("base64");
|
|
79
|
-
return {
|
|
80
|
-
headers: response.headers,
|
|
81
|
-
statusCode: response.statusCode,
|
|
82
|
-
isBase64Encoded: true,
|
|
83
|
-
body: responseBody
|
|
84
|
-
};
|
|
85
88
|
}
|
|
86
89
|
};
|
|
87
90
|
function isCompressionSupported() {
|
|
@@ -1,9 +1,9 @@
|
|
|
1
1
|
import type { APIGatewayProxyEvent, APIGatewayProxyEventV2, APIGatewayProxyResult, APIGatewayProxyStructuredResultV2, Context } from "aws-lambda";
|
|
2
|
-
import
|
|
2
|
+
import { RestateHandler } from "./types.js";
|
|
3
3
|
export declare class LambdaHandler {
|
|
4
4
|
private readonly handler;
|
|
5
5
|
private readonly compressionSupported;
|
|
6
|
-
constructor(handler:
|
|
6
|
+
constructor(handler: RestateHandler, compressionSupported: boolean);
|
|
7
7
|
handleRequest(event: APIGatewayProxyEvent | APIGatewayProxyEventV2, context: Context): Promise<APIGatewayProxyResult | APIGatewayProxyStructuredResultV2>;
|
|
8
8
|
}
|
|
9
9
|
export declare function isCompressionSupported(): boolean;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"lambda.d.ts","sourceRoot":"","sources":["../../../src/endpoint/handlers/lambda.ts"],"names":[],"mappings":"AAWA,OAAO,KAAK,EACV,oBAAoB,EACpB,sBAAsB,EACtB,qBAAqB,EACrB,iCAAiC,EACjC,OAAO,EACR,MAAM,YAAY,CAAC;
|
|
1
|
+
{"version":3,"file":"lambda.d.ts","sourceRoot":"","sources":["../../../src/endpoint/handlers/lambda.ts"],"names":[],"mappings":"AAWA,OAAO,KAAK,EACV,oBAAoB,EACpB,sBAAsB,EACtB,qBAAqB,EACrB,iCAAiC,EACjC,OAAO,EACR,MAAM,YAAY,CAAC;AAKpB,OAAO,EAA6B,cAAc,EAAE,MAAM,YAAY,CAAC;AAKvE,qBAAa,aAAa;IAEtB,OAAO,CAAC,QAAQ,CAAC,OAAO;IACxB,OAAO,CAAC,QAAQ,CAAC,oBAAoB;gBADpB,OAAO,EAAE,cAAc,EACvB,oBAAoB,EAAE,OAAO;IAG1C,aAAa,CACjB,KAAK,EAAE,oBAAoB,GAAG,sBAAsB,EACpD,OAAO,EAAE,OAAO,GACf,OAAO,CAAC,qBAAqB,GAAG,iCAAiC,CAAC;CA8ItE;AAED,wBAAgB,sBAAsB,YAErC"}
|
|
@@ -1,8 +1,6 @@
|
|
|
1
1
|
import { ensureError } from "../../types/errors.js";
|
|
2
2
|
import { X_RESTATE_SERVER } from "../../user_agent.js";
|
|
3
|
-
import {
|
|
4
|
-
import { tryCreateContextualLogger } from "./generic.js";
|
|
5
|
-
import { WritableStream } from "node:stream/web";
|
|
3
|
+
import { emptyInputReader, tryCreateContextualLogger } from "./utils.js";
|
|
6
4
|
import { Buffer } from "node:buffer";
|
|
7
5
|
import * as zlib from "node:zlib";
|
|
8
6
|
|
|
@@ -14,70 +12,76 @@ var LambdaHandler = class {
|
|
|
14
12
|
this.compressionSupported = compressionSupported;
|
|
15
13
|
}
|
|
16
14
|
async handleRequest(event, context) {
|
|
17
|
-
const path = "path" in event ? event.path : event.rawPath;
|
|
18
|
-
let requestContentEncoding;
|
|
19
|
-
let requestAcceptEncoding;
|
|
20
|
-
for (const [key, value] of Object.entries(event.headers)) if (key.localeCompare("content-encoding", void 0, { sensitivity: "accent" }) === 0) requestContentEncoding = value;
|
|
21
|
-
else if (key.localeCompare("accept-encoding", void 0, { sensitivity: "accent" }) === 0) requestAcceptEncoding = value;
|
|
22
|
-
let bodyStream;
|
|
23
|
-
if (!event.body) bodyStream = null;
|
|
24
|
-
else {
|
|
25
|
-
let bodyBuffer;
|
|
26
|
-
if (event.isBase64Encoded) bodyBuffer = Buffer.from(event.body, "base64");
|
|
27
|
-
else bodyBuffer = Buffer.from(new TextEncoder().encode(event.body));
|
|
28
|
-
if (requestContentEncoding && requestContentEncoding.includes("zstd")) {
|
|
29
|
-
if (!this.compressionSupported) throw new Error("The input is compressed using zstd, but this lambda deployment doesn't support compression. Make sure to deploy the Lambda using Node > 22");
|
|
30
|
-
bodyBuffer = zlib.zstdDecompressSync(bodyBuffer);
|
|
31
|
-
}
|
|
32
|
-
bodyStream = OnceStream(bodyBuffer);
|
|
33
|
-
}
|
|
34
15
|
const abortController = new AbortController();
|
|
35
|
-
const request = {
|
|
36
|
-
body: bodyStream,
|
|
37
|
-
headers: event.headers,
|
|
38
|
-
url: path,
|
|
39
|
-
extraArgs: [context],
|
|
40
|
-
abortSignal: abortController.signal
|
|
41
|
-
};
|
|
42
|
-
let response;
|
|
43
|
-
try {
|
|
44
|
-
response = await this.handler.handle(request, { AWSRequestId: context.awsRequestId });
|
|
45
|
-
} catch (e) {
|
|
46
|
-
abortController.abort();
|
|
47
|
-
throw e;
|
|
48
|
-
}
|
|
49
|
-
const chunks = [];
|
|
50
16
|
try {
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
17
|
+
const path = "path" in event ? event.path : event.rawPath;
|
|
18
|
+
let requestContentEncoding;
|
|
19
|
+
let requestAcceptEncoding;
|
|
20
|
+
for (const [key, value] of Object.entries(event.headers)) if (key.localeCompare("content-encoding", void 0, { sensitivity: "accent" }) === 0) requestContentEncoding = value;
|
|
21
|
+
else if (key.localeCompare("accept-encoding", void 0, { sensitivity: "accent" }) === 0) requestAcceptEncoding = value;
|
|
22
|
+
let inputReader;
|
|
23
|
+
if (!event.body) inputReader = emptyInputReader();
|
|
24
|
+
else {
|
|
25
|
+
let bodyBuffer;
|
|
26
|
+
if (event.isBase64Encoded) bodyBuffer = Buffer.from(event.body, "base64");
|
|
27
|
+
else bodyBuffer = Buffer.from(new TextEncoder().encode(event.body));
|
|
28
|
+
if (requestContentEncoding && requestContentEncoding.includes("zstd")) {
|
|
29
|
+
if (!this.compressionSupported) throw new Error("The input is compressed using zstd, but this lambda deployment doesn't support compression. Make sure to deploy the Lambda using Node > 22");
|
|
30
|
+
bodyBuffer = zlib.zstdDecompressSync(bodyBuffer);
|
|
31
|
+
}
|
|
32
|
+
inputReader = (async function* () {
|
|
33
|
+
yield bodyBuffer;
|
|
34
|
+
})()[Symbol.asyncIterator]();
|
|
35
|
+
}
|
|
36
|
+
const chunks = [];
|
|
37
|
+
const outputWriter = {
|
|
38
|
+
write: function(value) {
|
|
39
|
+
chunks.push(value);
|
|
40
|
+
return Promise.resolve();
|
|
61
41
|
},
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
42
|
+
close: function() {
|
|
43
|
+
return Promise.resolve();
|
|
44
|
+
}
|
|
45
|
+
};
|
|
46
|
+
const response = this.handler.handle({
|
|
47
|
+
headers: event.headers,
|
|
48
|
+
url: path,
|
|
49
|
+
extraArgs: [context]
|
|
50
|
+
}, { AWSRequestId: context.awsRequestId });
|
|
51
|
+
try {
|
|
52
|
+
await response.process({
|
|
53
|
+
inputReader,
|
|
54
|
+
outputWriter,
|
|
55
|
+
abortSignal: abortController.signal
|
|
56
|
+
});
|
|
57
|
+
} catch (e) {
|
|
58
|
+
const error = ensureError(e);
|
|
59
|
+
(tryCreateContextualLogger(this.handler.endpoint.loggerTransport, path, event.headers) ?? this.handler.endpoint.rlog).error("Unexpected error: " + (error.stack ?? error.message));
|
|
60
|
+
return {
|
|
61
|
+
headers: {
|
|
62
|
+
"content-type": "application/json",
|
|
63
|
+
"x-restate-server": X_RESTATE_SERVER
|
|
64
|
+
},
|
|
65
|
+
statusCode: 500,
|
|
66
|
+
isBase64Encoded: false,
|
|
67
|
+
body: JSON.stringify({ message: error.message })
|
|
68
|
+
};
|
|
69
|
+
}
|
|
70
|
+
const responseBodyBuffer = Buffer.concat(chunks);
|
|
71
|
+
let responseBody;
|
|
72
|
+
if (this.compressionSupported && responseBodyBuffer.length > RESPONSE_COMPRESSION_THRESHOLD && requestAcceptEncoding && requestAcceptEncoding.includes("zstd")) {
|
|
73
|
+
response.headers["content-encoding"] = "zstd";
|
|
74
|
+
responseBody = zlib.zstdCompressSync(responseBodyBuffer).toString("base64");
|
|
75
|
+
} else responseBody = responseBodyBuffer.toString("base64");
|
|
76
|
+
return {
|
|
77
|
+
headers: response.headers,
|
|
78
|
+
statusCode: response.statusCode,
|
|
79
|
+
isBase64Encoded: true,
|
|
80
|
+
body: responseBody
|
|
65
81
|
};
|
|
66
82
|
} finally {
|
|
67
83
|
abortController.abort();
|
|
68
84
|
}
|
|
69
|
-
const responseBodyBuffer = Buffer.concat(chunks);
|
|
70
|
-
let responseBody;
|
|
71
|
-
if (this.compressionSupported && responseBodyBuffer.length > RESPONSE_COMPRESSION_THRESHOLD && requestAcceptEncoding && requestAcceptEncoding.includes("zstd")) {
|
|
72
|
-
response.headers["content-encoding"] = "zstd";
|
|
73
|
-
responseBody = zlib.zstdCompressSync(responseBodyBuffer).toString("base64");
|
|
74
|
-
} else responseBody = responseBodyBuffer.toString("base64");
|
|
75
|
-
return {
|
|
76
|
-
headers: response.headers,
|
|
77
|
-
statusCode: response.statusCode,
|
|
78
|
-
isBase64Encoded: true,
|
|
79
|
-
body: responseBody
|
|
80
|
-
};
|
|
81
85
|
}
|
|
82
86
|
};
|
|
83
87
|
function isCompressionSupported() {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"lambda.js","names":["handler:
|
|
1
|
+
{"version":3,"file":"lambda.js","names":["handler: RestateHandler","compressionSupported: boolean","inputReader: InputReader","bodyBuffer: Buffer | undefined","chunks: Uint8Array[]","outputWriter: OutputWriter"],"sources":["../../../src/endpoint/handlers/lambda.ts"],"sourcesContent":["/*\n * Copyright (c) 2023-2024 - Restate Software, Inc., Restate GmbH\n *\n * This file is part of the Restate SDK for Node.js/TypeScript,\n * which is released under the MIT license.\n *\n * You can find a copy of the license in file LICENSE in the root\n * directory of this repository or package, or at\n * https://github.com/restatedev/sdk-typescript/blob/main/LICENSE\n */\n\nimport type {\n APIGatewayProxyEvent,\n APIGatewayProxyEventV2,\n APIGatewayProxyResult,\n APIGatewayProxyStructuredResultV2,\n Context,\n} from \"aws-lambda\";\nimport { Buffer } from \"node:buffer\";\nimport { X_RESTATE_SERVER } from \"../../user_agent.js\";\nimport { ensureError } from \"../../types/errors.js\";\nimport * as zlib from \"node:zlib\";\nimport { InputReader, OutputWriter, RestateHandler } from \"./types.js\";\nimport { emptyInputReader, tryCreateContextualLogger } from \"./utils.js\";\n\nconst RESPONSE_COMPRESSION_THRESHOLD = 3 * 1024 * 1024;\n\nexport class LambdaHandler {\n constructor(\n private readonly handler: RestateHandler,\n private readonly compressionSupported: boolean\n ) {}\n\n async handleRequest(\n event: APIGatewayProxyEvent | APIGatewayProxyEventV2,\n context: Context\n ): Promise<APIGatewayProxyResult | APIGatewayProxyStructuredResultV2> {\n const abortController = new AbortController();\n try {\n //\n // Request path\n //\n const path = \"path\" in event ? 
event.path : event.rawPath;\n\n // Deal with content-encoding\n let requestContentEncoding;\n let requestAcceptEncoding;\n for (const [key, value] of Object.entries(event.headers)) {\n if (\n key.localeCompare(\"content-encoding\", undefined, {\n sensitivity: \"accent\",\n }) === 0\n ) {\n requestContentEncoding = value;\n } else if (\n key.localeCompare(\"accept-encoding\", undefined, {\n sensitivity: \"accent\",\n }) === 0\n ) {\n requestAcceptEncoding = value;\n }\n }\n\n //\n // Convert the request body to a Uint8Array stream\n // Lambda functions receive the body as base64 encoded string\n //\n let inputReader: InputReader;\n if (!event.body) {\n inputReader = emptyInputReader();\n } else {\n let bodyBuffer: Buffer | undefined;\n if (event.isBase64Encoded) {\n bodyBuffer = Buffer.from(event.body, \"base64\");\n } else {\n bodyBuffer = Buffer.from(new TextEncoder().encode(event.body));\n }\n\n // Now decode if needed\n if (requestContentEncoding && requestContentEncoding.includes(\"zstd\")) {\n if (!this.compressionSupported) {\n throw new Error(\n \"The input is compressed using zstd, but this lambda deployment doesn't support compression. 
Make sure to deploy the Lambda using Node > 22\"\n );\n }\n\n // Input encoded with zstd, let's decode it!\n bodyBuffer = (\n zlib as unknown as { zstdDecompressSync: (b: Buffer) => Buffer }\n ).zstdDecompressSync(bodyBuffer);\n }\n\n // Prep the stream to pass through the endpoint handler\n // eslint-disable-next-line @typescript-eslint/require-await\n inputReader = (async function* () {\n yield bodyBuffer as Uint8Array;\n })()[Symbol.asyncIterator]();\n }\n\n const chunks: Uint8Array[] = [];\n const outputWriter: OutputWriter = {\n write: function (value: Uint8Array): Promise<void> {\n chunks.push(value);\n return Promise.resolve();\n },\n close: function (): Promise<void> {\n return Promise.resolve();\n },\n };\n\n const response = this.handler.handle(\n {\n headers: event.headers,\n url: path,\n extraArgs: [context],\n },\n {\n AWSRequestId: context.awsRequestId,\n }\n );\n\n try {\n await response.process({\n inputReader,\n outputWriter,\n abortSignal: abortController.signal,\n });\n } catch (e) {\n // handle should never throw\n const error = ensureError(e);\n const logger =\n tryCreateContextualLogger(\n this.handler.endpoint.loggerTransport,\n path,\n event.headers\n ) ?? this.handler.endpoint.rlog;\n logger.error(\"Unexpected error: \" + (error.stack ?? 
error.message));\n return {\n headers: {\n \"content-type\": \"application/json\",\n \"x-restate-server\": X_RESTATE_SERVER,\n },\n statusCode: 500,\n isBase64Encoded: false,\n body: JSON.stringify({ message: error.message }),\n };\n }\n\n const responseBodyBuffer = Buffer.concat(chunks);\n let responseBody;\n\n // Now let's encode if we need to.\n if (\n this.compressionSupported &&\n responseBodyBuffer.length > RESPONSE_COMPRESSION_THRESHOLD &&\n requestAcceptEncoding &&\n requestAcceptEncoding.includes(\"zstd\")\n ) {\n response.headers[\"content-encoding\"] = \"zstd\";\n\n responseBody = (\n zlib as unknown as { zstdCompressSync: (b: Buffer) => Buffer }\n )\n .zstdCompressSync(responseBodyBuffer)\n .toString(\"base64\");\n } else {\n responseBody = responseBodyBuffer.toString(\"base64\");\n }\n return {\n headers: response.headers,\n statusCode: response.statusCode,\n isBase64Encoded: true,\n body: responseBody,\n };\n } finally {\n abortController.abort();\n }\n }\n}\n\nexport function isCompressionSupported() {\n return \"zstdDecompressSync\" in zlib && \"zstdCompressSync\" in 
zlib;\n}\n"],"mappings":";;;;;;;AAyBA,MAAM,iCAAiC,IAAI,OAAO;AAElD,IAAa,gBAAb,MAA2B;CACzB,YACE,AAAiBA,SACjB,AAAiBC,sBACjB;EAFiB;EACA;;CAGnB,MAAM,cACJ,OACA,SACoE;EACpE,MAAM,kBAAkB,IAAI,iBAAiB;AAC7C,MAAI;GAIF,MAAM,OAAO,UAAU,QAAQ,MAAM,OAAO,MAAM;GAGlD,IAAI;GACJ,IAAI;AACJ,QAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,MAAM,QAAQ,CACtD,KACE,IAAI,cAAc,oBAAoB,QAAW,EAC/C,aAAa,UACd,CAAC,KAAK,EAEP,0BAAyB;YAEzB,IAAI,cAAc,mBAAmB,QAAW,EAC9C,aAAa,UACd,CAAC,KAAK,EAEP,yBAAwB;GAQ5B,IAAIC;AACJ,OAAI,CAAC,MAAM,KACT,eAAc,kBAAkB;QAC3B;IACL,IAAIC;AACJ,QAAI,MAAM,gBACR,cAAa,OAAO,KAAK,MAAM,MAAM,SAAS;QAE9C,cAAa,OAAO,KAAK,IAAI,aAAa,CAAC,OAAO,MAAM,KAAK,CAAC;AAIhE,QAAI,0BAA0B,uBAAuB,SAAS,OAAO,EAAE;AACrE,SAAI,CAAC,KAAK,qBACR,OAAM,IAAI,MACR,6IACD;AAIH,kBACE,KACA,mBAAmB,WAAW;;AAKlC,mBAAe,mBAAmB;AAChC,WAAM;QACJ,CAAC,OAAO,gBAAgB;;GAG9B,MAAMC,SAAuB,EAAE;GAC/B,MAAMC,eAA6B;IACjC,OAAO,SAAU,OAAkC;AACjD,YAAO,KAAK,MAAM;AAClB,YAAO,QAAQ,SAAS;;IAE1B,OAAO,WAA2B;AAChC,YAAO,QAAQ,SAAS;;IAE3B;GAED,MAAM,WAAW,KAAK,QAAQ,OAC5B;IACE,SAAS,MAAM;IACf,KAAK;IACL,WAAW,CAAC,QAAQ;IACrB,EACD,EACE,cAAc,QAAQ,cACvB,CACF;AAED,OAAI;AACF,UAAM,SAAS,QAAQ;KACrB;KACA;KACA,aAAa,gBAAgB;KAC9B,CAAC;YACK,GAAG;IAEV,MAAM,QAAQ,YAAY,EAAE;AAO5B,KALE,0BACE,KAAK,QAAQ,SAAS,iBACtB,MACA,MAAM,QACP,IAAI,KAAK,QAAQ,SAAS,MACtB,MAAM,wBAAwB,MAAM,SAAS,MAAM,SAAS;AACnE,WAAO;KACL,SAAS;MACP,gBAAgB;MAChB,oBAAoB;MACrB;KACD,YAAY;KACZ,iBAAiB;KACjB,MAAM,KAAK,UAAU,EAAE,SAAS,MAAM,SAAS,CAAC;KACjD;;GAGH,MAAM,qBAAqB,OAAO,OAAO,OAAO;GAChD,IAAI;AAGJ,OACE,KAAK,wBACL,mBAAmB,SAAS,kCAC5B,yBACA,sBAAsB,SAAS,OAAO,EACtC;AACA,aAAS,QAAQ,sBAAsB;AAEvC,mBACE,KAEC,iBAAiB,mBAAmB,CACpC,SAAS,SAAS;SAErB,gBAAe,mBAAmB,SAAS,SAAS;AAEtD,UAAO;IACL,SAAS,SAAS;IAClB,YAAY,SAAS;IACrB,iBAAiB;IACjB,MAAM;IACP;YACO;AACR,mBAAgB,OAAO;;;;AAK7B,SAAgB,yBAAyB;AACvC,QAAO,wBAAwB,QAAQ,sBAAsB"}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import type { Endpoint } from "../endpoint.js";
|
|
2
|
+
export interface Headers {
|
|
3
|
+
[name: string]: string | string[] | undefined;
|
|
4
|
+
}
|
|
5
|
+
export interface ResponseHeaders {
|
|
6
|
+
[name: string]: string;
|
|
7
|
+
}
|
|
8
|
+
export interface AdditionalContext {
|
|
9
|
+
[name: string]: string;
|
|
10
|
+
}
|
|
11
|
+
export interface RestateRequest {
|
|
12
|
+
readonly url: string;
|
|
13
|
+
readonly headers: Headers;
|
|
14
|
+
readonly extraArgs: unknown[];
|
|
15
|
+
}
|
|
16
|
+
export type InputReaderNextResult = {
|
|
17
|
+
done: false | undefined;
|
|
18
|
+
value: Uint8Array;
|
|
19
|
+
} | {
|
|
20
|
+
done: true;
|
|
21
|
+
value: undefined;
|
|
22
|
+
};
|
|
23
|
+
export type InputReader = AsyncIterator<Uint8Array>;
|
|
24
|
+
export interface OutputWriter {
|
|
25
|
+
write(value: Uint8Array): Promise<void>;
|
|
26
|
+
close(): Promise<void>;
|
|
27
|
+
}
|
|
28
|
+
export interface RestateResponse {
|
|
29
|
+
readonly headers: ResponseHeaders;
|
|
30
|
+
readonly statusCode: number;
|
|
31
|
+
process(value: {
|
|
32
|
+
inputReader: InputReader;
|
|
33
|
+
outputWriter: OutputWriter;
|
|
34
|
+
abortSignal: AbortSignal;
|
|
35
|
+
}): Promise<void>;
|
|
36
|
+
}
|
|
37
|
+
export interface RestateHandler {
|
|
38
|
+
endpoint: Endpoint;
|
|
39
|
+
handle(request: RestateRequest, context?: AdditionalContext): RestateResponse;
|
|
40
|
+
}
|
|
41
|
+
//# sourceMappingURL=types.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../../src/endpoint/handlers/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAE/C,MAAM,WAAW,OAAO;IACtB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS,CAAC;CAC/C;AAED,MAAM,WAAW,eAAe;IAC9B,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;CACxB;AAED,MAAM,WAAW,iBAAiB;IAChC,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAAC;CACxB;AAED,MAAM,WAAW,cAAc;IAC7B,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;IACrB,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B,QAAQ,CAAC,SAAS,EAAE,OAAO,EAAE,CAAC;CAC/B;AAED,MAAM,MAAM,qBAAqB,GAC7B;IAAE,IAAI,EAAE,KAAK,GAAG,SAAS,CAAC;IAAC,KAAK,EAAE,UAAU,CAAA;CAAE,GAC9C;IAAE,IAAI,EAAE,IAAI,CAAC;IAAC,KAAK,EAAE,SAAS,CAAA;CAAE,CAAC;AAErC,MAAM,MAAM,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,CAAC;AAEpD,MAAM,WAAW,YAAY;IAE3B,KAAK,CAAC,KAAK,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACxC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;CACxB;AAED,MAAM,WAAW,eAAe;IAC9B,QAAQ,CAAC,OAAO,EAAE,eAAe,CAAC;IAClC,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;IAK5B,OAAO,CAAC,KAAK,EAAE;QACb,WAAW,EAAE,WAAW,CAAC;QACzB,YAAY,EAAE,YAAY,CAAC;QAC3B,WAAW,EAAE,WAAW,CAAC;KAC1B,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;CACnB;AAED,MAAM,WAAW,cAAc;IAE7B,QAAQ,EAAE,QAAQ,CAAC;IAEnB,MAAM,CAAC,OAAO,EAAE,cAAc,EAAE,OAAO,CAAC,EAAE,iBAAiB,GAAG,eAAe,CAAC;CAC/E"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/endpoint/handlers/types.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
const require_logger_transport = require('../../logging/logger_transport.cjs');
|
|
2
|
+
const require_components = require('../components.cjs');
|
|
3
|
+
const require_logger = require('../../logging/logger.cjs');
|
|
4
|
+
const require_user_agent = require('../../user_agent.cjs');
|
|
5
|
+
|
|
6
|
+
//#region src/endpoint/handlers/utils.ts
|
|
7
|
+
function tryCreateContextualLogger(loggerTransport, url, headers, additionalContext) {
|
|
8
|
+
try {
|
|
9
|
+
const path = new URL(url, "https://example.com").pathname;
|
|
10
|
+
const parsed = require_components.parseUrlComponents(path);
|
|
11
|
+
if (parsed.type !== "invoke") return;
|
|
12
|
+
const invocationId = invocationIdFromHeaders(headers);
|
|
13
|
+
return require_logger.createLogger(loggerTransport, require_logger_transport.LogSource.SYSTEM, new require_logger_transport.LoggerContext(invocationId, parsed.componentName, parsed.handlerName, void 0, void 0, additionalContext));
|
|
14
|
+
} catch {
|
|
15
|
+
return;
|
|
16
|
+
}
|
|
17
|
+
}
|
|
18
|
+
function invocationIdFromHeaders(headers) {
|
|
19
|
+
const invocationIdHeader = headers["x-restate-invocation-id"];
|
|
20
|
+
return typeof invocationIdHeader === "string" ? invocationIdHeader : Array.isArray(invocationIdHeader) ? invocationIdHeader[0] ?? "unknown id" : "unknown id";
|
|
21
|
+
}
|
|
22
|
+
function errorResponse(code, message) {
|
|
23
|
+
return simpleResponse(code, {
|
|
24
|
+
"content-type": "application/json",
|
|
25
|
+
"x-restate-server": require_user_agent.X_RESTATE_SERVER
|
|
26
|
+
}, new TextEncoder().encode(JSON.stringify({ message })));
|
|
27
|
+
}
|
|
28
|
+
// Construct a RestateResponse whose process() drains any request body,
// writes the given payload once, then closes the output writer (which also
// closes the underlying stream).
function simpleResponse(statusCode, headers, body) {
	async function process({ inputReader, outputWriter }) {
		// Fully consume the request body before responding.
		if (inputReader !== undefined) {
			for (;;) {
				const chunk = await inputReader.next();
				if (chunk.done) break;
			}
		}
		await outputWriter.write(body);
		await outputWriter.close();
	}
	return { headers, statusCode, process };
}
|
|
42
|
+
// An input reader that is immediately exhausted — represents an empty
// request body.
function emptyInputReader() {
	async function* nothing() {}
	return nothing()[Symbol.asyncIterator]();
}
|
|
45
|
+
|
|
46
|
+
//#endregion
|
|
47
|
+
// Public surface of this module (CommonJS).
Object.assign(exports, {
	emptyInputReader,
	errorResponse,
	invocationIdFromHeaders,
	simpleResponse,
	tryCreateContextualLogger
});
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import { LoggerTransport } from "../../logging/logger_transport.js";
|
|
2
|
+
import type { Headers, InputReader, ResponseHeaders, RestateResponse } from "./types.js";
|
|
3
|
+
import { Logger } from "../../logging/logger.js";
|
|
4
|
+
export declare function tryCreateContextualLogger(loggerTransport: LoggerTransport, url: string, headers: Headers, additionalContext?: {
|
|
5
|
+
[name: string]: string;
|
|
6
|
+
}): Logger | undefined;
|
|
7
|
+
export declare function invocationIdFromHeaders(headers: Headers): string;
|
|
8
|
+
export declare function errorResponse(code: number, message: string): RestateResponse;
|
|
9
|
+
export declare function simpleResponse(statusCode: number, headers: ResponseHeaders, body: Uint8Array): RestateResponse;
|
|
10
|
+
export declare function emptyInputReader(): InputReader;
|
|
11
|
+
//# sourceMappingURL=utils.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../src/endpoint/handlers/utils.ts"],"names":[],"mappings":"AAAA,OAAO,EAGL,eAAe,EAChB,MAAM,mCAAmC,CAAC;AAC3C,OAAO,KAAK,EACV,OAAO,EACP,WAAW,EACX,eAAe,EACf,eAAe,EAChB,MAAM,YAAY,CAAC;AACpB,OAAO,EAAgB,MAAM,EAAE,MAAM,yBAAyB,CAAC;AAI/D,wBAAgB,yBAAyB,CACvC,eAAe,EAAE,eAAe,EAChC,GAAG,EAAE,MAAM,EACX,OAAO,EAAE,OAAO,EAChB,iBAAiB,CAAC,EAAE;IAAE,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAAA;CAAE,GAC7C,MAAM,GAAG,SAAS,CAuBpB;AAED,wBAAgB,uBAAuB,CAAC,OAAO,EAAE,OAAO,UASvD;AAED,wBAAgB,aAAa,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,eAAe,CAS5E;AAED,wBAAgB,cAAc,CAC5B,UAAU,EAAE,MAAM,EAClB,OAAO,EAAE,eAAe,EACxB,IAAI,EAAE,UAAU,GACf,eAAe,CAkBjB;AAED,wBAAgB,gBAAgB,IAAI,WAAW,CAE9C"}
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import { LogSource, LoggerContext } from "../../logging/logger_transport.js";
|
|
2
|
+
import { parseUrlComponents } from "../components.js";
|
|
3
|
+
import { createLogger } from "../../logging/logger.js";
|
|
4
|
+
import { X_RESTATE_SERVER } from "../../user_agent.js";
|
|
5
|
+
|
|
6
|
+
//#region src/endpoint/handlers/utils.ts
|
|
7
|
+
// Best-effort: build a logger scoped to a single invocation, derived from the
// request path (component/handler) and the invocation-id header. Returns
// undefined for non-invoke requests or whenever parsing fails, so callers can
// fall back to a non-contextual logger.
function tryCreateContextualLogger(loggerTransport, url, headers, additionalContext) {
	try {
		// The base URL only exists to satisfy the parser; just the path matters.
		const pathname = new URL(url, "https://example.com").pathname;
		const components = parseUrlComponents(pathname);
		if (components.type !== "invoke") return undefined;
		const loggerContext = new LoggerContext(
			invocationIdFromHeaders(headers),
			components.componentName,
			components.handlerName,
			undefined,
			undefined,
			additionalContext
		);
		return createLogger(loggerTransport, LogSource.SYSTEM, loggerContext);
	} catch {
		// Logger construction must never break request handling.
		return undefined;
	}
}
|
|
18
|
+
// Extract the invocation id from the "x-restate-invocation-id" header.
// Handles both a plain string value and a multi-valued (array) header;
// falls back to "unknown id" when the header is missing or empty.
function invocationIdFromHeaders(headers) {
	const raw = headers["x-restate-invocation-id"];
	if (typeof raw === "string") return raw;
	if (Array.isArray(raw)) return raw[0] ?? "unknown id";
	return "unknown id";
}
|
|
22
|
+
// Build a JSON error response with the given HTTP status code; the body is
// a UTF-8 encoded `{ message }` object and the server header identifies
// this SDK.
function errorResponse(code, message) {
	const payload = new TextEncoder().encode(JSON.stringify({ message }));
	const responseHeaders = {
		"content-type": "application/json",
		"x-restate-server": X_RESTATE_SERVER
	};
	return simpleResponse(code, responseHeaders, payload);
}
|
|
28
|
+
// Construct a RestateResponse whose process() drains any request body,
// writes the given payload once, then closes the output writer (which also
// closes the underlying stream).
function simpleResponse(statusCode, headers, body) {
	async function process({ inputReader, outputWriter }) {
		// Fully consume the request body before responding.
		if (inputReader !== undefined) {
			for (;;) {
				const chunk = await inputReader.next();
				if (chunk.done) break;
			}
		}
		await outputWriter.write(body);
		await outputWriter.close();
	}
	return { headers, statusCode, process };
}
|
|
42
|
+
// An input reader that is immediately exhausted — represents an empty
// request body.
function emptyInputReader() {
	async function* nothing() {}
	return nothing()[Symbol.asyncIterator]();
}
|
|
45
|
+
|
|
46
|
+
//#endregion
|
|
47
|
+
export { emptyInputReader, errorResponse, invocationIdFromHeaders, simpleResponse, tryCreateContextualLogger };
|
|
48
|
+
//# sourceMappingURL=utils.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"utils.js","names":[],"sources":["../../../src/endpoint/handlers/utils.ts"],"sourcesContent":["import {\n LoggerContext,\n LogSource,\n LoggerTransport,\n} from \"../../logging/logger_transport.js\";\nimport type {\n Headers,\n InputReader,\n ResponseHeaders,\n RestateResponse,\n} from \"./types.js\";\nimport { createLogger, Logger } from \"../../logging/logger.js\";\nimport { parseUrlComponents } from \"../components.js\";\nimport { X_RESTATE_SERVER } from \"../../user_agent.js\";\n\nexport function tryCreateContextualLogger(\n loggerTransport: LoggerTransport,\n url: string,\n headers: Headers,\n additionalContext?: { [name: string]: string }\n): Logger | undefined {\n try {\n const path = new URL(url, \"https://example.com\").pathname;\n const parsed = parseUrlComponents(path);\n if (parsed.type !== \"invoke\") {\n return undefined;\n }\n const invocationId = invocationIdFromHeaders(headers);\n return createLogger(\n loggerTransport,\n LogSource.SYSTEM,\n new LoggerContext(\n invocationId,\n parsed.componentName,\n parsed.handlerName,\n undefined,\n undefined,\n additionalContext\n )\n );\n } catch {\n return undefined;\n }\n}\n\nexport function invocationIdFromHeaders(headers: Headers) {\n const invocationIdHeader = headers[\"x-restate-invocation-id\"];\n const invocationId =\n typeof invocationIdHeader === \"string\"\n ? invocationIdHeader\n : Array.isArray(invocationIdHeader)\n ? (invocationIdHeader[0] ?? 
\"unknown id\")\n : \"unknown id\";\n return invocationId;\n}\n\nexport function errorResponse(code: number, message: string): RestateResponse {\n return simpleResponse(\n code,\n {\n \"content-type\": \"application/json\",\n \"x-restate-server\": X_RESTATE_SERVER,\n },\n new TextEncoder().encode(JSON.stringify({ message }))\n );\n}\n\nexport function simpleResponse(\n statusCode: number,\n headers: ResponseHeaders,\n body: Uint8Array\n): RestateResponse {\n return {\n headers,\n statusCode,\n async process({ inputReader, outputWriter }): Promise<void> {\n if (inputReader !== undefined) {\n // Drain the input stream\n while (true) {\n const { done } = await inputReader.next();\n if (done) break;\n }\n }\n\n await outputWriter.write(body);\n // This closes both the writer and the stream!!!\n await outputWriter.close();\n },\n };\n}\n\nexport function emptyInputReader(): InputReader {\n return (async function* () {})()[Symbol.asyncIterator]();\n}\n"],"mappings":";;;;;;AAeA,SAAgB,0BACd,iBACA,KACA,SACA,mBACoB;AACpB,KAAI;EACF,MAAM,OAAO,IAAI,IAAI,KAAK,sBAAsB,CAAC;EACjD,MAAM,SAAS,mBAAmB,KAAK;AACvC,MAAI,OAAO,SAAS,SAClB;EAEF,MAAM,eAAe,wBAAwB,QAAQ;AACrD,SAAO,aACL,iBACA,UAAU,QACV,IAAI,cACF,cACA,OAAO,eACP,OAAO,aACP,QACA,QACA,kBACD,CACF;SACK;AACN;;;AAIJ,SAAgB,wBAAwB,SAAkB;CACxD,MAAM,qBAAqB,QAAQ;AAOnC,QALE,OAAO,uBAAuB,WAC1B,qBACA,MAAM,QAAQ,mBAAmB,GAC9B,mBAAmB,MAAM,eAC1B;;AAIV,SAAgB,cAAc,MAAc,SAAkC;AAC5E,QAAO,eACL,MACA;EACE,gBAAgB;EAChB,oBAAoB;EACrB,EACD,IAAI,aAAa,CAAC,OAAO,KAAK,UAAU,EAAE,SAAS,CAAC,CAAC,CACtD;;AAGH,SAAgB,eACd,YACA,SACA,MACiB;AACjB,QAAO;EACL;EACA;EACA,MAAM,QAAQ,EAAE,aAAa,gBAA+B;AAC1D,OAAI,gBAAgB,OAElB,QAAO,MAAM;IACX,MAAM,EAAE,SAAS,MAAM,YAAY,MAAM;AACzC,QAAI,KAAM;;AAId,SAAM,aAAa,MAAM,KAAK;AAE9B,SAAM,aAAa,OAAO;;EAE7B;;AAGH,SAAgB,mBAAgC;AAC9C,SAAQ,mBAAmB,KAAK,CAAC,OAAO,gBAAgB"}
|