@atproto/pds 0.4.59 → 0.4.61
Sign up to get free protection for your applications and to get access to all the features.
- package/CHANGELOG.md +48 -0
- package/dist/account-manager/helpers/account.d.ts +1 -0
- package/dist/account-manager/helpers/account.d.ts.map +1 -1
- package/dist/account-manager/helpers/account.js +15 -1
- package/dist/account-manager/helpers/account.js.map +1 -1
- package/dist/account-manager/helpers/invite.d.ts +1 -1
- package/dist/account-manager/helpers/invite.d.ts.map +1 -1
- package/dist/account-manager/helpers/invite.js +20 -9
- package/dist/account-manager/helpers/invite.js.map +1 -1
- package/dist/account-manager/index.d.ts +2 -0
- package/dist/account-manager/index.d.ts.map +1 -1
- package/dist/account-manager/index.js +8 -1
- package/dist/account-manager/index.js.map +1 -1
- package/dist/api/app/bsky/actor/getProfile.d.ts.map +1 -1
- package/dist/api/app/bsky/actor/getProfile.js +2 -9
- package/dist/api/app/bsky/actor/getProfile.js.map +1 -1
- package/dist/api/app/bsky/actor/getProfiles.d.ts.map +1 -1
- package/dist/api/app/bsky/actor/getProfiles.js +2 -6
- package/dist/api/app/bsky/actor/getProfiles.js.map +1 -1
- package/dist/api/app/bsky/feed/getActorLikes.d.ts.map +1 -1
- package/dist/api/app/bsky/feed/getActorLikes.js +2 -9
- package/dist/api/app/bsky/feed/getActorLikes.js.map +1 -1
- package/dist/api/app/bsky/feed/getAuthorFeed.d.ts.map +1 -1
- package/dist/api/app/bsky/feed/getAuthorFeed.js +2 -9
- package/dist/api/app/bsky/feed/getAuthorFeed.js.map +1 -1
- package/dist/api/app/bsky/feed/getFeed.d.ts.map +1 -1
- package/dist/api/app/bsky/feed/getFeed.js +2 -1
- package/dist/api/app/bsky/feed/getFeed.js.map +1 -1
- package/dist/api/app/bsky/feed/getPostThread.d.ts.map +1 -1
- package/dist/api/app/bsky/feed/getPostThread.js +12 -14
- package/dist/api/app/bsky/feed/getPostThread.js.map +1 -1
- package/dist/api/app/bsky/feed/getTimeline.d.ts.map +1 -1
- package/dist/api/app/bsky/feed/getTimeline.js +2 -6
- package/dist/api/app/bsky/feed/getTimeline.js.map +1 -1
- package/dist/api/com/atproto/admin/getAccountInfo.d.ts.map +1 -1
- package/dist/api/com/atproto/admin/getAccountInfo.js +6 -14
- package/dist/api/com/atproto/admin/getAccountInfo.js.map +1 -1
- package/dist/api/com/atproto/admin/getAccountInfos.d.ts +4 -0
- package/dist/api/com/atproto/admin/getAccountInfos.d.ts.map +1 -0
- package/dist/api/com/atproto/admin/getAccountInfos.js +32 -0
- package/dist/api/com/atproto/admin/getAccountInfos.js.map +1 -0
- package/dist/api/com/atproto/admin/index.d.ts.map +1 -1
- package/dist/api/com/atproto/admin/index.js +2 -0
- package/dist/api/com/atproto/admin/index.js.map +1 -1
- package/dist/api/com/atproto/admin/util.d.ts +17 -0
- package/dist/api/com/atproto/admin/util.d.ts.map +1 -1
- package/dist/api/com/atproto/admin/util.js +27 -1
- package/dist/api/com/atproto/admin/util.js.map +1 -1
- package/dist/api/com/atproto/repo/getRecord.d.ts.map +1 -1
- package/dist/api/com/atproto/repo/getRecord.js +2 -2
- package/dist/api/com/atproto/repo/getRecord.js.map +1 -1
- package/dist/api/com/atproto/server/requestPasswordReset.js +1 -1
- package/dist/api/com/atproto/server/requestPasswordReset.js.map +1 -1
- package/dist/config/config.d.ts +17 -0
- package/dist/config/config.d.ts.map +1 -1
- package/dist/config/config.js +11 -1
- package/dist/config/config.js.map +1 -1
- package/dist/config/env.d.ts +7 -1
- package/dist/config/env.d.ts.map +1 -1
- package/dist/config/env.js +9 -1
- package/dist/config/env.js.map +1 -1
- package/dist/context.d.ts +6 -2
- package/dist/context.d.ts.map +1 -1
- package/dist/context.js +55 -11
- package/dist/context.js.map +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -0
- package/dist/index.js.map +1 -1
- package/dist/lexicon/index.d.ts +4 -0
- package/dist/lexicon/index.d.ts.map +1 -1
- package/dist/lexicon/index.js +8 -0
- package/dist/lexicon/index.js.map +1 -1
- package/dist/lexicon/lexicons.d.ts +118 -0
- package/dist/lexicon/lexicons.d.ts.map +1 -1
- package/dist/lexicon/lexicons.js +135 -3
- package/dist/lexicon/lexicons.js.map +1 -1
- package/dist/lexicon/types/app/bsky/actor/defs.d.ts +2 -0
- package/dist/lexicon/types/app/bsky/actor/defs.d.ts.map +1 -1
- package/dist/lexicon/types/app/bsky/actor/defs.js.map +1 -1
- package/dist/lexicon/types/app/bsky/actor/profile.d.ts +1 -0
- package/dist/lexicon/types/app/bsky/actor/profile.d.ts.map +1 -1
- package/dist/lexicon/types/app/bsky/actor/profile.js.map +1 -1
- package/dist/lexicon/types/app/bsky/feed/defs.d.ts +13 -2
- package/dist/lexicon/types/app/bsky/feed/defs.d.ts.map +1 -1
- package/dist/lexicon/types/app/bsky/feed/defs.js +21 -1
- package/dist/lexicon/types/app/bsky/feed/defs.js.map +1 -1
- package/dist/lexicon/types/app/bsky/feed/getAuthorFeed.d.ts +1 -0
- package/dist/lexicon/types/app/bsky/feed/getAuthorFeed.d.ts.map +1 -1
- package/dist/lexicon/types/app/bsky/graph/getSuggestedFollowsByActor.d.ts +2 -0
- package/dist/lexicon/types/app/bsky/graph/getSuggestedFollowsByActor.d.ts.map +1 -1
- package/dist/lexicon/types/app/bsky/unspecced/getSuggestionsSkeleton.d.ts +2 -0
- package/dist/lexicon/types/app/bsky/unspecced/getSuggestionsSkeleton.d.ts.map +1 -1
- package/dist/lexicon/types/com/atproto/repo/getRecord.d.ts +1 -0
- package/dist/lexicon/types/com/atproto/repo/getRecord.d.ts.map +1 -1
- package/dist/lexicon/types/tools/ozone/moderation/getRecords.d.ts +39 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRecords.d.ts.map +1 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRecords.js +3 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRecords.js.map +1 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRepos.d.ts +39 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRepos.d.ts.map +1 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRepos.js +3 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRepos.js.map +1 -0
- package/dist/mailer/index.d.ts +1 -1
- package/dist/mailer/index.d.ts.map +1 -1
- package/dist/mailer/index.js.map +1 -1
- package/dist/mailer/templates/confirm-email.js +1 -1
- package/dist/mailer/templates/confirm-email.js.map +2 -2
- package/dist/mailer/templates/delete-account.js +1 -1
- package/dist/mailer/templates/delete-account.js.map +2 -2
- package/dist/mailer/templates/plc-operation.js +1 -1
- package/dist/mailer/templates/plc-operation.js.map +2 -2
- package/dist/mailer/templates/reset-password.js +1 -1
- package/dist/mailer/templates/reset-password.js.map +2 -2
- package/dist/mailer/templates/update-email.js +1 -1
- package/dist/mailer/templates/update-email.js.map +2 -2
- package/dist/pipethrough.d.ts +26 -26
- package/dist/pipethrough.d.ts.map +1 -1
- package/dist/pipethrough.js +360 -228
- package/dist/pipethrough.js.map +1 -1
- package/dist/read-after-write/util.d.ts +13 -5
- package/dist/read-after-write/util.d.ts.map +1 -1
- package/dist/read-after-write/util.js +37 -22
- package/dist/read-after-write/util.js.map +1 -1
- package/package.json +15 -14
- package/src/account-manager/helpers/account.ts +22 -0
- package/src/account-manager/helpers/invite.ts +19 -9
- package/src/account-manager/index.ts +13 -1
- package/src/api/app/bsky/actor/getProfile.ts +3 -17
- package/src/api/app/bsky/actor/getProfiles.ts +3 -15
- package/src/api/app/bsky/feed/getActorLikes.ts +3 -19
- package/src/api/app/bsky/feed/getAuthorFeed.ts +3 -17
- package/src/api/app/bsky/feed/getFeed.ts +3 -1
- package/src/api/app/bsky/feed/getPostThread.ts +16 -23
- package/src/api/app/bsky/feed/getTimeline.ts +3 -14
- package/src/api/com/atproto/admin/getAccountInfo.ts +6 -13
- package/src/api/com/atproto/admin/getAccountInfos.ts +33 -0
- package/src/api/com/atproto/admin/index.ts +2 -0
- package/src/api/com/atproto/admin/util.ts +38 -0
- package/src/api/com/atproto/repo/getRecord.ts +5 -2
- package/src/api/com/atproto/server/requestPasswordReset.ts +1 -1
- package/src/config/config.ts +31 -1
- package/src/config/env.ts +22 -2
- package/src/context.ts +62 -17
- package/src/index.ts +1 -0
- package/src/lexicon/index.ts +24 -0
- package/src/lexicon/lexicons.ts +137 -3
- package/src/lexicon/types/app/bsky/actor/defs.ts +2 -0
- package/src/lexicon/types/app/bsky/actor/profile.ts +1 -0
- package/src/lexicon/types/app/bsky/feed/defs.ts +38 -2
- package/src/lexicon/types/app/bsky/feed/getAuthorFeed.ts +1 -0
- package/src/lexicon/types/app/bsky/graph/getSuggestedFollowsByActor.ts +2 -0
- package/src/lexicon/types/app/bsky/unspecced/getSuggestionsSkeleton.ts +2 -0
- package/src/lexicon/types/com/atproto/repo/getRecord.ts +1 -0
- package/src/lexicon/types/tools/ozone/moderation/getRecords.ts +50 -0
- package/src/lexicon/types/tools/ozone/moderation/getRepos.ts +50 -0
- package/src/mailer/index.ts +1 -1
- package/src/mailer/templates/confirm-email.hbs +106 -336
- package/src/mailer/templates/delete-account.hbs +110 -346
- package/src/mailer/templates/plc-operation.hbs +107 -338
- package/src/mailer/templates/reset-password.d.ts +1 -1
- package/src/mailer/templates/reset-password.hbs +108 -344
- package/src/mailer/templates/update-email.hbs +107 -337
- package/src/pipethrough.ts +528 -233
- package/src/read-after-write/util.ts +58 -32
- package/tests/account-deletion.test.ts +1 -1
- package/tests/account.test.ts +2 -2
- package/tests/email-confirmation.test.ts +2 -2
- package/tests/plc-operations.test.ts +1 -1
- package/tests/proxied/proxy-catchall.test.ts +255 -0
- package/tests/proxied/proxy-header.test.ts +31 -1
- package/tests/proxied/read-after-write.test.ts +77 -0
package/dist/pipethrough.js
CHANGED
@@ -1,234 +1,403 @@
|
|
1
1
|
"use strict";
|
2
|
-
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
3
|
-
if (k2 === undefined) k2 = k;
|
4
|
-
var desc = Object.getOwnPropertyDescriptor(m, k);
|
5
|
-
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
6
|
-
desc = { enumerable: true, get: function() { return m[k]; } };
|
7
|
-
}
|
8
|
-
Object.defineProperty(o, k2, desc);
|
9
|
-
}) : (function(o, m, k, k2) {
|
10
|
-
if (k2 === undefined) k2 = k;
|
11
|
-
o[k2] = m[k];
|
12
|
-
}));
|
13
|
-
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
14
|
-
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
15
|
-
}) : function(o, v) {
|
16
|
-
o["default"] = v;
|
17
|
-
});
|
18
|
-
var __importStar = (this && this.__importStar) || function (mod) {
|
19
|
-
if (mod && mod.__esModule) return mod;
|
20
|
-
var result = {};
|
21
|
-
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
22
|
-
__setModuleDefault(result, mod);
|
23
|
-
return result;
|
24
|
-
};
|
25
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
26
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
27
|
-
};
|
28
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
29
|
-
exports.
|
30
|
-
const
|
31
|
-
const node_net_1 = __importDefault(require("node:net"));
|
32
|
-
const node_stream_1 = __importDefault(require("node:stream"));
|
33
|
-
const lexicon_1 = require("@atproto/lexicon");
|
34
|
-
const xrpc_server_1 = require("@atproto/xrpc-server");
|
35
|
-
const xrpc_1 = require("@atproto/xrpc");
|
3
|
+
exports.PROTECTED_METHODS = exports.PRIVILEGED_METHODS = exports.asPipeThroughBuffer = exports.bufferUpstreamResponse = exports.isJsonContentType = exports.parseProxyHeader = exports.pipethrough = exports.proxyHandler = void 0;
|
4
|
+
const node_stream_1 = require("node:stream");
|
36
5
|
const common_1 = require("@atproto/common");
|
6
|
+
const xrpc_1 = require("@atproto/xrpc");
|
7
|
+
const xrpc_server_1 = require("@atproto/xrpc-server");
|
37
8
|
const lexicons_1 = require("./lexicon/lexicons");
|
38
9
|
const logger_1 = require("./logger");
|
39
10
|
const proxyHandler = (ctx) => {
|
40
11
|
const accessStandard = ctx.authVerifier.accessStandard();
|
41
12
|
return async (req, res, next) => {
|
13
|
+
// /!\ Hot path
|
42
14
|
try {
|
43
|
-
|
15
|
+
if (req.method !== 'GET' &&
|
16
|
+
req.method !== 'HEAD' &&
|
17
|
+
req.method !== 'POST') {
|
18
|
+
throw new xrpc_server_1.XRPCError(xrpc_1.ResponseType.InvalidRequest, 'XRPC requests only supports GET and POST');
|
19
|
+
}
|
20
|
+
const body = req.method === 'POST' ? req : undefined;
|
21
|
+
if (body != null && !body.readable) {
|
22
|
+
// Body was already consumed by a previous middleware
|
23
|
+
throw new xrpc_server_1.InternalServerError('Request body is not readable');
|
24
|
+
}
|
25
|
+
const lxm = (0, xrpc_server_1.parseReqNsid)(req);
|
26
|
+
if (exports.PROTECTED_METHODS.has(lxm)) {
|
27
|
+
throw new xrpc_server_1.InvalidRequestError('Bad token method', 'InvalidToken');
|
28
|
+
}
|
44
29
|
const auth = await accessStandard({ req, res });
|
45
|
-
if (exports.
|
46
|
-
(!auth.credentials.isPrivileged && exports.PRIVILEGED_METHODS.has(nsid))) {
|
30
|
+
if (!auth.credentials.isPrivileged && exports.PRIVILEGED_METHODS.has(lxm)) {
|
47
31
|
throw new xrpc_server_1.InvalidRequestError('Bad token method', 'InvalidToken');
|
48
32
|
}
|
49
|
-
const
|
50
|
-
|
51
|
-
|
52
|
-
|
33
|
+
const { url: origin, did: aud } = await parseProxyInfo(ctx, req, lxm);
|
34
|
+
const headers = {
|
35
|
+
'accept-encoding': req.headers['accept-encoding'] || 'identity',
|
36
|
+
'accept-language': req.headers['accept-language'],
|
37
|
+
'atproto-accept-labelers': req.headers['atproto-accept-labelers'],
|
38
|
+
'x-bsky-topics': req.headers['x-bsky-topics'],
|
39
|
+
'content-type': body && req.headers['content-type'],
|
40
|
+
'content-encoding': body && req.headers['content-encoding'],
|
41
|
+
'content-length': body && req.headers['content-length'],
|
42
|
+
authorization: auth.credentials.did
|
43
|
+
? `Bearer ${await ctx.serviceAuthJwt(auth.credentials.did, aud, lxm)}`
|
44
|
+
: undefined,
|
45
|
+
};
|
46
|
+
const dispatchOptions = {
|
47
|
+
origin,
|
48
|
+
method: req.method,
|
49
|
+
path: req.originalUrl,
|
50
|
+
body,
|
51
|
+
headers,
|
52
|
+
};
|
53
|
+
await pipethroughStream(ctx, dispatchOptions, (upstream) => {
|
54
|
+
res.status(upstream.statusCode);
|
55
|
+
for (const [name, val] of responseHeaders(upstream.headers)) {
|
56
|
+
res.setHeader(name, val);
|
57
|
+
}
|
58
|
+
// Note that we should not need to manually handle errors here (e.g. by
|
59
|
+
// destroying the response), as the http server will handle them for us.
|
60
|
+
res.on('error', logResponseError);
|
61
|
+
// Tell undici to write the upstream response directly to the response
|
62
|
+
return res;
|
53
63
|
});
|
54
|
-
const body = node_stream_1.default.Readable.toWeb(req);
|
55
|
-
const reqInit = formatReqInit(req, headers, body);
|
56
|
-
const proxyRes = await (0, exports.makeRequest)(url, reqInit);
|
57
|
-
await (0, exports.pipeProxyRes)(proxyRes, res);
|
58
64
|
}
|
59
65
|
catch (err) {
|
60
|
-
|
66
|
+
next(err);
|
61
67
|
}
|
62
|
-
return next();
|
63
68
|
};
|
64
69
|
};
|
65
70
|
exports.proxyHandler = proxyHandler;
|
66
|
-
const
|
67
|
-
|
68
|
-
|
69
|
-
|
70
|
-
|
71
|
-
|
72
|
-
|
73
|
-
}
|
71
|
+
const ACCEPT_ENCODING_COMPRESSED = [
|
72
|
+
['gzip', { q: 1.0 }],
|
73
|
+
['deflate', { q: 0.9 }],
|
74
|
+
['br', { q: 0.8 }],
|
75
|
+
['identity', { q: 0.1 }],
|
76
|
+
];
|
77
|
+
const ACCEPT_ENCODING_UNCOMPRESSED = [
|
78
|
+
['identity', { q: 1.0 }],
|
79
|
+
['gzip', { q: 0.3 }],
|
80
|
+
['deflate', { q: 0.2 }],
|
81
|
+
['br', { q: 0.1 }],
|
82
|
+
];
|
83
|
+
async function pipethrough(ctx, req, options) {
|
84
|
+
if (req.method !== 'GET' && req.method !== 'HEAD') {
|
85
|
+
// pipethrough() is used from within xrpcServer handlers, which means that
|
86
|
+
// the request body either has been parsed or is a readable stream that has
|
87
|
+
// been piped for decoding & size limiting. Because of this, forwarding the
|
88
|
+
// request body requires re-encoding it. Since we currently do not use
|
89
|
+
// pipethrough() with procedures, proxying of request body is not
|
90
|
+
// implemented.
|
91
|
+
throw new xrpc_server_1.InternalServerError(`Proxying of ${req.method} requests is not supported`);
|
92
|
+
}
|
93
|
+
const lxm = (0, xrpc_server_1.parseReqNsid)(req);
|
94
|
+
const { url: origin, did: aud } = await parseProxyInfo(ctx, req, lxm);
|
95
|
+
const dispatchOptions = {
|
96
|
+
origin,
|
97
|
+
method: req.method,
|
98
|
+
path: req.originalUrl,
|
99
|
+
headers: {
|
100
|
+
'accept-language': req.headers['accept-language'],
|
101
|
+
'atproto-accept-labelers': req.headers['atproto-accept-labelers'],
|
102
|
+
'x-bsky-topics': req.headers['x-bsky-topics'],
|
103
|
+
// Because we sometimes need to interpret the response (e.g. during
|
104
|
+
// read-after-write, through asPipeThroughBuffer()), we need to ask the
|
105
|
+
// upstream server for an encoding that both the requester and the PDS can
|
106
|
+
// understand. Since we might have to do the decoding ourselves, we will
|
107
|
+
// use our own preferences (and weight) to negotiate the encoding.
|
108
|
+
'accept-encoding': negotiateContentEncoding(req.headers['accept-encoding'], ctx.cfg.proxy.preferCompressed
|
109
|
+
? ACCEPT_ENCODING_COMPRESSED
|
110
|
+
: ACCEPT_ENCODING_UNCOMPRESSED),
|
111
|
+
authorization: options?.iss
|
112
|
+
? `Bearer ${await ctx.serviceAuthJwt(options.iss, options.aud ?? aud, options.lxm ?? lxm)}`
|
113
|
+
: undefined,
|
114
|
+
},
|
115
|
+
// Use a high water mark to buffer more data while performing async
|
116
|
+
// operations before this stream is consumed. This is especially useful
|
117
|
+
// while processing read-after-write operations.
|
118
|
+
highWaterMark: 2 * 65536, // twice the default (64KiB)
|
119
|
+
};
|
120
|
+
const { headers, body } = await pipethroughRequest(ctx, dispatchOptions);
|
121
|
+
return {
|
122
|
+
encoding: safeString(headers['content-type']) ?? 'application/json',
|
123
|
+
headers: Object.fromEntries(responseHeaders(headers)),
|
124
|
+
stream: body,
|
125
|
+
};
|
126
|
+
}
|
74
127
|
exports.pipethrough = pipethrough;
|
75
|
-
const pipethroughProcedure = async (ctx, req, requester, body) => {
|
76
|
-
const { url, aud, nsid: lxm } = await (0, exports.formatUrlAndAud)(ctx, req);
|
77
|
-
const headers = await (0, exports.formatHeaders)(ctx, req, { aud, lxm, requester });
|
78
|
-
const encodedBody = body
|
79
|
-
? new TextEncoder().encode((0, lexicon_1.stringifyLex)(body))
|
80
|
-
: undefined;
|
81
|
-
const reqInit = formatReqInit(req, headers, encodedBody);
|
82
|
-
const res = await (0, exports.makeRequest)(url, reqInit);
|
83
|
-
return (0, exports.parseProxyRes)(res);
|
84
|
-
};
|
85
|
-
exports.pipethroughProcedure = pipethroughProcedure;
|
86
128
|
// Request setup/formatting
|
87
129
|
// -------------------
|
88
|
-
|
89
|
-
|
90
|
-
'
|
91
|
-
|
92
|
-
|
93
|
-
|
94
|
-
|
95
|
-
|
96
|
-
|
97
|
-
|
98
|
-
|
99
|
-
|
100
|
-
|
101
|
-
|
130
|
+
async function parseProxyInfo(ctx, req, lxm) {
|
131
|
+
// /!\ Hot path
|
132
|
+
const proxyToHeader = req.header('atproto-proxy');
|
133
|
+
if (proxyToHeader)
|
134
|
+
return (0, exports.parseProxyHeader)(ctx, proxyToHeader);
|
135
|
+
const defaultProxy = defaultService(ctx, lxm);
|
136
|
+
if (defaultProxy)
|
137
|
+
return defaultProxy;
|
138
|
+
throw new xrpc_server_1.InvalidRequestError(`No service configured for ${lxm}`);
|
139
|
+
}
|
140
|
+
const parseProxyHeader = async (
|
141
|
+
// Using subset of AppContext for testing purposes
|
142
|
+
ctx, proxyTo) => {
|
143
|
+
// /!\ Hot path
|
144
|
+
const hashIndex = proxyTo.indexOf('#');
|
145
|
+
if (hashIndex === 0) {
|
146
|
+
throw new xrpc_server_1.InvalidRequestError('no did specified in proxy header');
|
102
147
|
}
|
103
|
-
|
104
|
-
|
105
|
-
throw new xrpc_server_1.InvalidRequestError(`Invalid service url: ${url.toString()}`);
|
148
|
+
if (hashIndex === -1 || hashIndex === proxyTo.length - 1) {
|
149
|
+
throw new xrpc_server_1.InvalidRequestError('no service id specified in proxy header');
|
106
150
|
}
|
107
|
-
|
108
|
-
|
109
|
-
|
110
|
-
const formatHeaders = async (ctx, req, opts) => {
|
111
|
-
const { aud, lxm, requester } = opts;
|
112
|
-
const headers = requester
|
113
|
-
? (await ctx.serviceAuthHeaders(requester, aud, lxm)).headers
|
114
|
-
: {};
|
115
|
-
// forward select headers to upstream services
|
116
|
-
for (const header of REQ_HEADERS_TO_FORWARD) {
|
117
|
-
const val = req.headers[header];
|
118
|
-
if (val) {
|
119
|
-
headers[header] = val;
|
120
|
-
}
|
151
|
+
// More than one hash
|
152
|
+
if (proxyTo.indexOf('#', hashIndex + 1) !== -1) {
|
153
|
+
throw new xrpc_server_1.InvalidRequestError('invalid proxy header format');
|
121
154
|
}
|
122
|
-
|
123
|
-
|
124
|
-
|
125
|
-
const formatReqInit = (req, headers, body) => {
|
126
|
-
if (req.method === 'GET') {
|
127
|
-
return {
|
128
|
-
method: 'get',
|
129
|
-
headers,
|
130
|
-
};
|
131
|
-
}
|
132
|
-
else if (req.method === 'HEAD') {
|
133
|
-
return {
|
134
|
-
method: 'head',
|
135
|
-
headers,
|
136
|
-
};
|
137
|
-
}
|
138
|
-
else if (req.method === 'POST') {
|
139
|
-
return {
|
140
|
-
method: 'post',
|
141
|
-
headers,
|
142
|
-
body,
|
143
|
-
duplex: 'half',
|
144
|
-
};
|
145
|
-
}
|
146
|
-
else {
|
147
|
-
throw new xrpc_server_1.InvalidRequestError('Method not found');
|
148
|
-
}
|
149
|
-
};
|
150
|
-
const parseProxyHeader = async (ctx, req) => {
|
151
|
-
const proxyTo = req.header('atproto-proxy');
|
152
|
-
if (!proxyTo)
|
153
|
-
return;
|
154
|
-
const [did, serviceId] = proxyTo.split('#');
|
155
|
-
if (!serviceId) {
|
156
|
-
throw new xrpc_server_1.InvalidRequestError('no service id specified');
|
155
|
+
// Basic validation
|
156
|
+
if (proxyTo.includes(' ')) {
|
157
|
+
throw new xrpc_server_1.InvalidRequestError('proxy header cannot contain spaces');
|
157
158
|
}
|
159
|
+
const did = proxyTo.slice(0, hashIndex);
|
158
160
|
const didDoc = await ctx.idResolver.did.resolve(did);
|
159
161
|
if (!didDoc) {
|
160
162
|
throw new xrpc_server_1.InvalidRequestError('could not resolve proxy did');
|
161
163
|
}
|
162
|
-
const
|
163
|
-
|
164
|
+
const serviceId = proxyTo.slice(hashIndex);
|
165
|
+
const url = (0, common_1.getServiceEndpoint)(didDoc, { id: serviceId });
|
166
|
+
if (!url) {
|
164
167
|
throw new xrpc_server_1.InvalidRequestError('could not resolve proxy did service url');
|
165
168
|
}
|
166
|
-
return { did,
|
169
|
+
return { did, url };
|
167
170
|
};
|
168
171
|
exports.parseProxyHeader = parseProxyHeader;
|
169
|
-
|
170
|
-
|
171
|
-
|
172
|
-
|
173
|
-
|
174
|
-
|
175
|
-
|
176
|
-
|
177
|
-
|
178
|
-
|
172
|
+
/**
|
173
|
+
* Utility function that wraps the undici stream() function and handles request
|
174
|
+
* and response errors by wrapping them in XRPCError instances. This function is
|
175
|
+
* more efficient than "pipethroughRequest" when a writable stream to pipe the
|
176
|
+
* upstream response to is available.
|
177
|
+
*/
|
178
|
+
async function pipethroughStream(ctx, dispatchOptions, successStreamFactory) {
|
179
|
+
return new Promise((resolve, reject) => {
|
180
|
+
void ctx.proxyAgent
|
181
|
+
.stream(dispatchOptions, (upstream) => {
|
182
|
+
if (upstream.statusCode >= 400) {
|
183
|
+
const passThrough = new node_stream_1.PassThrough();
|
184
|
+
void tryParsingError(upstream.headers, passThrough).then((parsed) => {
|
185
|
+
const xrpcError = new xrpc_1.XRPCError(upstream.statusCode === 500
|
186
|
+
? xrpc_1.ResponseType.UpstreamFailure
|
187
|
+
: upstream.statusCode, parsed.error, parsed.message, Object.fromEntries(responseHeaders(upstream.headers, false)), { cause: dispatchOptions });
|
188
|
+
reject(xrpcError);
|
189
|
+
}, reject);
|
190
|
+
return passThrough;
|
191
|
+
}
|
192
|
+
const writable = successStreamFactory(upstream);
|
193
|
+
// As soon as the control was passed to the writable stream (i.e. by
|
194
|
+
// returning the writable hereafter), pipethroughStream() is considered
|
195
|
+
// to have succeeded. Any error occurring while writing upstream data to
|
196
|
+
// the writable stream should be handled through the stream's error
|
197
|
+
// state (i.e. successStreamFactory() must ensure that error events on
|
198
|
+
// the returned writable will be handled).
|
199
|
+
resolve();
|
200
|
+
return writable;
|
201
|
+
})
|
202
|
+
// The following catch block will be triggered with either network errors
|
203
|
+
// or writable stream errors. In the latter case, the promise will already
|
204
|
+
// be resolved, and reject()ing it there after will have no effect. Those
|
205
|
+
// error would still be logged by the successStreamFactory() function.
|
206
|
+
.catch(handleUpstreamRequestError)
|
207
|
+
.catch(reject);
|
208
|
+
});
|
209
|
+
}
|
210
|
+
/**
|
211
|
+
* Utility function that wraps the undici request() function and handles request
|
212
|
+
* and response errors by wrapping them in XRPCError instances.
|
213
|
+
*/
|
214
|
+
async function pipethroughRequest(ctx, dispatchOptions) {
|
215
|
+
// HandlerPipeThroughStream requires a readable stream to be returned, so we
|
216
|
+
// use the (less efficient) request() function instead.
|
217
|
+
const upstream = await ctx.proxyAgent
|
218
|
+
.request(dispatchOptions)
|
219
|
+
.catch(handleUpstreamRequestError);
|
220
|
+
if (upstream.statusCode >= 400) {
|
221
|
+
const parsed = await tryParsingError(upstream.headers, upstream.body);
|
222
|
+
// Note "XRPCClientError" is used instead of "XRPCServerError" in order to
|
223
|
+
// allow users of this function to capture & handle these errors (namely in
|
224
|
+
// "app.bsky.feed.getPostThread").
|
225
|
+
throw new xrpc_1.XRPCError(upstream.statusCode === 500
|
226
|
+
? xrpc_1.ResponseType.UpstreamFailure
|
227
|
+
: upstream.statusCode, parsed.error, parsed.message, Object.fromEntries(responseHeaders(upstream.headers, false)), { cause: dispatchOptions });
|
179
228
|
}
|
180
|
-
|
181
|
-
|
182
|
-
|
183
|
-
|
184
|
-
|
229
|
+
return upstream;
|
230
|
+
}
|
231
|
+
function handleUpstreamRequestError(err, message = 'pipethrough network error') {
|
232
|
+
logger_1.httpLogger.warn({ err }, message);
|
233
|
+
throw new xrpc_server_1.XRPCError(xrpc_1.ResponseType.UpstreamFailure, message, undefined, {
|
234
|
+
cause: err,
|
235
|
+
});
|
236
|
+
}
|
237
|
+
// accept-encoding defaults to "identity with lowest priority"
|
238
|
+
const ACCEPT_ENC_DEFAULT = ['identity', { q: 0.001 }];
|
239
|
+
const ACCEPT_FORBID_STAR = ['*', { q: 0 }];
|
240
|
+
function negotiateContentEncoding(acceptHeader, preferences) {
|
241
|
+
const acceptMap = Object.fromEntries(parseAcceptEncoding(acceptHeader));
|
242
|
+
// Make sure the default (identity) is covered by the preferences
|
243
|
+
if (!preferences.some(coversIdentityAccept)) {
|
244
|
+
preferences = [...preferences, ACCEPT_ENC_DEFAULT];
|
185
245
|
}
|
186
|
-
|
187
|
-
|
188
|
-
|
189
|
-
|
190
|
-
|
191
|
-
|
192
|
-
|
193
|
-
|
194
|
-
'atproto-repo-rev',
|
195
|
-
'atproto-content-labelers',
|
196
|
-
];
|
197
|
-
const pipeProxyRes = async (upstreamRes, ownRes) => {
|
198
|
-
for (const headerName of RES_HEADERS_TO_FORWARD) {
|
199
|
-
const headerVal = upstreamRes.headers.get(headerName);
|
200
|
-
if (headerVal) {
|
201
|
-
ownRes.setHeader(headerName, headerVal);
|
246
|
+
const common = preferences.filter(([name]) => {
|
247
|
+
const acceptQ = (acceptMap[name] ?? acceptMap['*'])?.q;
|
248
|
+
// Per HTTP/1.1, "identity" is always acceptable unless explicitly rejected
|
249
|
+
if (name === 'identity') {
|
250
|
+
return acceptQ == null || acceptQ > 0;
|
251
|
+
}
|
252
|
+
else {
|
253
|
+
return acceptQ != null && acceptQ > 0;
|
202
254
|
}
|
255
|
+
});
|
256
|
+
// Since "identity" was present in the preferences, a missing "identity" in
|
257
|
+
// the common array means that the client explicitly rejected it. Let's reflect
|
258
|
+
// this by adding it to the common array.
|
259
|
+
if (!common.some(coversIdentityAccept)) {
|
260
|
+
common.push(ACCEPT_FORBID_STAR);
|
261
|
+
}
|
262
|
+
// If no common encodings are acceptable, throw a 406 Not Acceptable error
|
263
|
+
if (!common.some(isAllowedAccept)) {
|
264
|
+
throw new xrpc_server_1.XRPCError(xrpc_1.ResponseType.NotAcceptable, 'this service does not support any of the requested encodings');
|
265
|
+
}
|
266
|
+
return formatAcceptHeader(common);
|
267
|
+
}
|
268
|
+
function coversIdentityAccept([name]) {
|
269
|
+
return name === 'identity' || name === '*';
|
270
|
+
}
|
271
|
+
function isAllowedAccept([, flags]) {
|
272
|
+
return flags.q > 0;
|
273
|
+
}
|
274
|
+
/**
|
275
|
+
* @see {@link https://developer.mozilla.org/en-US/docs/Glossary/Quality_values}
|
276
|
+
*/
|
277
|
+
function formatAcceptHeader(accept) {
|
278
|
+
return accept.map(formatAcceptPart).join(',');
|
279
|
+
}
|
280
|
+
function formatAcceptPart([name, flags]) {
|
281
|
+
return `${name};q=${flags.q}`;
|
282
|
+
}
|
283
|
+
function parseAcceptEncoding(acceptEncodings) {
|
284
|
+
if (!acceptEncodings?.length)
|
285
|
+
return [];
|
286
|
+
return Array.isArray(acceptEncodings)
|
287
|
+
? acceptEncodings.flatMap(parseAcceptEncoding)
|
288
|
+
: acceptEncodings.split(',').map(parseAcceptEncodingDefinition);
|
289
|
+
}
|
290
|
+
function parseAcceptEncodingDefinition(def) {
|
291
|
+
const { length, 0: encoding, 1: params } = def.trim().split(';', 3);
|
292
|
+
if (length > 2) {
|
293
|
+
throw new xrpc_server_1.InvalidRequestError(`Invalid accept-encoding: "${def}"`);
|
203
294
|
}
|
204
|
-
if (
|
205
|
-
|
206
|
-
|
207
|
-
|
208
|
-
|
295
|
+
if (!encoding || encoding.includes('=')) {
|
296
|
+
throw new xrpc_server_1.InvalidRequestError(`Invalid accept-encoding: "${def}"`);
|
297
|
+
}
|
298
|
+
const flags = { q: 1 };
|
299
|
+
if (length === 2) {
|
300
|
+
const { length, 0: key, 1: value } = params.split('=', 3);
|
301
|
+
if (length !== 2) {
|
302
|
+
throw new xrpc_server_1.InvalidRequestError(`Invalid accept-encoding: "${def}"`);
|
303
|
+
}
|
304
|
+
if (key === 'q' || key === 'Q') {
|
305
|
+
const q = parseFloat(value);
|
306
|
+
if (q === 0 || (Number.isFinite(q) && q <= 1 && q >= 0.001)) {
|
307
|
+
flags.q = q;
|
308
|
+
}
|
309
|
+
else {
|
310
|
+
throw new xrpc_server_1.InvalidRequestError(`Invalid accept-encoding: "${def}"`);
|
311
|
+
}
|
209
312
|
}
|
210
313
|
else {
|
211
|
-
|
314
|
+
throw new xrpc_server_1.InvalidRequestError(`Invalid accept-encoding: "${def}"`);
|
212
315
|
}
|
213
|
-
ownRes.status(200);
|
214
|
-
const resStream = node_stream_1.default.Readable.fromWeb(upstreamRes.body);
|
215
|
-
await node_stream_1.default.promises.pipeline(resStream, ownRes);
|
216
316
|
}
|
217
|
-
|
218
|
-
|
317
|
+
return [encoding.toLowerCase(), flags];
|
318
|
+
}
|
319
|
+
function isJsonContentType(contentType) {
|
320
|
+
if (!contentType)
|
321
|
+
return undefined;
|
322
|
+
return /application\/(?:\w+\+)?json/i.test(contentType);
|
323
|
+
}
|
324
|
+
exports.isJsonContentType = isJsonContentType;
|
325
|
+
// Best-effort extraction of `{ error, message }` from an upstream error
// response body. Never throws: any read/decode/parse failure yields `{}`.
async function tryParsingError(headers, readable) {
    if (isJsonContentType(headers['content-type']) === false) {
        // Non-JSON content types can't be parsed as an error payload, so the
        // response is discarded outright.
        //
        // @NOTE draining the stream instead of destroying it would keep an
        // HTTP/1.1 upstream connection alive, but at the cost of reading the
        // entire response (bandwidth + I/O). HTTP/2 is recommended upstream so
        // a single stream can be destroyed without resetting the connection.
        // In practice 4xx/5xx responses are expected to be JSON anyway.
        readable.destroy();
        return {};
    }
    let errInfo;
    try {
        const body = await bufferUpstreamResponse(readable, headers['content-encoding']);
        errInfo = JSON.parse(body.toString('utf8'));
    }
    catch {
        // Failed to read, decode, buffer or parse. No big deal.
        return {};
    }
    return {
        error: safeString(errInfo?.['error']),
        message: safeString(errInfo?.['message']),
    };
}
|
353
|
+
// Fully buffers an upstream response stream, transparently decoding any
// content-encoding. On failure the stream is destroyed and an XRPCError with
// ResponseType.UpstreamFailure is thrown (original error kept as `cause`).
async function bufferUpstreamResponse(readable, contentEncoding) {
    try {
        // Needed for type-safety (should never happen irl)
        if (Array.isArray(contentEncoding)) {
            throw new TypeError('upstream service returned multiple content-encoding headers');
        }
        const decoded = (0, common_1.decodeStream)(readable, contentEncoding);
        return await (0, common_1.streamToNodeBuffer)(decoded);
    }
    catch (err) {
        if (!readable.destroyed) {
            readable.destroy();
        }
        const message = err instanceof TypeError ? err.message : 'unable to decode request body';
        throw new xrpc_server_1.XRPCError(xrpc_1.ResponseType.UpstreamFailure, message, undefined, { cause: err });
    }
}
exports.bufferUpstreamResponse = bufferUpstreamResponse;
|
368
|
+
// Converts a streaming pipe-through into a fully buffered one: the body is
// buffered (and decoded), so content-encoding/content-length headers are
// dropped from the forwarded header set.
async function asPipeThroughBuffer(input) {
    const { stream, headers, encoding } = input;
    const buffer = await bufferUpstreamResponse(stream, headers?.['content-encoding']);
    return {
        buffer,
        headers: (0, common_1.omit)(headers, ['content-encoding', 'content-length']),
        encoding,
    };
}
exports.asPipeThroughBuffer = asPipeThroughBuffer;
|
376
|
+
// Response parsing/forwarding
|
377
|
+
// -------------------
|
378
|
+
// Non-content headers always forwarded from upstream responses.
const RES_HEADERS_TO_FORWARD = ['atproto-repo-rev', 'atproto-content-labelers'];
// Yields [name, value] pairs to copy onto the downstream response.
// Content headers (when requested) are forwarded if truthy; the headers in
// RES_HEADERS_TO_FORWARD are forwarded only when their value is a string.
function* responseHeaders(headers, includeContentHeaders = true) {
    if (includeContentHeaders) {
        // Order matters for behavior-stable output: length, encoding, type, language.
        for (const name of ['content-length', 'content-encoding', 'content-type', 'content-language']) {
            const value = headers[name];
            if (value)
                yield [name, value];
        }
    }
    for (const name of RES_HEADERS_TO_FORWARD) {
        const value = headers[name];
        if (typeof value === 'string')
            yield [name, value];
    }
}
|
232
401
|
// Utils
|
233
402
|
// -------------------
|
234
403
|
exports.PRIVILEGED_METHODS = new Set([
|
@@ -292,47 +461,10 @@ const defaultService = (ctx, nsid) => {
|
|
292
461
|
return ctx.cfg.bskyAppView;
|
293
462
|
}
|
294
463
|
};
|
295
|
-
// Parses a buffered XRPC response: decode UTF-8 JSON, convert to lexicon
// representation, and validate against the method's declared output schema.
const parseRes = (nsid, res) => {
    const utf8 = ui8.toString(new Uint8Array(res.buffer), 'utf8');
    const json = safeParseJson(utf8);
    const lex = json && (0, lexicon_1.jsonToLex)(json);
    return lexicons_1.lexicons.assertValidXrpcOutput(nsid, lex);
};
exports.parseRes = parseRes;
|
302
|
-
// Reads a fetch Response body as an ArrayBuffer; network failures are logged
// and re-surfaced as a generic upstream-failure XRPCError.
const readArrayBufferRes = async (res) => {
    try {
        const data = await res.arrayBuffer();
        return data;
    }
    catch (err) {
        logger_1.httpLogger.warn({ err }, 'pipethrough network error');
        throw new xrpc_1.XRPCError(xrpc_1.ResponseType.UpstreamFailure);
    }
};
|
311
|
-
// SSRF guard: accept only https URLs with a non-empty hostname that is
// neither "localhost" nor a raw IP literal (net.isIP returns 0 for non-IPs).
const isSafeUrl = (url) => {
    const { protocol, hostname } = url;
    return (protocol === 'https:' &&
        Boolean(hostname) &&
        hostname !== 'localhost' &&
        node_net_1.default.isIP(hostname) === 0);
};
|
320
464
|
// Returns the value only when it is a string; anything else maps to undefined.
const safeString = (str) => (typeof str === 'string' ? str : undefined);
|
323
|
-
|
324
|
-
|
325
|
-
|
326
|
-
}
|
327
|
-
catch {
|
328
|
-
return null;
|
329
|
-
}
|
330
|
-
};
|
331
|
-
// Collapses an iterable of [key, value] header pairs (e.g. a fetch Headers
// object) into a plain object; later duplicate keys overwrite earlier ones.
const simpleHeaders = (headers) => Object.fromEntries(headers);
|
467
|
+
// Warn-level logging hook for failures while forwarding an upstream response
// body to the client (used as a stream/pipeline error callback).
function logResponseError(err) {
    const log = logger_1.httpLogger;
    log.warn({ err }, 'error forwarding upstream response');
}
|
338
470
|
//# sourceMappingURL=pipethrough.js.map
|