@atproto/pds 0.4.58 → 0.4.60

Files changed (130)
  1. package/CHANGELOG.md +46 -0
  2. package/dist/actor-store/repo/sql-repo-reader.d.ts.map +1 -1
  3. package/dist/actor-store/repo/sql-repo-reader.js +2 -5
  4. package/dist/actor-store/repo/sql-repo-reader.js.map +1 -1
  5. package/dist/api/app/bsky/actor/getProfile.d.ts.map +1 -1
  6. package/dist/api/app/bsky/actor/getProfile.js +2 -9
  7. package/dist/api/app/bsky/actor/getProfile.js.map +1 -1
  8. package/dist/api/app/bsky/actor/getProfiles.d.ts.map +1 -1
  9. package/dist/api/app/bsky/actor/getProfiles.js +2 -6
  10. package/dist/api/app/bsky/actor/getProfiles.js.map +1 -1
  11. package/dist/api/app/bsky/feed/getActorLikes.d.ts.map +1 -1
  12. package/dist/api/app/bsky/feed/getActorLikes.js +2 -9
  13. package/dist/api/app/bsky/feed/getActorLikes.js.map +1 -1
  14. package/dist/api/app/bsky/feed/getAuthorFeed.d.ts.map +1 -1
  15. package/dist/api/app/bsky/feed/getAuthorFeed.js +2 -9
  16. package/dist/api/app/bsky/feed/getAuthorFeed.js.map +1 -1
  17. package/dist/api/app/bsky/feed/getFeed.d.ts.map +1 -1
  18. package/dist/api/app/bsky/feed/getFeed.js +2 -1
  19. package/dist/api/app/bsky/feed/getFeed.js.map +1 -1
  20. package/dist/api/app/bsky/feed/getPostThread.d.ts.map +1 -1
  21. package/dist/api/app/bsky/feed/getPostThread.js +12 -14
  22. package/dist/api/app/bsky/feed/getPostThread.js.map +1 -1
  23. package/dist/api/app/bsky/feed/getTimeline.d.ts.map +1 -1
  24. package/dist/api/app/bsky/feed/getTimeline.js +2 -6
  25. package/dist/api/app/bsky/feed/getTimeline.js.map +1 -1
  26. package/dist/api/com/atproto/repo/getRecord.js +1 -1
  27. package/dist/api/com/atproto/repo/getRecord.js.map +1 -1
  28. package/dist/api/com/atproto/server/requestPasswordReset.js +1 -1
  29. package/dist/api/com/atproto/server/requestPasswordReset.js.map +1 -1
  30. package/dist/config/config.d.ts +9 -0
  31. package/dist/config/config.d.ts.map +1 -1
  32. package/dist/config/config.js +10 -1
  33. package/dist/config/config.js.map +1 -1
  34. package/dist/config/env.d.ts +6 -1
  35. package/dist/config/env.d.ts.map +1 -1
  36. package/dist/config/env.js +8 -1
  37. package/dist/config/env.js.map +1 -1
  38. package/dist/context.d.ts +6 -2
  39. package/dist/context.d.ts.map +1 -1
  40. package/dist/context.js +55 -11
  41. package/dist/context.js.map +1 -1
  42. package/dist/index.d.ts.map +1 -1
  43. package/dist/index.js +1 -0
  44. package/dist/index.js.map +1 -1
  45. package/dist/lexicon/lexicons.d.ts +77 -0
  46. package/dist/lexicon/lexicons.d.ts.map +1 -1
  47. package/dist/lexicon/lexicons.js +86 -3
  48. package/dist/lexicon/lexicons.js.map +1 -1
  49. package/dist/lexicon/types/app/bsky/actor/defs.d.ts +16 -0
  50. package/dist/lexicon/types/app/bsky/actor/defs.d.ts.map +1 -1
  51. package/dist/lexicon/types/app/bsky/actor/defs.js +9 -1
  52. package/dist/lexicon/types/app/bsky/actor/defs.js.map +1 -1
  53. package/dist/lexicon/types/app/bsky/actor/profile.d.ts +1 -0
  54. package/dist/lexicon/types/app/bsky/actor/profile.d.ts.map +1 -1
  55. package/dist/lexicon/types/app/bsky/actor/profile.js.map +1 -1
  56. package/dist/lexicon/types/app/bsky/feed/defs.d.ts +13 -2
  57. package/dist/lexicon/types/app/bsky/feed/defs.d.ts.map +1 -1
  58. package/dist/lexicon/types/app/bsky/feed/defs.js +21 -1
  59. package/dist/lexicon/types/app/bsky/feed/defs.js.map +1 -1
  60. package/dist/lexicon/types/app/bsky/feed/getAuthorFeed.d.ts +1 -0
  61. package/dist/lexicon/types/app/bsky/feed/getAuthorFeed.d.ts.map +1 -1
  62. package/dist/lexicon/types/app/bsky/graph/getSuggestedFollowsByActor.d.ts +2 -0
  63. package/dist/lexicon/types/app/bsky/graph/getSuggestedFollowsByActor.d.ts.map +1 -1
  64. package/dist/lexicon/types/app/bsky/unspecced/getSuggestionsSkeleton.d.ts +2 -0
  65. package/dist/lexicon/types/app/bsky/unspecced/getSuggestionsSkeleton.d.ts.map +1 -1
  66. package/dist/lexicon/types/tools/ozone/moderation/defs.d.ts +2 -0
  67. package/dist/lexicon/types/tools/ozone/moderation/defs.d.ts.map +1 -1
  68. package/dist/lexicon/types/tools/ozone/moderation/defs.js.map +1 -1
  69. package/dist/lexicon/types/tools/ozone/moderation/queryStatuses.d.ts +3 -0
  70. package/dist/lexicon/types/tools/ozone/moderation/queryStatuses.d.ts.map +1 -1
  71. package/dist/mailer/index.d.ts +1 -1
  72. package/dist/mailer/index.d.ts.map +1 -1
  73. package/dist/mailer/index.js.map +1 -1
  74. package/dist/mailer/templates/confirm-email.js +1 -1
  75. package/dist/mailer/templates/confirm-email.js.map +2 -2
  76. package/dist/mailer/templates/delete-account.js +1 -1
  77. package/dist/mailer/templates/delete-account.js.map +2 -2
  78. package/dist/mailer/templates/plc-operation.js +1 -1
  79. package/dist/mailer/templates/plc-operation.js.map +2 -2
  80. package/dist/mailer/templates/reset-password.js +1 -1
  81. package/dist/mailer/templates/reset-password.js.map +2 -2
  82. package/dist/mailer/templates/update-email.js +1 -1
  83. package/dist/mailer/templates/update-email.js.map +2 -2
  84. package/dist/pipethrough.d.ts +26 -26
  85. package/dist/pipethrough.d.ts.map +1 -1
  86. package/dist/pipethrough.js +328 -228
  87. package/dist/pipethrough.js.map +1 -1
  88. package/dist/read-after-write/util.d.ts +13 -5
  89. package/dist/read-after-write/util.d.ts.map +1 -1
  90. package/dist/read-after-write/util.js +37 -22
  91. package/dist/read-after-write/util.js.map +1 -1
  92. package/package.json +16 -15
  93. package/src/actor-store/repo/sql-repo-reader.ts +3 -6
  94. package/src/api/app/bsky/actor/getProfile.ts +3 -17
  95. package/src/api/app/bsky/actor/getProfiles.ts +3 -15
  96. package/src/api/app/bsky/feed/getActorLikes.ts +3 -19
  97. package/src/api/app/bsky/feed/getAuthorFeed.ts +3 -17
  98. package/src/api/app/bsky/feed/getFeed.ts +3 -1
  99. package/src/api/app/bsky/feed/getPostThread.ts +16 -23
  100. package/src/api/app/bsky/feed/getTimeline.ts +3 -14
  101. package/src/api/com/atproto/repo/getRecord.ts +1 -1
  102. package/src/api/com/atproto/server/requestPasswordReset.ts +1 -1
  103. package/src/config/config.ts +21 -1
  104. package/src/config/env.ts +20 -2
  105. package/src/context.ts +62 -17
  106. package/src/index.ts +1 -0
  107. package/src/lexicon/lexicons.ts +92 -3
  108. package/src/lexicon/types/app/bsky/actor/defs.ts +25 -0
  109. package/src/lexicon/types/app/bsky/actor/profile.ts +1 -0
  110. package/src/lexicon/types/app/bsky/feed/defs.ts +38 -2
  111. package/src/lexicon/types/app/bsky/feed/getAuthorFeed.ts +1 -0
  112. package/src/lexicon/types/app/bsky/graph/getSuggestedFollowsByActor.ts +2 -0
  113. package/src/lexicon/types/app/bsky/unspecced/getSuggestionsSkeleton.ts +2 -0
  114. package/src/lexicon/types/tools/ozone/moderation/defs.ts +2 -0
  115. package/src/lexicon/types/tools/ozone/moderation/queryStatuses.ts +3 -0
  116. package/src/mailer/index.ts +1 -1
  117. package/src/mailer/templates/confirm-email.hbs +106 -336
  118. package/src/mailer/templates/delete-account.hbs +110 -346
  119. package/src/mailer/templates/plc-operation.hbs +107 -338
  120. package/src/mailer/templates/reset-password.d.ts +1 -1
  121. package/src/mailer/templates/reset-password.hbs +108 -344
  122. package/src/mailer/templates/update-email.hbs +107 -337
  123. package/src/pipethrough.ts +489 -233
  124. package/src/read-after-write/util.ts +58 -32
  125. package/tests/account-deletion.test.ts +1 -1
  126. package/tests/account.test.ts +2 -2
  127. package/tests/email-confirmation.test.ts +2 -2
  128. package/tests/plc-operations.test.ts +1 -1
  129. package/tests/proxied/proxy-catchall.test.ts +255 -0
  130. package/tests/proxied/proxy-header.test.ts +31 -1
package/dist/pipethrough.js
@@ -1,234 +1,371 @@
  "use strict";
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
- }) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
- }));
- var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
- }) : function(o, v) {
- o["default"] = v;
- });
- var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
- };
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.parseRes = exports.PROTECTED_METHODS = exports.PRIVILEGED_METHODS = exports.parseProxyRes = exports.pipeProxyRes = exports.makeRequest = exports.parseProxyHeader = exports.formatHeaders = exports.formatUrlAndAud = exports.pipethroughProcedure = exports.pipethrough = exports.proxyHandler = void 0;
- const ui8 = __importStar(require("uint8arrays"));
- const node_net_1 = __importDefault(require("node:net"));
- const node_stream_1 = __importDefault(require("node:stream"));
- const lexicon_1 = require("@atproto/lexicon");
- const xrpc_server_1 = require("@atproto/xrpc-server");
- const xrpc_1 = require("@atproto/xrpc");
+ exports.PROTECTED_METHODS = exports.PRIVILEGED_METHODS = exports.asPipeThroughBuffer = exports.bufferUpstreamResponse = exports.isJsonContentType = exports.parseProxyHeader = exports.pipethrough = exports.proxyHandler = void 0;
+ const node_stream_1 = require("node:stream");
  const common_1 = require("@atproto/common");
+ const xrpc_1 = require("@atproto/xrpc");
+ const xrpc_server_1 = require("@atproto/xrpc-server");
  const lexicons_1 = require("./lexicon/lexicons");
  const logger_1 = require("./logger");
  const proxyHandler = (ctx) => {
  const accessStandard = ctx.authVerifier.accessStandard();
  return async (req, res, next) => {
+ // /!\ Hot path
  try {
- const { url, aud, nsid } = await (0, exports.formatUrlAndAud)(ctx, req);
+ if (req.method !== 'GET' &&
+ req.method !== 'HEAD' &&
+ req.method !== 'POST') {
+ throw new xrpc_server_1.XRPCError(xrpc_1.ResponseType.InvalidRequest, 'XRPC requests only supports GET and POST');
+ }
+ const body = req.method === 'POST' ? req : undefined;
+ if (body != null && !body.readable) {
+ // Body was already consumed by a previous middleware
+ throw new xrpc_server_1.InternalServerError('Request body is not readable');
+ }
+ const lxm = (0, xrpc_server_1.parseReqNsid)(req);
+ if (exports.PROTECTED_METHODS.has(lxm)) {
+ throw new xrpc_server_1.InvalidRequestError('Bad token method', 'InvalidToken');
+ }
  const auth = await accessStandard({ req, res });
- if (exports.PROTECTED_METHODS.has(nsid) ||
- (!auth.credentials.isPrivileged && exports.PRIVILEGED_METHODS.has(nsid))) {
+ if (!auth.credentials.isPrivileged && exports.PRIVILEGED_METHODS.has(lxm)) {
  throw new xrpc_server_1.InvalidRequestError('Bad token method', 'InvalidToken');
  }
- const headers = await (0, exports.formatHeaders)(ctx, req, {
- aud,
- lxm: nsid,
- requester: auth.credentials.did,
+ const { url: origin, did: aud } = await parseProxyInfo(ctx, req, lxm);
+ const headers = {
+ 'accept-encoding': req.headers['accept-encoding'],
+ 'accept-language': req.headers['accept-language'],
+ 'atproto-accept-labelers': req.headers['atproto-accept-labelers'],
+ 'x-bsky-topics': req.headers['x-bsky-topics'],
+ 'content-type': body && req.headers['content-type'],
+ 'content-encoding': body && req.headers['content-encoding'],
+ 'content-length': body && req.headers['content-length'],
+ authorization: auth.credentials.did
+ ? `Bearer ${await ctx.serviceAuthJwt(auth.credentials.did, aud, lxm)}`
+ : undefined,
+ };
+ const dispatchOptions = {
+ origin,
+ method: req.method,
+ path: req.originalUrl,
+ body,
+ headers,
+ };
+ await pipethroughStream(ctx, dispatchOptions, (upstream) => {
+ res.status(upstream.statusCode);
+ for (const [name, val] of responseHeaders(upstream.headers)) {
+ res.setHeader(name, val);
+ }
+ // Note that we should not need to manually handle errors here (e.g. by
+ // destroying the response), as the http server will handle them for us.
+ res.on('error', logResponseError);
+ // Tell undici to write the upstream response directly to the response
+ return res;
  });
- const body = node_stream_1.default.Readable.toWeb(req);
- const reqInit = formatReqInit(req, headers, body);
- const proxyRes = await (0, exports.makeRequest)(url, reqInit);
- await (0, exports.pipeProxyRes)(proxyRes, res);
  }
  catch (err) {
- return next(err);
+ next(err);
  }
- return next();
  };
  };
  exports.proxyHandler = proxyHandler;
- const pipethrough = async (ctx, req, requester, override = {}) => {
- const { url, aud, nsid } = await (0, exports.formatUrlAndAud)(ctx, req, override.aud);
- const lxm = override.lxm ?? nsid;
- const headers = await (0, exports.formatHeaders)(ctx, req, { aud, lxm, requester });
- const reqInit = formatReqInit(req, headers);
- const res = await (0, exports.makeRequest)(url, reqInit);
- return (0, exports.parseProxyRes)(res);
- };
+ // List of content encodings that are supported by the PDS. Because proxying
+ // occurs between data centers, where connectivity is supposedly stable & good,
+ // and because payloads are small, we prefer encoding that are fast (gzip,
+ // deflate, identity) over heavier encodings (Brotli). Upstream servers should
+ // be configured to prefer any encoding over identity in case of big,
+ // uncompressed payloads.
+ const SUPPORTED_ENCODINGS = [
+ ['gzip', { q: '1.0' }],
+ ['deflate', { q: '0.9' }],
+ ['identity', { q: '0.3' }],
+ ['br', { q: '0.1' }],
+ ];
+ async function pipethrough(ctx, req, options) {
+ if (req.method !== 'GET' && req.method !== 'HEAD') {
+ // pipethrough() is used from within xrpcServer handlers, which means that
+ // the request body either has been parsed or is a readable stream that has
+ // been piped for decoding & size limiting. Because of this, forwarding the
+ // request body requires re-encoding it. Since we currently do not use
+ // pipethrough() with procedures, proxying of request body is not
+ // implemented.
+ throw new xrpc_server_1.InternalServerError(`Proxying of ${req.method} requests is not supported`);
+ }
+ const lxm = (0, xrpc_server_1.parseReqNsid)(req);
+ const { url: origin, did: aud } = await parseProxyInfo(ctx, req, lxm);
+ // Because we sometimes need to interpret the response (e.g. during
+ // read-after-write, through asPipeThroughBuffer()), we need to ask the
+ // upstream server for an encoding that both the requester and the PDS can
+ // understand.
+ const acceptEncoding = negotiateAccept(req.headers['accept-encoding'], SUPPORTED_ENCODINGS);
+ const headers = {
+ 'accept-language': req.headers['accept-language'],
+ 'atproto-accept-labelers': req.headers['atproto-accept-labelers'],
+ 'x-bsky-topics': req.headers['x-bsky-topics'],
+ 'accept-encoding': `${formatAccepted(acceptEncoding)}, *;q=0`, // Reject anything else (q=0)
+ authorization: options?.iss
+ ? `Bearer ${await ctx.serviceAuthJwt(options.iss, options.aud ?? aud, options.lxm ?? lxm)}`
+ : undefined,
+ };
+ const dispatchOptions = {
+ origin,
+ method: req.method,
+ path: req.originalUrl,
+ headers,
+ // Use a high water mark to buffer more data while performing async
+ // operations before this stream is consumed. This is especially useful
+ // while processing read-after-write operations.
+ highWaterMark: 2 * 65536, // twice the default (64KiB)
+ };
+ const upstream = await pipethroughRequest(ctx, dispatchOptions);
+ return {
+ stream: upstream.body,
+ headers: Object.fromEntries(responseHeaders(upstream.headers)),
+ encoding: safeString(upstream.headers['content-type']) ?? 'application/json',
+ };
+ }
  exports.pipethrough = pipethrough;
- const pipethroughProcedure = async (ctx, req, requester, body) => {
- const { url, aud, nsid: lxm } = await (0, exports.formatUrlAndAud)(ctx, req);
- const headers = await (0, exports.formatHeaders)(ctx, req, { aud, lxm, requester });
- const encodedBody = body
- ? new TextEncoder().encode((0, lexicon_1.stringifyLex)(body))
- : undefined;
- const reqInit = formatReqInit(req, headers, encodedBody);
- const res = await (0, exports.makeRequest)(url, reqInit);
- return (0, exports.parseProxyRes)(res);
- };
- exports.pipethroughProcedure = pipethroughProcedure;
  // Request setup/formatting
  // -------------------
- const REQ_HEADERS_TO_FORWARD = [
- 'accept-language',
- 'content-type',
- 'atproto-accept-labelers',
- 'x-bsky-topics',
- ];
- const formatUrlAndAud = async (ctx, req, audOverride) => {
- const proxyTo = await (0, exports.parseProxyHeader)(ctx, req);
- const nsid = (0, xrpc_server_1.parseReqNsid)(req);
- const defaultProxy = defaultService(ctx, nsid);
- const serviceUrl = proxyTo?.serviceUrl ?? defaultProxy?.url;
- const aud = audOverride ?? proxyTo?.did ?? defaultProxy?.did;
- if (!serviceUrl || !aud) {
- throw new xrpc_server_1.InvalidRequestError(`No service configured for ${req.path}`);
+ async function parseProxyInfo(ctx, req, lxm) {
+ // /!\ Hot path
+ const proxyToHeader = req.header('atproto-proxy');
+ if (proxyToHeader)
+ return (0, exports.parseProxyHeader)(ctx, proxyToHeader);
+ const defaultProxy = defaultService(ctx, lxm);
+ if (defaultProxy)
+ return defaultProxy;
+ throw new xrpc_server_1.InvalidRequestError(`No service configured for ${lxm}`);
+ }
+ const parseProxyHeader = async (
+ // Using subset of AppContext for testing purposes
+ ctx, proxyTo) => {
+ // /!\ Hot path
+ const hashIndex = proxyTo.indexOf('#');
+ if (hashIndex === 0) {
+ throw new xrpc_server_1.InvalidRequestError('no did specified in proxy header');
  }
- const url = new URL(req.originalUrl, serviceUrl);
- if (!ctx.cfg.service.devMode && !isSafeUrl(url)) {
- throw new xrpc_server_1.InvalidRequestError(`Invalid service url: ${url.toString()}`);
+ if (hashIndex === -1 || hashIndex === proxyTo.length - 1) {
+ throw new xrpc_server_1.InvalidRequestError('no service id specified in proxy header');
  }
- return { url, aud, nsid };
- };
- exports.formatUrlAndAud = formatUrlAndAud;
- const formatHeaders = async (ctx, req, opts) => {
- const { aud, lxm, requester } = opts;
- const headers = requester
- ? (await ctx.serviceAuthHeaders(requester, aud, lxm)).headers
- : {};
- // forward select headers to upstream services
- for (const header of REQ_HEADERS_TO_FORWARD) {
- const val = req.headers[header];
- if (val) {
- headers[header] = val;
- }
+ // More than one hash
+ if (proxyTo.indexOf('#', hashIndex + 1) !== -1) {
+ throw new xrpc_server_1.InvalidRequestError('invalid proxy header format');
  }
- return headers;
- };
- exports.formatHeaders = formatHeaders;
- const formatReqInit = (req, headers, body) => {
- if (req.method === 'GET') {
- return {
- method: 'get',
- headers,
- };
- }
- else if (req.method === 'HEAD') {
- return {
- method: 'head',
- headers,
- };
- }
- else if (req.method === 'POST') {
- return {
- method: 'post',
- headers,
- body,
- duplex: 'half',
- };
- }
- else {
- throw new xrpc_server_1.InvalidRequestError('Method not found');
- }
- };
- const parseProxyHeader = async (ctx, req) => {
- const proxyTo = req.header('atproto-proxy');
- if (!proxyTo)
- return;
- const [did, serviceId] = proxyTo.split('#');
- if (!serviceId) {
- throw new xrpc_server_1.InvalidRequestError('no service id specified');
+ // Basic validation
+ if (proxyTo.includes(' ')) {
+ throw new xrpc_server_1.InvalidRequestError('proxy header cannot contain spaces');
  }
+ const did = proxyTo.slice(0, hashIndex);
  const didDoc = await ctx.idResolver.did.resolve(did);
  if (!didDoc) {
  throw new xrpc_server_1.InvalidRequestError('could not resolve proxy did');
  }
- const serviceUrl = (0, common_1.getServiceEndpoint)(didDoc, { id: `#${serviceId}` });
- if (!serviceUrl) {
+ const serviceId = proxyTo.slice(hashIndex);
+ const url = (0, common_1.getServiceEndpoint)(didDoc, { id: serviceId });
+ if (!url) {
  throw new xrpc_server_1.InvalidRequestError('could not resolve proxy did service url');
  }
- return { did, serviceUrl };
+ return { did, url };
  };
  exports.parseProxyHeader = parseProxyHeader;
- // Sending request
- // -------------------
- const makeRequest = async (url, reqInit) => {
- let res;
+ /**
+ * Utility function that wraps the undici stream() function and handles request
+ * and response errors by wrapping them in XRPCError instances. This function is
+ * more efficient than "pipethroughRequest" when a writable stream to pipe the
+ * upstream response to is available.
+ */
+ async function pipethroughStream(ctx, dispatchOptions, successStreamFactory) {
+ return new Promise((resolve, reject) => {
+ void ctx.proxyAgent
+ .stream(dispatchOptions, (upstream) => {
+ if (upstream.statusCode >= 400) {
+ const passThrough = new node_stream_1.PassThrough();
+ void tryParsingError(upstream.headers, passThrough).then((parsed) => {
+ const xrpcError = new xrpc_1.XRPCError(upstream.statusCode === 500
+ ? xrpc_1.ResponseType.UpstreamFailure
+ : upstream.statusCode, parsed.error, parsed.message, Object.fromEntries(responseHeaders(upstream.headers, false)), { cause: dispatchOptions });
+ reject(xrpcError);
+ }, reject);
+ return passThrough;
+ }
+ const writable = successStreamFactory(upstream);
+ // As soon as the control was passed to the writable stream (i.e. by
+ // returning the writable hereafter), pipethroughStream() is considered
+ // to have succeeded. Any error occurring while writing upstream data to
+ // the writable stream should be handled through the stream's error
+ // state (i.e. successStreamFactory() must ensure that error events on
+ // the returned writable will be handled).
+ resolve();
+ return writable;
+ })
+ // The following catch block will be triggered with either network errors
+ // or writable stream errors. In the latter case, the promise will already
+ // be resolved, and reject()ing it there after will have no effect. Those
+ // error would still be logged by the successStreamFactory() function.
+ .catch(handleUpstreamRequestError)
+ .catch(reject);
+ });
+ }
+ /**
+ * Utility function that wraps the undici request() function and handles request
+ * and response errors by wrapping them in XRPCError instances.
+ */
+ async function pipethroughRequest(ctx, dispatchOptions) {
+ // HandlerPipeThroughStream requires a readable stream to be returned, so we
+ // use the (less efficient) request() function instead.
+ const upstream = await ctx.proxyAgent
+ .request(dispatchOptions)
+ .catch(handleUpstreamRequestError);
+ if (upstream.statusCode >= 400) {
+ const parsed = await tryParsingError(upstream.headers, upstream.body);
+ // Note "XRPCClientError" is used instead of "XRPCServerError" in order to
+ // allow users of this function to capture & handle these errors (namely in
+ // "app.bsky.feed.getPostThread").
+ throw new xrpc_1.XRPCError(upstream.statusCode === 500
+ ? xrpc_1.ResponseType.UpstreamFailure
+ : upstream.statusCode, parsed.error, parsed.message, Object.fromEntries(responseHeaders(upstream.headers, false)), { cause: dispatchOptions });
+ }
+ return upstream;
+ }
+ function handleUpstreamRequestError(err, message = 'pipethrough network error') {
+ logger_1.httpLogger.warn({ err }, message);
+ throw new xrpc_server_1.XRPCError(xrpc_1.ResponseType.UpstreamFailure, message, undefined, {
+ cause: err,
+ });
+ }
+ function negotiateAccept(acceptHeader, supported) {
+ // Optimization: if no accept-encoding header is present, skip negotiation
+ if (!acceptHeader?.length) {
+ return supported;
+ }
+ const acceptNames = extractAcceptedNames(acceptHeader);
+ const common = acceptNames.includes('*')
+ ? supported
+ : supported.filter(nameIncludedIn, acceptNames);
+ // There must be at least one common encoding with a non-zero q value
+ if (!common.some(isNotRejected)) {
+ throw new xrpc_server_1.XRPCError(xrpc_1.ResponseType.NotAcceptable, 'this service does not support any of the requested encodings');
+ }
+ return common;
+ }
+ function formatAccepted(accept) {
+ return accept.map(formatEncodingDev).join(', ');
+ }
+ function formatEncodingDev([enc, flags]) {
+ let ret = enc;
+ for (const name in flags)
+ ret += `;${name}=${flags[name]}`;
+ return ret;
+ }
+ function nameIncludedIn(accept) {
+ return this.includes(accept[0]);
+ }
+ function isNotRejected(accept) {
+ return accept[1]['q'] !== '0';
+ }
+ function extractAcceptedNames(acceptHeader) {
+ if (!acceptHeader?.length) {
+ return ['*'];
+ }
+ return Array.isArray(acceptHeader)
+ ? acceptHeader.flatMap(extractAcceptedNames)
+ : acceptHeader.split(',').map(extractAcceptedName).filter(isNonNullable);
+ }
+ function extractAcceptedName(def) {
+ // No need to fully parse since we only care about allowed values
+ const parts = def.split(';');
+ if (parts.some(isQzero))
+ return undefined;
+ return parts[0].trim();
+ }
+ function isQzero(def) {
+ return def.trim() === 'q=0';
+ }
+ function isNonNullable(val) {
+ return val != null;
+ }
+ function isJsonContentType(contentType) {
+ if (contentType == null)
+ return undefined;
+ return /application\/(?:\w+\+)?json/i.test(contentType);
+ }
+ exports.isJsonContentType = isJsonContentType;
+ async function tryParsingError(headers, readable) {
+ if (isJsonContentType(headers['content-type']) === false) {
+ // We don't known how to parse non JSON content types so we can discard the
+ // whole response.
+ //
+ // @NOTE we could also simply "drain" the stream here. This would prevent
+ // the upstream HTTP/1.1 connection from getting destroyed (closed). This
+ // would however imply to read the whole upstream response, which would be
+ // costly in terms of bandwidth and I/O processing. It is recommended to use
+ // HTTP/2 to avoid this issue (be able to destroy a single response stream
+ // without resetting the whole connection). This is not expected to happen
+ // too much as 4xx and 5xx responses are expected to be JSON.
+ readable.destroy();
+ return {};
+ }
  try {
- res = await fetch(url, reqInit);
+ const buffer = await bufferUpstreamResponse(readable, headers['content-encoding']);
+ const errInfo = JSON.parse(buffer.toString('utf8'));
+ return {
+ error: safeString(errInfo?.['error']),
+ message: safeString(errInfo?.['message']),
+ };
  }
  catch (err) {
- logger_1.httpLogger.warn({ err }, 'pipethrough network error');
- throw new xrpc_1.XRPCError(xrpc_1.ResponseType.UpstreamFailure);
+ // Failed to read, decode, buffer or parse. No big deal.
+ return {};
  }
- if (res.status !== xrpc_1.ResponseType.Success) {
- const arrBuffer = await readArrayBufferRes(res);
- const ui8Buffer = new Uint8Array(arrBuffer);
- const errInfo = safeParseJson(ui8.toString(ui8Buffer, 'utf8'));
- throw new xrpc_1.XRPCError(res.status, safeString(errInfo?.['error']), safeString(errInfo?.['message']), simpleHeaders(res.headers));
+ }
+ async function bufferUpstreamResponse(readable, contentEncoding) {
+ try {
+ // Needed for type-safety (should never happen irl)
+ if (Array.isArray(contentEncoding)) {
+ throw new TypeError('upstream service returned multiple content-encoding headers');
+ }
+ return await (0, common_1.streamToNodeBuffer)((0, common_1.decodeStream)(readable, contentEncoding));
  }
- return res;
- };
- exports.makeRequest = makeRequest;
+ catch (err) {
+ if (!readable.destroyed)
+ readable.destroy();
+ throw new xrpc_server_1.XRPCError(xrpc_1.ResponseType.UpstreamFailure, err instanceof TypeError ? err.message : 'unable to decode request body', undefined, { cause: err });
+ }
+ }
+ exports.bufferUpstreamResponse = bufferUpstreamResponse;
+ async function asPipeThroughBuffer(input) {
+ return {
+ buffer: await bufferUpstreamResponse(input.stream, input.headers?.['content-encoding']),
+ headers: (0, common_1.omit)(input.headers, ['content-encoding', 'content-length']),
+ encoding: input.encoding,
+ };
+ }
+ exports.asPipeThroughBuffer = asPipeThroughBuffer;
  // Response parsing/forwarding
  // -------------------
- const RES_HEADERS_TO_FORWARD = [
- 'content-type',
- 'content-language',
- 'atproto-repo-rev',
- 'atproto-content-labelers',
- ];
- const pipeProxyRes = async (upstreamRes, ownRes) => {
- for (const headerName of RES_HEADERS_TO_FORWARD) {
- const headerVal = upstreamRes.headers.get(headerName);
- if (headerVal) {
- ownRes.setHeader(headerName, headerVal);
- }
- }
- if (upstreamRes.body) {
- const contentLength = upstreamRes.headers.get('content-length');
- const contentEncoding = upstreamRes.headers.get('content-encoding');
- if (contentLength && (!contentEncoding || contentEncoding === 'identity')) {
- ownRes.setHeader('content-length', contentLength);
- }
- else {
- ownRes.setHeader('transfer-encoding', 'chunked');
- }
- ownRes.status(200);
- const resStream = node_stream_1.default.Readable.fromWeb(upstreamRes.body);
- await node_stream_1.default.promises.pipeline(resStream, ownRes);
+ const RES_HEADERS_TO_FORWARD = ['atproto-repo-rev', 'atproto-content-labelers'];
+ function* responseHeaders(headers, includeContentHeaders = true) {
+ if (includeContentHeaders) {
+ const length = headers['content-length'];
+ if (length)
+ yield ['content-length', length];
+ const encoding = headers['content-encoding'];
+ if (encoding)
+ yield ['content-encoding', encoding];
+ const type = headers['content-type'];
+ if (type)
+ yield ['content-type', type];
+ const language = headers['content-language'];
+ if (language)
+ yield ['content-language', language];
  }
- else {
- ownRes.status(200).end();
+ for (let i = 0; i < RES_HEADERS_TO_FORWARD.length; i++) {
+ const name = RES_HEADERS_TO_FORWARD[i];
+ const val = headers[name];
+ if (typeof val === 'string')
+ yield [name, val];
  }
- };
- exports.pipeProxyRes = pipeProxyRes;
- const parseProxyRes = async (res) => {
- const buffer = await readArrayBufferRes(res);
- const encoding = res.headers.get('content-type') ?? 'application/json';
- const resHeaders = RES_HEADERS_TO_FORWARD.reduce((acc, cur) => {
- acc[cur] = res.headers.get(cur) ?? undefined;
- return acc;
- }, {});
- return { encoding, buffer, headers: (0, common_1.noUndefinedVals)(resHeaders) };
- };
- exports.parseProxyRes = parseProxyRes;
+ }
  // Utils
  // -------------------
  exports.PRIVILEGED_METHODS = new Set([
@@ -292,47 +429,10 @@ const defaultService = (ctx, nsid) => {
  return ctx.cfg.bskyAppView;
  }
  };
- const parseRes = (nsid, res) => {
- const buffer = new Uint8Array(res.buffer);
- const json = safeParseJson(ui8.toString(buffer, 'utf8'));
- const lex = json && (0, lexicon_1.jsonToLex)(json);
- return lexicons_1.lexicons.assertValidXrpcOutput(nsid, lex);
- };
- exports.parseRes = parseRes;
- const readArrayBufferRes = async (res) => {
- try {
- return await res.arrayBuffer();
- }
- catch (err) {
- logger_1.httpLogger.warn({ err }, 'pipethrough network error');
- throw new xrpc_1.XRPCError(xrpc_1.ResponseType.UpstreamFailure);
- }
- };
- const isSafeUrl = (url) => {
- if (url.protocol !== 'https:')
- return false;
- if (!url.hostname || url.hostname === 'localhost')
- return false;
- if (node_net_1.default.isIP(url.hostname) !== 0)
- return false;
- return true;
- };
  const safeString = (str) => {
  return typeof str === 'string' ? str : undefined;
  };
- const safeParseJson = (json) => {
- try {
- return JSON.parse(json);
- }
- catch {
- return null;
- }
- };
- const simpleHeaders = (headers) => {
- const result = {};
- for (const [key, val] of headers) {
- result[key] = val;
- }
- return result;
- };
+ function logResponseError(err) {
+ logger_1.httpLogger.warn({ err }, 'error forwarding upstream response');
+ }
  //# sourceMappingURL=pipethrough.js.map
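
For context on the stricter atproto-proxy handling introduced above: the new parseProxyHeader no longer splits on '#' but validates the raw header (a DID, exactly one '#', a non-empty service id, no spaces) before resolving the DID document. The following is a minimal standalone sketch of just those validation rules; splitProxyHeader is a hypothetical helper name, and the real function additionally resolves the DID and looks up the service endpoint via getServiceEndpoint.

// Hypothetical helper mirroring the validation rules enforced by the new
// parseProxyHeader; DID resolution is intentionally left out.
function splitProxyHeader(proxyTo: string): { did: string; serviceId: string } {
  const hashIndex = proxyTo.indexOf('#')
  if (hashIndex === 0) throw new Error('no did specified in proxy header')
  if (hashIndex === -1 || hashIndex === proxyTo.length - 1) {
    throw new Error('no service id specified in proxy header')
  }
  if (proxyTo.indexOf('#', hashIndex + 1) !== -1) {
    throw new Error('invalid proxy header format')
  }
  if (proxyTo.includes(' ')) {
    throw new Error('proxy header cannot contain spaces')
  }
  // The service id keeps its leading '#', matching the fragment id used in the DID document
  return { did: proxyTo.slice(0, hashIndex), serviceId: proxyTo.slice(hashIndex) }
}

For example, a header of 'did:web:api.bsky.app#bsky_appview' would yield the DID 'did:web:api.bsky.app' and the service id '#bsky_appview', which the PDS then resolves to the upstream service URL before dispatching the proxied request.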