@atproto/pds 0.4.59 → 0.4.61
Sign up to get free protection for your applications and to get access to all the features.
- package/CHANGELOG.md +48 -0
- package/dist/account-manager/helpers/account.d.ts +1 -0
- package/dist/account-manager/helpers/account.d.ts.map +1 -1
- package/dist/account-manager/helpers/account.js +15 -1
- package/dist/account-manager/helpers/account.js.map +1 -1
- package/dist/account-manager/helpers/invite.d.ts +1 -1
- package/dist/account-manager/helpers/invite.d.ts.map +1 -1
- package/dist/account-manager/helpers/invite.js +20 -9
- package/dist/account-manager/helpers/invite.js.map +1 -1
- package/dist/account-manager/index.d.ts +2 -0
- package/dist/account-manager/index.d.ts.map +1 -1
- package/dist/account-manager/index.js +8 -1
- package/dist/account-manager/index.js.map +1 -1
- package/dist/api/app/bsky/actor/getProfile.d.ts.map +1 -1
- package/dist/api/app/bsky/actor/getProfile.js +2 -9
- package/dist/api/app/bsky/actor/getProfile.js.map +1 -1
- package/dist/api/app/bsky/actor/getProfiles.d.ts.map +1 -1
- package/dist/api/app/bsky/actor/getProfiles.js +2 -6
- package/dist/api/app/bsky/actor/getProfiles.js.map +1 -1
- package/dist/api/app/bsky/feed/getActorLikes.d.ts.map +1 -1
- package/dist/api/app/bsky/feed/getActorLikes.js +2 -9
- package/dist/api/app/bsky/feed/getActorLikes.js.map +1 -1
- package/dist/api/app/bsky/feed/getAuthorFeed.d.ts.map +1 -1
- package/dist/api/app/bsky/feed/getAuthorFeed.js +2 -9
- package/dist/api/app/bsky/feed/getAuthorFeed.js.map +1 -1
- package/dist/api/app/bsky/feed/getFeed.d.ts.map +1 -1
- package/dist/api/app/bsky/feed/getFeed.js +2 -1
- package/dist/api/app/bsky/feed/getFeed.js.map +1 -1
- package/dist/api/app/bsky/feed/getPostThread.d.ts.map +1 -1
- package/dist/api/app/bsky/feed/getPostThread.js +12 -14
- package/dist/api/app/bsky/feed/getPostThread.js.map +1 -1
- package/dist/api/app/bsky/feed/getTimeline.d.ts.map +1 -1
- package/dist/api/app/bsky/feed/getTimeline.js +2 -6
- package/dist/api/app/bsky/feed/getTimeline.js.map +1 -1
- package/dist/api/com/atproto/admin/getAccountInfo.d.ts.map +1 -1
- package/dist/api/com/atproto/admin/getAccountInfo.js +6 -14
- package/dist/api/com/atproto/admin/getAccountInfo.js.map +1 -1
- package/dist/api/com/atproto/admin/getAccountInfos.d.ts +4 -0
- package/dist/api/com/atproto/admin/getAccountInfos.d.ts.map +1 -0
- package/dist/api/com/atproto/admin/getAccountInfos.js +32 -0
- package/dist/api/com/atproto/admin/getAccountInfos.js.map +1 -0
- package/dist/api/com/atproto/admin/index.d.ts.map +1 -1
- package/dist/api/com/atproto/admin/index.js +2 -0
- package/dist/api/com/atproto/admin/index.js.map +1 -1
- package/dist/api/com/atproto/admin/util.d.ts +17 -0
- package/dist/api/com/atproto/admin/util.d.ts.map +1 -1
- package/dist/api/com/atproto/admin/util.js +27 -1
- package/dist/api/com/atproto/admin/util.js.map +1 -1
- package/dist/api/com/atproto/repo/getRecord.d.ts.map +1 -1
- package/dist/api/com/atproto/repo/getRecord.js +2 -2
- package/dist/api/com/atproto/repo/getRecord.js.map +1 -1
- package/dist/api/com/atproto/server/requestPasswordReset.js +1 -1
- package/dist/api/com/atproto/server/requestPasswordReset.js.map +1 -1
- package/dist/config/config.d.ts +17 -0
- package/dist/config/config.d.ts.map +1 -1
- package/dist/config/config.js +11 -1
- package/dist/config/config.js.map +1 -1
- package/dist/config/env.d.ts +7 -1
- package/dist/config/env.d.ts.map +1 -1
- package/dist/config/env.js +9 -1
- package/dist/config/env.js.map +1 -1
- package/dist/context.d.ts +6 -2
- package/dist/context.d.ts.map +1 -1
- package/dist/context.js +55 -11
- package/dist/context.js.map +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -0
- package/dist/index.js.map +1 -1
- package/dist/lexicon/index.d.ts +4 -0
- package/dist/lexicon/index.d.ts.map +1 -1
- package/dist/lexicon/index.js +8 -0
- package/dist/lexicon/index.js.map +1 -1
- package/dist/lexicon/lexicons.d.ts +118 -0
- package/dist/lexicon/lexicons.d.ts.map +1 -1
- package/dist/lexicon/lexicons.js +135 -3
- package/dist/lexicon/lexicons.js.map +1 -1
- package/dist/lexicon/types/app/bsky/actor/defs.d.ts +2 -0
- package/dist/lexicon/types/app/bsky/actor/defs.d.ts.map +1 -1
- package/dist/lexicon/types/app/bsky/actor/defs.js.map +1 -1
- package/dist/lexicon/types/app/bsky/actor/profile.d.ts +1 -0
- package/dist/lexicon/types/app/bsky/actor/profile.d.ts.map +1 -1
- package/dist/lexicon/types/app/bsky/actor/profile.js.map +1 -1
- package/dist/lexicon/types/app/bsky/feed/defs.d.ts +13 -2
- package/dist/lexicon/types/app/bsky/feed/defs.d.ts.map +1 -1
- package/dist/lexicon/types/app/bsky/feed/defs.js +21 -1
- package/dist/lexicon/types/app/bsky/feed/defs.js.map +1 -1
- package/dist/lexicon/types/app/bsky/feed/getAuthorFeed.d.ts +1 -0
- package/dist/lexicon/types/app/bsky/feed/getAuthorFeed.d.ts.map +1 -1
- package/dist/lexicon/types/app/bsky/graph/getSuggestedFollowsByActor.d.ts +2 -0
- package/dist/lexicon/types/app/bsky/graph/getSuggestedFollowsByActor.d.ts.map +1 -1
- package/dist/lexicon/types/app/bsky/unspecced/getSuggestionsSkeleton.d.ts +2 -0
- package/dist/lexicon/types/app/bsky/unspecced/getSuggestionsSkeleton.d.ts.map +1 -1
- package/dist/lexicon/types/com/atproto/repo/getRecord.d.ts +1 -0
- package/dist/lexicon/types/com/atproto/repo/getRecord.d.ts.map +1 -1
- package/dist/lexicon/types/tools/ozone/moderation/getRecords.d.ts +39 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRecords.d.ts.map +1 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRecords.js +3 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRecords.js.map +1 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRepos.d.ts +39 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRepos.d.ts.map +1 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRepos.js +3 -0
- package/dist/lexicon/types/tools/ozone/moderation/getRepos.js.map +1 -0
- package/dist/mailer/index.d.ts +1 -1
- package/dist/mailer/index.d.ts.map +1 -1
- package/dist/mailer/index.js.map +1 -1
- package/dist/mailer/templates/confirm-email.js +1 -1
- package/dist/mailer/templates/confirm-email.js.map +2 -2
- package/dist/mailer/templates/delete-account.js +1 -1
- package/dist/mailer/templates/delete-account.js.map +2 -2
- package/dist/mailer/templates/plc-operation.js +1 -1
- package/dist/mailer/templates/plc-operation.js.map +2 -2
- package/dist/mailer/templates/reset-password.js +1 -1
- package/dist/mailer/templates/reset-password.js.map +2 -2
- package/dist/mailer/templates/update-email.js +1 -1
- package/dist/mailer/templates/update-email.js.map +2 -2
- package/dist/pipethrough.d.ts +26 -26
- package/dist/pipethrough.d.ts.map +1 -1
- package/dist/pipethrough.js +360 -228
- package/dist/pipethrough.js.map +1 -1
- package/dist/read-after-write/util.d.ts +13 -5
- package/dist/read-after-write/util.d.ts.map +1 -1
- package/dist/read-after-write/util.js +37 -22
- package/dist/read-after-write/util.js.map +1 -1
- package/package.json +15 -14
- package/src/account-manager/helpers/account.ts +22 -0
- package/src/account-manager/helpers/invite.ts +19 -9
- package/src/account-manager/index.ts +13 -1
- package/src/api/app/bsky/actor/getProfile.ts +3 -17
- package/src/api/app/bsky/actor/getProfiles.ts +3 -15
- package/src/api/app/bsky/feed/getActorLikes.ts +3 -19
- package/src/api/app/bsky/feed/getAuthorFeed.ts +3 -17
- package/src/api/app/bsky/feed/getFeed.ts +3 -1
- package/src/api/app/bsky/feed/getPostThread.ts +16 -23
- package/src/api/app/bsky/feed/getTimeline.ts +3 -14
- package/src/api/com/atproto/admin/getAccountInfo.ts +6 -13
- package/src/api/com/atproto/admin/getAccountInfos.ts +33 -0
- package/src/api/com/atproto/admin/index.ts +2 -0
- package/src/api/com/atproto/admin/util.ts +38 -0
- package/src/api/com/atproto/repo/getRecord.ts +5 -2
- package/src/api/com/atproto/server/requestPasswordReset.ts +1 -1
- package/src/config/config.ts +31 -1
- package/src/config/env.ts +22 -2
- package/src/context.ts +62 -17
- package/src/index.ts +1 -0
- package/src/lexicon/index.ts +24 -0
- package/src/lexicon/lexicons.ts +137 -3
- package/src/lexicon/types/app/bsky/actor/defs.ts +2 -0
- package/src/lexicon/types/app/bsky/actor/profile.ts +1 -0
- package/src/lexicon/types/app/bsky/feed/defs.ts +38 -2
- package/src/lexicon/types/app/bsky/feed/getAuthorFeed.ts +1 -0
- package/src/lexicon/types/app/bsky/graph/getSuggestedFollowsByActor.ts +2 -0
- package/src/lexicon/types/app/bsky/unspecced/getSuggestionsSkeleton.ts +2 -0
- package/src/lexicon/types/com/atproto/repo/getRecord.ts +1 -0
- package/src/lexicon/types/tools/ozone/moderation/getRecords.ts +50 -0
- package/src/lexicon/types/tools/ozone/moderation/getRepos.ts +50 -0
- package/src/mailer/index.ts +1 -1
- package/src/mailer/templates/confirm-email.hbs +106 -336
- package/src/mailer/templates/delete-account.hbs +110 -346
- package/src/mailer/templates/plc-operation.hbs +107 -338
- package/src/mailer/templates/reset-password.d.ts +1 -1
- package/src/mailer/templates/reset-password.hbs +108 -344
- package/src/mailer/templates/update-email.hbs +107 -337
- package/src/pipethrough.ts +528 -233
- package/src/read-after-write/util.ts +58 -32
- package/tests/account-deletion.test.ts +1 -1
- package/tests/account.test.ts +2 -2
- package/tests/email-confirmation.test.ts +2 -2
- package/tests/plc-operations.test.ts +1 -1
- package/tests/proxied/proxy-catchall.test.ts +255 -0
- package/tests/proxied/proxy-header.test.ts +31 -1
- package/tests/proxied/read-after-write.test.ts +77 -0
package/src/pipethrough.ts
CHANGED
@@ -1,261 +1,591 @@
|
|
1
1
|
import express from 'express'
|
2
|
-
import
|
3
|
-
import
|
4
|
-
import
|
5
|
-
|
6
|
-
import {
|
2
|
+
import { IncomingHttpHeaders, ServerResponse } from 'node:http'
|
3
|
+
import { PassThrough, Readable } from 'node:stream'
|
4
|
+
import { Dispatcher } from 'undici'
|
5
|
+
|
6
|
+
import {
|
7
|
+
decodeStream,
|
8
|
+
getServiceEndpoint,
|
9
|
+
omit,
|
10
|
+
streamToNodeBuffer,
|
11
|
+
} from '@atproto/common'
|
12
|
+
import { ResponseType, XRPCError as XRPCClientError } from '@atproto/xrpc'
|
7
13
|
import {
|
8
14
|
CatchallHandler,
|
9
|
-
|
15
|
+
HandlerPipeThroughBuffer,
|
16
|
+
HandlerPipeThroughStream,
|
17
|
+
InternalServerError,
|
10
18
|
InvalidRequestError,
|
11
19
|
parseReqNsid,
|
20
|
+
XRPCError as XRPCServerError,
|
12
21
|
} from '@atproto/xrpc-server'
|
13
|
-
|
14
|
-
import { getServiceEndpoint, noUndefinedVals } from '@atproto/common'
|
15
|
-
import { ids, lexicons } from './lexicon/lexicons'
|
16
|
-
import { httpLogger } from './logger'
|
22
|
+
|
17
23
|
import AppContext from './context'
|
24
|
+
import { ids } from './lexicon/lexicons'
|
25
|
+
import { httpLogger } from './logger'
|
18
26
|
|
19
27
|
export const proxyHandler = (ctx: AppContext): CatchallHandler => {
|
20
28
|
const accessStandard = ctx.authVerifier.accessStandard()
|
21
29
|
return async (req, res, next) => {
|
30
|
+
// /!\ Hot path
|
31
|
+
|
22
32
|
try {
|
23
|
-
const { url, aud, nsid } = await formatUrlAndAud(ctx, req)
|
24
|
-
const auth = await accessStandard({ req, res })
|
25
33
|
if (
|
26
|
-
|
27
|
-
|
34
|
+
req.method !== 'GET' &&
|
35
|
+
req.method !== 'HEAD' &&
|
36
|
+
req.method !== 'POST'
|
28
37
|
) {
|
38
|
+
throw new XRPCServerError(
|
39
|
+
ResponseType.InvalidRequest,
|
40
|
+
'XRPC requests only supports GET and POST',
|
41
|
+
)
|
42
|
+
}
|
43
|
+
|
44
|
+
const body = req.method === 'POST' ? req : undefined
|
45
|
+
if (body != null && !body.readable) {
|
46
|
+
// Body was already consumed by a previous middleware
|
47
|
+
throw new InternalServerError('Request body is not readable')
|
48
|
+
}
|
49
|
+
|
50
|
+
const lxm = parseReqNsid(req)
|
51
|
+
if (PROTECTED_METHODS.has(lxm)) {
|
29
52
|
throw new InvalidRequestError('Bad token method', 'InvalidToken')
|
30
53
|
}
|
31
|
-
|
32
|
-
|
33
|
-
|
34
|
-
|
54
|
+
|
55
|
+
const auth = await accessStandard({ req, res })
|
56
|
+
if (!auth.credentials.isPrivileged && PRIVILEGED_METHODS.has(lxm)) {
|
57
|
+
throw new InvalidRequestError('Bad token method', 'InvalidToken')
|
58
|
+
}
|
59
|
+
|
60
|
+
const { url: origin, did: aud } = await parseProxyInfo(ctx, req, lxm)
|
61
|
+
|
62
|
+
const headers: IncomingHttpHeaders = {
|
63
|
+
'accept-encoding': req.headers['accept-encoding'] || 'identity',
|
64
|
+
'accept-language': req.headers['accept-language'],
|
65
|
+
'atproto-accept-labelers': req.headers['atproto-accept-labelers'],
|
66
|
+
'x-bsky-topics': req.headers['x-bsky-topics'],
|
67
|
+
|
68
|
+
'content-type': body && req.headers['content-type'],
|
69
|
+
'content-encoding': body && req.headers['content-encoding'],
|
70
|
+
'content-length': body && req.headers['content-length'],
|
71
|
+
|
72
|
+
authorization: auth.credentials.did
|
73
|
+
? `Bearer ${await ctx.serviceAuthJwt(auth.credentials.did, aud, lxm)}`
|
74
|
+
: undefined,
|
75
|
+
}
|
76
|
+
|
77
|
+
const dispatchOptions: Dispatcher.RequestOptions = {
|
78
|
+
origin,
|
79
|
+
method: req.method,
|
80
|
+
path: req.originalUrl,
|
81
|
+
body,
|
82
|
+
headers,
|
83
|
+
}
|
84
|
+
|
85
|
+
await pipethroughStream(ctx, dispatchOptions, (upstream) => {
|
86
|
+
res.status(upstream.statusCode)
|
87
|
+
|
88
|
+
for (const [name, val] of responseHeaders(upstream.headers)) {
|
89
|
+
res.setHeader(name, val)
|
90
|
+
}
|
91
|
+
|
92
|
+
// Note that we should not need to manually handle errors here (e.g. by
|
93
|
+
// destroying the response), as the http server will handle them for us.
|
94
|
+
res.on('error', logResponseError)
|
95
|
+
|
96
|
+
// Tell undici to write the upstream response directly to the response
|
97
|
+
return res
|
35
98
|
})
|
36
|
-
const body: webStream.ReadableStream<Uint8Array> =
|
37
|
-
stream.Readable.toWeb(req)
|
38
|
-
const reqInit = formatReqInit(req, headers, body)
|
39
|
-
const proxyRes = await makeRequest(url, reqInit)
|
40
|
-
await pipeProxyRes(proxyRes, res)
|
41
99
|
} catch (err) {
|
42
|
-
|
100
|
+
next(err)
|
43
101
|
}
|
44
|
-
return next()
|
45
102
|
}
|
46
103
|
}
|
47
104
|
|
48
|
-
|
49
|
-
|
50
|
-
|
51
|
-
|
52
|
-
|
53
|
-
|
54
|
-
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
|
60
|
-
|
61
|
-
|
62
|
-
|
105
|
+
const ACCEPT_ENCODING_COMPRESSED = [
|
106
|
+
['gzip', { q: 1.0 }],
|
107
|
+
['deflate', { q: 0.9 }],
|
108
|
+
['br', { q: 0.8 }],
|
109
|
+
['identity', { q: 0.1 }],
|
110
|
+
] as const satisfies Accept[]
|
111
|
+
|
112
|
+
const ACCEPT_ENCODING_UNCOMPRESSED = [
|
113
|
+
['identity', { q: 1.0 }],
|
114
|
+
['gzip', { q: 0.3 }],
|
115
|
+
['deflate', { q: 0.2 }],
|
116
|
+
['br', { q: 0.1 }],
|
117
|
+
] as const satisfies Accept[]
|
118
|
+
|
119
|
+
export type PipethroughOptions = {
|
120
|
+
/**
|
121
|
+
* Specify the issuer (requester) for service auth. If not provided, no
|
122
|
+
* authorization headers will be added to the request.
|
123
|
+
*/
|
124
|
+
iss?: string
|
125
|
+
|
126
|
+
/**
|
127
|
+
* Override the audience for service auth. If not provided, the audience will
|
128
|
+
* be determined based on the proxy service.
|
129
|
+
*/
|
130
|
+
aud?: string
|
131
|
+
|
132
|
+
/**
|
133
|
+
* Override the lexicon method for service auth. If not provided, the lexicon
|
134
|
+
* method will be determined based on the request path.
|
135
|
+
*/
|
136
|
+
lxm?: string
|
63
137
|
}
|
64
138
|
|
65
|
-
export
|
139
|
+
export async function pipethrough(
|
66
140
|
ctx: AppContext,
|
67
141
|
req: express.Request,
|
68
|
-
|
69
|
-
|
70
|
-
|
71
|
-
|
72
|
-
|
73
|
-
|
74
|
-
|
75
|
-
|
76
|
-
|
77
|
-
|
78
|
-
|
79
|
-
|
142
|
+
options?: PipethroughOptions,
|
143
|
+
): Promise<
|
144
|
+
HandlerPipeThroughStream & {
|
145
|
+
stream: Readable
|
146
|
+
headers: Record<string, string>
|
147
|
+
encoding: string
|
148
|
+
}
|
149
|
+
> {
|
150
|
+
if (req.method !== 'GET' && req.method !== 'HEAD') {
|
151
|
+
// pipethrough() is used from within xrpcServer handlers, which means that
|
152
|
+
// the request body either has been parsed or is a readable stream that has
|
153
|
+
// been piped for decoding & size limiting. Because of this, forwarding the
|
154
|
+
// request body requires re-encoding it. Since we currently do not use
|
155
|
+
// pipethrough() with procedures, proxying of request body is not
|
156
|
+
// implemented.
|
157
|
+
throw new InternalServerError(
|
158
|
+
`Proxying of ${req.method} requests is not supported`,
|
159
|
+
)
|
160
|
+
}
|
80
161
|
|
81
|
-
|
82
|
-
// -------------------
|
162
|
+
const lxm = parseReqNsid(req)
|
83
163
|
|
84
|
-
const
|
85
|
-
'accept-language',
|
86
|
-
'content-type',
|
87
|
-
'atproto-accept-labelers',
|
88
|
-
'x-bsky-topics',
|
89
|
-
]
|
164
|
+
const { url: origin, did: aud } = await parseProxyInfo(ctx, req, lxm)
|
90
165
|
|
91
|
-
|
92
|
-
|
93
|
-
|
94
|
-
|
95
|
-
|
96
|
-
|
97
|
-
|
98
|
-
|
99
|
-
|
100
|
-
|
101
|
-
|
102
|
-
|
166
|
+
const dispatchOptions: Dispatcher.RequestOptions = {
|
167
|
+
origin,
|
168
|
+
method: req.method,
|
169
|
+
path: req.originalUrl,
|
170
|
+
headers: {
|
171
|
+
'accept-language': req.headers['accept-language'],
|
172
|
+
'atproto-accept-labelers': req.headers['atproto-accept-labelers'],
|
173
|
+
'x-bsky-topics': req.headers['x-bsky-topics'],
|
174
|
+
|
175
|
+
// Because we sometimes need to interpret the response (e.g. during
|
176
|
+
// read-after-write, through asPipeThroughBuffer()), we need to ask the
|
177
|
+
// upstream server for an encoding that both the requester and the PDS can
|
178
|
+
// understand. Since we might have to do the decoding ourselves, we will
|
179
|
+
// use our own preferences (and weight) to negotiate the encoding.
|
180
|
+
'accept-encoding': negotiateContentEncoding(
|
181
|
+
req.headers['accept-encoding'],
|
182
|
+
ctx.cfg.proxy.preferCompressed
|
183
|
+
? ACCEPT_ENCODING_COMPRESSED
|
184
|
+
: ACCEPT_ENCODING_UNCOMPRESSED,
|
185
|
+
),
|
186
|
+
|
187
|
+
authorization: options?.iss
|
188
|
+
? `Bearer ${await ctx.serviceAuthJwt(options.iss, options.aud ?? aud, options.lxm ?? lxm)}`
|
189
|
+
: undefined,
|
190
|
+
},
|
191
|
+
|
192
|
+
// Use a high water mark to buffer more data while performing async
|
193
|
+
// operations before this stream is consumed. This is especially useful
|
194
|
+
// while processing read-after-write operations.
|
195
|
+
highWaterMark: 2 * 65536, // twice the default (64KiB)
|
103
196
|
}
|
104
|
-
|
105
|
-
|
106
|
-
|
197
|
+
|
198
|
+
const { headers, body } = await pipethroughRequest(ctx, dispatchOptions)
|
199
|
+
|
200
|
+
return {
|
201
|
+
encoding: safeString(headers['content-type']) ?? 'application/json',
|
202
|
+
headers: Object.fromEntries(responseHeaders(headers)),
|
203
|
+
stream: body,
|
107
204
|
}
|
108
|
-
return { url, aud, nsid }
|
109
205
|
}
|
110
206
|
|
111
|
-
|
207
|
+
// Request setup/formatting
|
208
|
+
// -------------------
|
209
|
+
|
210
|
+
async function parseProxyInfo(
|
112
211
|
ctx: AppContext,
|
113
212
|
req: express.Request,
|
114
|
-
|
115
|
-
|
116
|
-
|
117
|
-
requester: string | null
|
118
|
-
},
|
119
|
-
): Promise<{ authorization?: string }> => {
|
120
|
-
const { aud, lxm, requester } = opts
|
121
|
-
const headers = requester
|
122
|
-
? (await ctx.serviceAuthHeaders(requester, aud, lxm)).headers
|
123
|
-
: {}
|
124
|
-
// forward select headers to upstream services
|
125
|
-
for (const header of REQ_HEADERS_TO_FORWARD) {
|
126
|
-
const val = req.headers[header]
|
127
|
-
if (val) {
|
128
|
-
headers[header] = val
|
129
|
-
}
|
130
|
-
}
|
131
|
-
return headers
|
132
|
-
}
|
213
|
+
lxm: string,
|
214
|
+
): Promise<{ url: string; did: string }> {
|
215
|
+
// /!\ Hot path
|
133
216
|
|
134
|
-
const
|
135
|
-
|
136
|
-
|
137
|
-
|
138
|
-
)
|
139
|
-
|
140
|
-
|
141
|
-
method: 'get',
|
142
|
-
headers,
|
143
|
-
}
|
144
|
-
} else if (req.method === 'HEAD') {
|
145
|
-
return {
|
146
|
-
method: 'head',
|
147
|
-
headers,
|
148
|
-
}
|
149
|
-
} else if (req.method === 'POST') {
|
150
|
-
return {
|
151
|
-
method: 'post',
|
152
|
-
headers,
|
153
|
-
body,
|
154
|
-
duplex: 'half',
|
155
|
-
} as RequestInit
|
156
|
-
} else {
|
157
|
-
throw new InvalidRequestError('Method not found')
|
158
|
-
}
|
217
|
+
const proxyToHeader = req.header('atproto-proxy')
|
218
|
+
if (proxyToHeader) return parseProxyHeader(ctx, proxyToHeader)
|
219
|
+
|
220
|
+
const defaultProxy = defaultService(ctx, lxm)
|
221
|
+
if (defaultProxy) return defaultProxy
|
222
|
+
|
223
|
+
throw new InvalidRequestError(`No service configured for ${lxm}`)
|
159
224
|
}
|
160
225
|
|
161
226
|
export const parseProxyHeader = async (
|
162
|
-
|
163
|
-
|
164
|
-
|
165
|
-
|
166
|
-
|
167
|
-
|
168
|
-
|
169
|
-
|
227
|
+
// Using subset of AppContext for testing purposes
|
228
|
+
ctx: Pick<AppContext, 'idResolver'>,
|
229
|
+
proxyTo: string,
|
230
|
+
): Promise<{ did: string; url: string }> => {
|
231
|
+
// /!\ Hot path
|
232
|
+
|
233
|
+
const hashIndex = proxyTo.indexOf('#')
|
234
|
+
|
235
|
+
if (hashIndex === 0) {
|
236
|
+
throw new InvalidRequestError('no did specified in proxy header')
|
170
237
|
}
|
238
|
+
|
239
|
+
if (hashIndex === -1 || hashIndex === proxyTo.length - 1) {
|
240
|
+
throw new InvalidRequestError('no service id specified in proxy header')
|
241
|
+
}
|
242
|
+
|
243
|
+
// More than one hash
|
244
|
+
if (proxyTo.indexOf('#', hashIndex + 1) !== -1) {
|
245
|
+
throw new InvalidRequestError('invalid proxy header format')
|
246
|
+
}
|
247
|
+
|
248
|
+
// Basic validation
|
249
|
+
if (proxyTo.includes(' ')) {
|
250
|
+
throw new InvalidRequestError('proxy header cannot contain spaces')
|
251
|
+
}
|
252
|
+
|
253
|
+
const did = proxyTo.slice(0, hashIndex)
|
171
254
|
const didDoc = await ctx.idResolver.did.resolve(did)
|
172
255
|
if (!didDoc) {
|
173
256
|
throw new InvalidRequestError('could not resolve proxy did')
|
174
257
|
}
|
175
|
-
|
176
|
-
|
258
|
+
|
259
|
+
const serviceId = proxyTo.slice(hashIndex)
|
260
|
+
const url = getServiceEndpoint(didDoc, { id: serviceId })
|
261
|
+
if (!url) {
|
177
262
|
throw new InvalidRequestError('could not resolve proxy did service url')
|
178
263
|
}
|
179
|
-
|
264
|
+
|
265
|
+
return { did, url }
|
180
266
|
}
|
181
267
|
|
182
|
-
|
183
|
-
|
268
|
+
/**
|
269
|
+
* Utility function that wraps the undici stream() function and handles request
|
270
|
+
* and response errors by wrapping them in XRPCError instances. This function is
|
271
|
+
* more efficient than "pipethroughRequest" when a writable stream to pipe the
|
272
|
+
* upstream response to is available.
|
273
|
+
*/
|
274
|
+
async function pipethroughStream(
|
275
|
+
ctx: AppContext,
|
276
|
+
dispatchOptions: Dispatcher.RequestOptions,
|
277
|
+
successStreamFactory: Dispatcher.StreamFactory,
|
278
|
+
): Promise<void> {
|
279
|
+
return new Promise<void>((resolve, reject) => {
|
280
|
+
void ctx.proxyAgent
|
281
|
+
.stream(dispatchOptions, (upstream) => {
|
282
|
+
if (upstream.statusCode >= 400) {
|
283
|
+
const passThrough = new PassThrough()
|
184
284
|
|
185
|
-
|
186
|
-
|
187
|
-
|
188
|
-
|
189
|
-
|
190
|
-
|
191
|
-
|
192
|
-
|
193
|
-
|
194
|
-
|
195
|
-
|
196
|
-
|
197
|
-
|
198
|
-
|
199
|
-
|
200
|
-
|
201
|
-
|
202
|
-
|
203
|
-
|
204
|
-
|
285
|
+
void tryParsingError(upstream.headers, passThrough).then((parsed) => {
|
286
|
+
const xrpcError = new XRPCClientError(
|
287
|
+
upstream.statusCode === 500
|
288
|
+
? ResponseType.UpstreamFailure
|
289
|
+
: upstream.statusCode,
|
290
|
+
parsed.error,
|
291
|
+
parsed.message,
|
292
|
+
Object.fromEntries(responseHeaders(upstream.headers, false)),
|
293
|
+
{ cause: dispatchOptions },
|
294
|
+
)
|
295
|
+
|
296
|
+
reject(xrpcError)
|
297
|
+
}, reject)
|
298
|
+
|
299
|
+
return passThrough
|
300
|
+
}
|
301
|
+
|
302
|
+
const writable = successStreamFactory(upstream)
|
303
|
+
|
304
|
+
// As soon as the control was passed to the writable stream (i.e. by
|
305
|
+
// returning the writable hereafter), pipethroughStream() is considered
|
306
|
+
// to have succeeded. Any error occurring while writing upstream data to
|
307
|
+
// the writable stream should be handled through the stream's error
|
308
|
+
// state (i.e. successStreamFactory() must ensure that error events on
|
309
|
+
// the returned writable will be handled).
|
310
|
+
resolve()
|
311
|
+
|
312
|
+
return writable
|
313
|
+
})
|
314
|
+
// The following catch block will be triggered with either network errors
|
315
|
+
// or writable stream errors. In the latter case, the promise will already
|
316
|
+
// be resolved, and reject()ing it there after will have no effect. Those
|
317
|
+
// error would still be logged by the successStreamFactory() function.
|
318
|
+
.catch(handleUpstreamRequestError)
|
319
|
+
.catch(reject)
|
320
|
+
})
|
321
|
+
}
|
322
|
+
|
323
|
+
/**
|
324
|
+
* Utility function that wraps the undici request() function and handles request
|
325
|
+
* and response errors by wrapping them in XRPCError instances.
|
326
|
+
*/
|
327
|
+
async function pipethroughRequest(
|
328
|
+
ctx: AppContext,
|
329
|
+
dispatchOptions: Dispatcher.RequestOptions,
|
330
|
+
) {
|
331
|
+
// HandlerPipeThroughStream requires a readable stream to be returned, so we
|
332
|
+
// use the (less efficient) request() function instead.
|
333
|
+
|
334
|
+
const upstream = await ctx.proxyAgent
|
335
|
+
.request(dispatchOptions)
|
336
|
+
.catch(handleUpstreamRequestError)
|
337
|
+
|
338
|
+
if (upstream.statusCode >= 400) {
|
339
|
+
const parsed = await tryParsingError(upstream.headers, upstream.body)
|
340
|
+
|
341
|
+
// Note "XRPCClientError" is used instead of "XRPCServerError" in order to
|
342
|
+
// allow users of this function to capture & handle these errors (namely in
|
343
|
+
// "app.bsky.feed.getPostThread").
|
344
|
+
throw new XRPCClientError(
|
345
|
+
upstream.statusCode === 500
|
346
|
+
? ResponseType.UpstreamFailure
|
347
|
+
: upstream.statusCode,
|
348
|
+
parsed.error,
|
349
|
+
parsed.message,
|
350
|
+
Object.fromEntries(responseHeaders(upstream.headers, false)),
|
351
|
+
{ cause: dispatchOptions },
|
205
352
|
)
|
206
353
|
}
|
207
|
-
|
354
|
+
|
355
|
+
return upstream
|
208
356
|
}
|
209
357
|
|
210
|
-
|
358
|
+
function handleUpstreamRequestError(
|
359
|
+
err: unknown,
|
360
|
+
message = 'pipethrough network error',
|
361
|
+
): never {
|
362
|
+
httpLogger.warn({ err }, message)
|
363
|
+
throw new XRPCServerError(ResponseType.UpstreamFailure, message, undefined, {
|
364
|
+
cause: err,
|
365
|
+
})
|
366
|
+
}
|
367
|
+
|
368
|
+
// Request parsing/forwarding
|
211
369
|
// -------------------
|
212
370
|
|
213
|
-
|
214
|
-
|
215
|
-
|
216
|
-
|
217
|
-
|
218
|
-
]
|
219
|
-
|
220
|
-
|
221
|
-
|
222
|
-
|
223
|
-
)
|
224
|
-
|
225
|
-
|
226
|
-
|
227
|
-
|
371
|
+
type AcceptFlags = { q: number }
|
372
|
+
type Accept = [name: string, flags: AcceptFlags]
|
373
|
+
|
374
|
+
// accept-encoding defaults to "identity with lowest priority"
|
375
|
+
const ACCEPT_ENC_DEFAULT = ['identity', { q: 0.001 }] as const satisfies Accept
|
376
|
+
const ACCEPT_FORBID_STAR = ['*', { q: 0 }] as const satisfies Accept
|
377
|
+
|
378
|
+
function negotiateContentEncoding(
|
379
|
+
acceptHeader: undefined | string | string[],
|
380
|
+
preferences: readonly Accept[],
|
381
|
+
): string {
|
382
|
+
const acceptMap = Object.fromEntries<undefined | AcceptFlags>(
|
383
|
+
parseAcceptEncoding(acceptHeader),
|
384
|
+
)
|
385
|
+
|
386
|
+
// Make sure the default (identity) is covered by the preferences
|
387
|
+
if (!preferences.some(coversIdentityAccept)) {
|
388
|
+
preferences = [...preferences, ACCEPT_ENC_DEFAULT]
|
389
|
+
}
|
390
|
+
|
391
|
+
const common = preferences.filter(([name]) => {
|
392
|
+
const acceptQ = (acceptMap[name] ?? acceptMap['*'])?.q
|
393
|
+
// Per HTTP/1.1, "identity" is always acceptable unless explicitly rejected
|
394
|
+
if (name === 'identity') {
|
395
|
+
return acceptQ == null || acceptQ > 0
|
396
|
+
} else {
|
397
|
+
return acceptQ != null && acceptQ > 0
|
228
398
|
}
|
399
|
+
})
|
400
|
+
|
401
|
+
// Since "identity" was present in the preferences, a missing "identity" in
|
402
|
+
// the common array means that the client explicitly rejected it. Let's reflect
|
403
|
+
// this by adding it to the common array.
|
404
|
+
if (!common.some(coversIdentityAccept)) {
|
405
|
+
common.push(ACCEPT_FORBID_STAR)
|
406
|
+
}
|
407
|
+
|
408
|
+
// If no common encodings are acceptable, throw a 406 Not Acceptable error
|
409
|
+
if (!common.some(isAllowedAccept)) {
|
410
|
+
throw new XRPCServerError(
|
411
|
+
ResponseType.NotAcceptable,
|
412
|
+
'this service does not support any of the requested encodings',
|
413
|
+
)
|
229
414
|
}
|
230
|
-
|
231
|
-
|
232
|
-
|
233
|
-
|
234
|
-
|
415
|
+
|
416
|
+
return formatAcceptHeader(common as [Accept, ...Accept[]])
|
417
|
+
}
|
418
|
+
|
419
|
+
function coversIdentityAccept([name]: Accept): boolean {
|
420
|
+
return name === 'identity' || name === '*'
|
421
|
+
}
|
422
|
+
|
423
|
+
function isAllowedAccept([, flags]: Accept): boolean {
|
424
|
+
return flags.q > 0
|
425
|
+
}
|
426
|
+
|
427
|
+
/**
|
428
|
+
* @see {@link https://developer.mozilla.org/en-US/docs/Glossary/Quality_values}
|
429
|
+
*/
|
430
|
+
function formatAcceptHeader(accept: readonly [Accept, ...Accept[]]): string {
|
431
|
+
return accept.map(formatAcceptPart).join(',')
|
432
|
+
}
|
433
|
+
|
434
|
+
function formatAcceptPart([name, flags]: Accept): string {
|
435
|
+
return `${name};q=${flags.q}`
|
436
|
+
}
|
437
|
+
|
438
|
+
function parseAcceptEncoding(
|
439
|
+
acceptEncodings: undefined | string | string[],
|
440
|
+
): Accept[] {
|
441
|
+
if (!acceptEncodings?.length) return []
|
442
|
+
|
443
|
+
return Array.isArray(acceptEncodings)
|
444
|
+
? acceptEncodings.flatMap(parseAcceptEncoding)
|
445
|
+
: acceptEncodings.split(',').map(parseAcceptEncodingDefinition)
|
446
|
+
}
|
447
|
+
|
448
|
+
function parseAcceptEncodingDefinition(def: string): Accept {
|
449
|
+
const { length, 0: encoding, 1: params } = def.trim().split(';', 3)
|
450
|
+
|
451
|
+
if (length > 2) {
|
452
|
+
throw new InvalidRequestError(`Invalid accept-encoding: "${def}"`)
|
453
|
+
}
|
454
|
+
|
455
|
+
if (!encoding || encoding.includes('=')) {
|
456
|
+
throw new InvalidRequestError(`Invalid accept-encoding: "${def}"`)
|
457
|
+
}
|
458
|
+
|
459
|
+
const flags = { q: 1 }
|
460
|
+
if (length === 2) {
|
461
|
+
const { length, 0: key, 1: value } = params.split('=', 3)
|
462
|
+
if (length !== 2) {
|
463
|
+
throw new InvalidRequestError(`Invalid accept-encoding: "${def}"`)
|
464
|
+
}
|
465
|
+
|
466
|
+
if (key === 'q' || key === 'Q') {
|
467
|
+
const q = parseFloat(value)
|
468
|
+
if (q === 0 || (Number.isFinite(q) && q <= 1 && q >= 0.001)) {
|
469
|
+
flags.q = q
|
470
|
+
} else {
|
471
|
+
throw new InvalidRequestError(`Invalid accept-encoding: "${def}"`)
|
472
|
+
}
|
235
473
|
} else {
|
236
|
-
|
474
|
+
throw new InvalidRequestError(`Invalid accept-encoding: "${def}"`)
|
475
|
+
}
|
476
|
+
}
|
477
|
+
|
478
|
+
return [encoding.toLowerCase(), flags]
|
479
|
+
}
|
480
|
+
|
481
|
+
export function isJsonContentType(contentType?: string): boolean | undefined {
|
482
|
+
if (!contentType) return undefined
|
483
|
+
return /application\/(?:\w+\+)?json/i.test(contentType)
|
484
|
+
}
|
485
|
+
|
486
|
+
async function tryParsingError(
|
487
|
+
headers: IncomingHttpHeaders,
|
488
|
+
readable: Readable,
|
489
|
+
): Promise<{ error?: string; message?: string }> {
|
490
|
+
if (isJsonContentType(headers['content-type']) === false) {
|
491
|
+
// We don't known how to parse non JSON content types so we can discard the
|
492
|
+
// whole response.
|
493
|
+
//
|
494
|
+
// @NOTE we could also simply "drain" the stream here. This would prevent
|
495
|
+
// the upstream HTTP/1.1 connection from getting destroyed (closed). This
|
496
|
+
// would however imply to read the whole upstream response, which would be
|
497
|
+
// costly in terms of bandwidth and I/O processing. It is recommended to use
|
498
|
+
// HTTP/2 to avoid this issue (be able to destroy a single response stream
|
499
|
+
// without resetting the whole connection). This is not expected to happen
|
500
|
+
// too much as 4xx and 5xx responses are expected to be JSON.
|
501
|
+
readable.destroy()
|
502
|
+
|
503
|
+
return {}
|
504
|
+
}
|
505
|
+
|
506
|
+
try {
|
507
|
+
const buffer = await bufferUpstreamResponse(
|
508
|
+
readable,
|
509
|
+
headers['content-encoding'],
|
510
|
+
)
|
511
|
+
|
512
|
+
const errInfo: unknown = JSON.parse(buffer.toString('utf8'))
|
513
|
+
return {
|
514
|
+
error: safeString(errInfo?.['error']),
|
515
|
+
message: safeString(errInfo?.['message']),
|
237
516
|
}
|
238
|
-
|
239
|
-
|
240
|
-
|
517
|
+
} catch (err) {
|
518
|
+
// Failed to read, decode, buffer or parse. No big deal.
|
519
|
+
return {}
|
520
|
+
}
|
521
|
+
}
|
522
|
+
|
523
|
+
export async function bufferUpstreamResponse(
|
524
|
+
readable: Readable,
|
525
|
+
contentEncoding?: string | string[],
|
526
|
+
): Promise<Buffer> {
|
527
|
+
try {
|
528
|
+
// Needed for type-safety (should never happen irl)
|
529
|
+
if (Array.isArray(contentEncoding)) {
|
530
|
+
throw new TypeError(
|
531
|
+
'upstream service returned multiple content-encoding headers',
|
532
|
+
)
|
533
|
+
}
|
534
|
+
|
535
|
+
return await streamToNodeBuffer(decodeStream(readable, contentEncoding))
|
536
|
+
} catch (err) {
|
537
|
+
if (!readable.destroyed) readable.destroy()
|
538
|
+
|
539
|
+
throw new XRPCServerError(
|
540
|
+
ResponseType.UpstreamFailure,
|
541
|
+
err instanceof TypeError ? err.message : 'unable to decode request body',
|
542
|
+
undefined,
|
543
|
+
{ cause: err },
|
241
544
|
)
|
242
|
-
await stream.promises.pipeline(resStream, ownRes)
|
243
|
-
} else {
|
244
|
-
ownRes.status(200).end()
|
245
545
|
}
|
246
546
|
}
|
247
547
|
|
248
|
-
export
|
249
|
-
|
250
|
-
|
251
|
-
|
252
|
-
|
253
|
-
|
254
|
-
|
255
|
-
|
256
|
-
|
257
|
-
|
258
|
-
|
548
|
+
export async function asPipeThroughBuffer(
|
549
|
+
input: HandlerPipeThroughStream,
|
550
|
+
): Promise<HandlerPipeThroughBuffer> {
|
551
|
+
return {
|
552
|
+
buffer: await bufferUpstreamResponse(
|
553
|
+
input.stream,
|
554
|
+
input.headers?.['content-encoding'],
|
555
|
+
),
|
556
|
+
headers: omit(input.headers, ['content-encoding', 'content-length']),
|
557
|
+
encoding: input.encoding,
|
558
|
+
}
|
559
|
+
}
|
560
|
+
|
561
|
+
// Response parsing/forwarding
|
562
|
+
// -------------------
|
563
|
+
|
564
|
+
const RES_HEADERS_TO_FORWARD = ['atproto-repo-rev', 'atproto-content-labelers']
|
565
|
+
|
566
|
+
function* responseHeaders(
|
567
|
+
headers: IncomingHttpHeaders,
|
568
|
+
includeContentHeaders = true,
|
569
|
+
): Generator<[string, string]> {
|
570
|
+
if (includeContentHeaders) {
|
571
|
+
const length = headers['content-length']
|
572
|
+
if (length) yield ['content-length', length]
|
573
|
+
|
574
|
+
const encoding = headers['content-encoding']
|
575
|
+
if (encoding) yield ['content-encoding', encoding]
|
576
|
+
|
577
|
+
const type = headers['content-type']
|
578
|
+
if (type) yield ['content-type', type]
|
579
|
+
|
580
|
+
const language = headers['content-language']
|
581
|
+
if (language) yield ['content-language', language]
|
582
|
+
}
|
583
|
+
|
584
|
+
for (let i = 0; i < RES_HEADERS_TO_FORWARD.length; i++) {
|
585
|
+
const name = RES_HEADERS_TO_FORWARD[i]
|
586
|
+
const val = headers[name]
|
587
|
+
if (typeof val === 'string') yield [name, val]
|
588
|
+
}
|
259
589
|
}
|
260
590
|
|
261
591
|
// Utils
|
@@ -328,45 +658,10 @@ const defaultService = (
|
|
328
658
|
}
|
329
659
|
}
|
330
660
|
|
331
|
-
|
332
|
-
const buffer = new Uint8Array(res.buffer)
|
333
|
-
const json = safeParseJson(ui8.toString(buffer, 'utf8'))
|
334
|
-
const lex = json && jsonToLex(json)
|
335
|
-
return lexicons.assertValidXrpcOutput(nsid, lex) as T
|
336
|
-
}
|
337
|
-
|
338
|
-
const readArrayBufferRes = async (res: Response): Promise<ArrayBuffer> => {
|
339
|
-
try {
|
340
|
-
return await res.arrayBuffer()
|
341
|
-
} catch (err) {
|
342
|
-
httpLogger.warn({ err }, 'pipethrough network error')
|
343
|
-
throw new XRPCError(ResponseType.UpstreamFailure)
|
344
|
-
}
|
345
|
-
}
|
346
|
-
|
347
|
-
const isSafeUrl = (url: URL) => {
|
348
|
-
if (url.protocol !== 'https:') return false
|
349
|
-
if (!url.hostname || url.hostname === 'localhost') return false
|
350
|
-
if (net.isIP(url.hostname) !== 0) return false
|
351
|
-
return true
|
352
|
-
}
|
353
|
-
|
354
|
-
const safeString = (str: string): string | undefined => {
|
661
|
+
const safeString = (str: unknown): string | undefined => {
|
355
662
|
return typeof str === 'string' ? str : undefined
|
356
663
|
}
|
357
664
|
|
358
|
-
|
359
|
-
|
360
|
-
return JSON.parse(json)
|
361
|
-
} catch {
|
362
|
-
return null
|
363
|
-
}
|
364
|
-
}
|
365
|
-
|
366
|
-
const simpleHeaders = (headers: Headers): Record<string, string> => {
|
367
|
-
const result = {}
|
368
|
-
for (const [key, val] of headers) {
|
369
|
-
result[key] = val
|
370
|
-
}
|
371
|
-
return result
|
665
|
+
// Logs (and swallows) an error raised while forwarding an upstream response.
// NOTE(review): the `this: ServerResponse` parameter suggests this is meant
// to be attached as an event listener on the response being written to —
// confirm at the call site, which is not visible in this chunk.
function logResponseError(this: ServerResponse, err: unknown): void {
  httpLogger.warn({ err }, 'error forwarding upstream response')
}
|