@atproto/bsky 0.0.103 → 0.0.105

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/CHANGELOG.md +31 -0
  2. package/dist/api/blob-dispatcher.d.ts +4 -0
  3. package/dist/api/blob-dispatcher.d.ts.map +1 -0
  4. package/dist/api/blob-dispatcher.js +37 -0
  5. package/dist/api/blob-dispatcher.js.map +1 -0
  6. package/dist/api/blob-resolver.d.ts +17 -8
  7. package/dist/api/blob-resolver.d.ts.map +1 -1
  8. package/dist/api/blob-resolver.js +246 -99
  9. package/dist/api/blob-resolver.js.map +1 -1
  10. package/dist/api/well-known.d.ts.map +1 -1
  11. package/dist/api/well-known.js +30 -24
  12. package/dist/api/well-known.js.map +1 -1
  13. package/dist/config.d.ts +14 -1
  14. package/dist/config.d.ts.map +1 -1
  15. package/dist/config.js +40 -5
  16. package/dist/config.js.map +1 -1
  17. package/dist/context.d.ts +3 -0
  18. package/dist/context.d.ts.map +1 -1
  19. package/dist/context.js +3 -0
  20. package/dist/context.js.map +1 -1
  21. package/dist/data-plane/server/indexing/index.js +2 -2
  22. package/dist/image/server.d.ts +7 -13
  23. package/dist/image/server.d.ts.map +1 -1
  24. package/dist/image/server.js +119 -115
  25. package/dist/image/server.js.map +1 -1
  26. package/dist/image/sharp.d.ts +11 -2
  27. package/dist/image/sharp.d.ts.map +1 -1
  28. package/dist/image/sharp.js +35 -38
  29. package/dist/image/sharp.js.map +1 -1
  30. package/dist/image/util.d.ts +6 -4
  31. package/dist/image/util.d.ts.map +1 -1
  32. package/dist/image/util.js +14 -10
  33. package/dist/image/util.js.map +1 -1
  34. package/dist/index.d.ts +1 -1
  35. package/dist/index.d.ts.map +1 -1
  36. package/dist/index.js +6 -10
  37. package/dist/index.js.map +1 -1
  38. package/dist/util/http.d.ts +12 -0
  39. package/dist/util/http.d.ts.map +1 -0
  40. package/dist/util/http.js +36 -0
  41. package/dist/util/http.js.map +1 -0
  42. package/dist/util/retry.d.ts +2 -5
  43. package/dist/util/retry.d.ts.map +1 -1
  44. package/dist/util/retry.js +8 -27
  45. package/dist/util/retry.js.map +1 -1
  46. package/package.json +18 -14
  47. package/src/api/blob-dispatcher.ts +38 -0
  48. package/src/api/blob-resolver.ts +341 -106
  49. package/src/api/well-known.ts +31 -24
  50. package/src/config.ts +63 -6
  51. package/src/context.ts +6 -0
  52. package/src/data-plane/server/indexing/index.ts +3 -3
  53. package/src/image/server.ts +131 -107
  54. package/src/image/sharp.ts +48 -52
  55. package/src/image/util.ts +20 -12
  56. package/src/index.ts +8 -15
  57. package/src/util/http.ts +41 -0
  58. package/src/util/retry.ts +8 -32
  59. package/tests/_util.ts +50 -3
  60. package/tests/blob-resolver.test.ts +62 -36
  61. package/tests/image/server.test.ts +40 -32
  62. package/tests/image/sharp.test.ts +17 -4
  63. package/tests/label-hydration.test.ts +6 -6
  64. package/tests/server.test.ts +41 -56
  65. package/tsconfig.build.tsbuildinfo +1 -1
package/src/api/blob-resolver.ts CHANGED
@@ -1,97 +1,278 @@
-import { pipeline, Readable } from 'stream'
-import express from 'express'
-import createError from 'http-errors'
-import axios, { AxiosError } from 'axios'
+import {
+  ACCEPT_ENCODING_COMPRESSED,
+  ACCEPT_ENCODING_UNCOMPRESSED,
+  buildProxiedContentEncoding,
+  formatAcceptHeader,
+} from '@atproto-labs/xrpc-utils'
+import {
+  createDecoders,
+  VerifyCidError,
+  VerifyCidTransform,
+} from '@atproto/common'
+import { AtprotoDid, isAtprotoDid } from '@atproto/did'
+import createError, { isHttpError } from 'http-errors'
 import { CID } from 'multiformats/cid'
-import { ensureValidDid } from '@atproto/syntax'
-import { forwardStreamErrors, VerifyCidTransform } from '@atproto/common'
-import { DidNotFoundError } from '@atproto/identity'
+import { Duplex, Transform, Writable } from 'node:stream'
+import { pipeline } from 'node:stream/promises'
+import { Dispatcher } from 'undici'
+
+import { ServerConfig } from '../config'
 import AppContext from '../context'
-import { httpLogger as log } from '../logger'
-import { retryHttp } from '../util/retry'
 import {
   Code,
+  DataPlaneClient,
   getServiceEndpoint,
   isDataplaneError,
   unpackIdentityServices,
 } from '../data-plane'
+import { parseCid } from '../hydration/util'
+import { httpLogger as log } from '../logger'
+import { Middleware, proxyResponseHeaders, responseSignal } from '../util/http'
 
-// Resolve and verify blob from its origin host
+export function createMiddleware(ctx: AppContext): Middleware {
+  return async (req, res, next) => {
+    if (req.method !== 'GET' && req.method !== 'HEAD') return next()
+    if (!req.url?.startsWith('/blob/')) return next()
+    const { length, 2: didParam, 3: cidParam } = req.url.split('/')
+    if (length !== 4 || !didParam || !cidParam) return next()
 
-export const createRouter = (ctx: AppContext): express.Router => {
-  const router = express.Router()
+    // @TODO Check sec-fetch-* headers (e.g. to prevent files from being
+    // displayed as a web page) ?
 
-  router.get('/blob/:did/:cid', async function (req, res, next) {
     try {
-      const { did, cid: cidStr } = req.params
-      try {
-        ensureValidDid(did)
-      } catch (err) {
-        return next(createError(400, 'Invalid did'))
-      }
-      let cid: CID
-      try {
-        cid = CID.parse(cidStr)
-      } catch (err) {
-        return next(createError(400, 'Invalid cid'))
+      const streamOptions: StreamBlobOptions = {
+        did: didParam,
+        cid: cidParam,
+        signal: responseSignal(res),
+        // Because we will be verifying the CID, we need to ensure that the
+        // upstream response can be de-compressed. We do this by negotiating
+        // the "accept-encoding" header based on the downstream client's
+        // capabilities.
+        acceptEncoding: buildProxiedContentEncoding(
+          req.headers['accept-encoding'],
+          ctx.cfg.proxyPreferCompressed,
+        ),
       }
 
-      const verifiedImage = await resolveBlob(ctx, did, cid)
+      await streamBlob(ctx, streamOptions, (upstream, { cid, did, url }) => {
+        const encoding = upstream.headers['content-encoding']
+        const verifier = createCidVerifier(cid, encoding)
 
-      // Send chunked response, destroying stream early (before
-      // closing chunk) if the bytes don't match the expected cid.
-      res.statusCode = 200
-      res.setHeader('content-type', verifiedImage.contentType)
-      res.setHeader('x-content-type-options', 'nosniff')
-      res.setHeader('content-security-policy', `default-src 'none'; sandbox`)
-      pipeline(verifiedImage.stream, res, (err) => {
-        if (err) {
+        const logError = (err: unknown) => {
           log.warn(
-            { err, did, cid: cidStr, pds: verifiedImage.pds },
+            { err, did, cid: cid.toString(), pds: url.origin },
             'blob resolution failed during transmission',
           )
         }
+
+        const onError = (err: unknown) => {
+          // No need to pipe the data (verifier) into the response, as it is
+          // "errored". The response processing will continue in the "catch"
+          // block below (because streamBlob() will reject the promise in case
+          // of "error" event on the writable stream returned by the factory).
+          clearTimeout(graceTimer)
+          logError(err)
+        }
+
+        // Catch any error that occurs before the timer below is triggered.
+        // The promise returned by streamBlob() will be rejected as soon as
+        // the verifier errors.
+        verifier.on('error', onError)
+
+        // The way I/O works, it is likely that, in case of small payloads, the
+        // full upstream response is already buffered at this point. In order
+        // to return a 404 instead of a broken response stream, we allow the
+        // event loop to process any pending I/O events before we start piping
+        // the bytes to the response. For larger payloads, the response will
+        // look like a 200 with a broken chunked response stream. The only way
+        // around that would be to buffer the entire response before piping it
+        // to the response, which would hurt latency (need the full payload)
+        // and memory usage (either RAM or disk). Since this is more of an
+        // edge case, we allow the broken response stream to be sent.
+        const graceTimer = setTimeout(() => {
+          verifier.off('error', onError)
+
+          // Make sure that the content served from the bsky api domain cannot
+          // be used to perform XSS attacks (by serving HTML pages)
+          res.setHeader(
+            'Content-Security-Policy',
+            `default-src 'none'; sandbox`,
+          )
+          res.setHeader('X-Content-Type-Options', 'nosniff')
+          res.setHeader('X-Frame-Options', 'DENY')
+          res.setHeader('X-XSS-Protection', '0')
+
+          // @TODO Add a cache-control header ?
+          // @TODO Add content-disposition header (to force download) ?
+
+          proxyResponseHeaders(upstream, res)
+
+          // Force chunked encoding. This is required because the verifier will
+          // trigger an error *after* the last chunk has been passed through.
+          // Because the number of bytes sent will match the content-length,
+          // the HTTP response will be considered "complete" by the HTTP
+          // server. At this point, only trailer headers could indicate that
+          // an error occurred, but that is not the behavior we expect.
+          res.removeHeader('content-length')
+
+          // From this point on, triggering the next middleware (including any
+          // error handler) can be problematic because content-type,
+          // content-encoding, etc. headers have already been set. Because of
+          // this, we make sure that res.headersSent is set to true, preventing
+          // another error handler middleware from being called (from the catch
+          // block below). Not flushing the headers here would require
+          // reverting the headers set by this middleware (which we don't do
+          // for now).
+          res.flushHeaders()
+
+          // Pipe the verifier output into the HTTP response
+          void pipeline([verifier, res]).catch(logError)
+        }, 10) // 0 works too. Allow for additional data to come in for 10ms.
+
+        // Write the upstream response into the verifier.
+        return verifier
       })
     } catch (err) {
-      if (err instanceof AxiosError) {
-        if (err.code === AxiosError.ETIMEDOUT) {
-          log.warn(
-            { host: err.request?.host, path: err.request?.path },
-            'blob resolution timeout',
-          )
-          return next(createError(504)) // Gateway timeout
-        }
-        if (!err.response || err.response.status >= 500) {
+      if (res.headersSent || res.destroyed) {
+        res.destroy()
+      } else if (err instanceof VerifyCidError) {
+        // @NOTE This only works because of the graceTimer above. It will also
+        // only be triggered for small payloads.
+        next(createError(404, err.message))
+      } else if (isHttpError(err)) {
+        next(err)
+      } else {
+        next(createError(502, 'Upstream Error', { cause: err }))
+      }
+    }
+  }
+}
+
+export type StreamBlobOptions = {
+  cid: string
+  did: string
+  acceptEncoding?: string
+  signal?: AbortSignal
+}
+
+export type StreamBlobFactory = (
+  data: Dispatcher.StreamFactoryData,
+  info: {
+    url: URL
+    did: AtprotoDid
+    cid: CID
+  },
+) => Writable
+
+export async function streamBlob(
+  ctx: AppContext,
+  options: StreamBlobOptions,
+  factory: StreamBlobFactory,
+) {
+  const { did, cid } = parseBlobParams(options)
+  const url = await getBlobUrl(ctx.dataplane, did, cid)
+
+  const headers = getBlobHeaders(ctx.cfg, url)
+
+  headers.set(
+    'accept-encoding',
+    options.acceptEncoding ||
+      formatAcceptHeader(
+        ctx.cfg.proxyPreferCompressed
+          ? ACCEPT_ENCODING_COMPRESSED
+          : ACCEPT_ENCODING_UNCOMPRESSED,
+      ),
+  )
+
+  let headersReceived = false
+
+  return ctx.blobDispatcher
+    .stream(
+      {
+        method: 'GET',
+        origin: url.origin,
+        path: url.pathname + url.search,
+        headers,
+        signal: options.signal,
+      },
+      (upstream) => {
+        headersReceived = true
+
+        if (upstream.statusCode !== 200) {
           log.warn(
-            { host: err.request?.host, path: err.request?.path },
-            'blob resolution failed upstream',
+            {
+              did,
+              cid: cid.toString(),
+              pds: url.origin,
+              status: upstream.statusCode,
+            },
+            `blob resolution failed upstream`,
           )
-          return next(createError(502))
+
+          throw upstream.statusCode >= 400 && upstream.statusCode < 500
+            ? createError(404, 'Blob not found', { cause: upstream }) // 4xx => 404
+            : createError(502, 'Upstream Error', { cause: upstream }) // !200 && !4xx => 502
        }
-        return next(createError(404, 'Blob not found'))
-      }
-      if (err instanceof DidNotFoundError) {
-        return next(createError(404, 'Blob not found'))
+
+        return factory(upstream, { url, did, cid })
+      },
+    )
+    .catch((err) => {
+      // Is this a connection error, or a stream error ?
+      if (!headersReceived) {
+        // connection error, dns error, headers timeout, ...
+        log.warn(
+          { err, did, cid: cid.toString(), pds: url.origin },
+          'blob resolution failed during connection',
+        )
+
+        throw createError(502, 'Upstream Error', { cause: err })
      }
-      return next(err)
-    }
-  })
 
-  return router
+      throw err
+    })
+}
+
+function parseBlobParams(params: { cid: string; did: string }) {
+  const { cid, did } = params
+  if (!isAtprotoDid(did)) throw createError(400, 'Invalid did')
+  const cidObj = parseCid(cid)
+  if (!cidObj) throw createError(400, 'Invalid cid')
+  return { cid: cidObj, did }
 }
 
-export async function resolveBlob(ctx: AppContext, did: string, cid: CID) {
-  const cidStr = cid.toString()
+async function getBlobUrl(
+  dataplane: DataPlaneClient,
+  did: string,
+  cid: CID,
+): Promise<URL> {
+  const pds = await getBlobPds(dataplane, did, cid)
+
+  const url = new URL(`/xrpc/com.atproto.sync.getBlob`, pds)
+  url.searchParams.set('did', did)
+  url.searchParams.set('cid', cid.toString())
+
+  return url
+}
 
+async function getBlobPds(
+  dataplane: DataPlaneClient,
+  did: string,
+  cid: CID,
+): Promise<string> {
   const [identity, { takenDown }] = await Promise.all([
-    ctx.dataplane.getIdentityByDid({ did }).catch((err) => {
+    dataplane.getIdentityByDid({ did }).catch((err) => {
       if (isDataplaneError(err, Code.NotFound)) {
         return undefined
       }
       throw err
     }),
-    ctx.dataplane.getBlobTakedown({ did, cid: cid.toString() }),
+    dataplane.getBlobTakedown({ did, cid: cid.toString() }),
   ])
+
+  if (takenDown) {
+    throw createError(404, 'Blob not found')
+  }
+
   const services = identity && unpackIdentityServices(identity.services)
   const pds =
     services &&
@@ -99,62 +280,116 @@ export async function resolveBlob(ctx: AppContext, did: string, cid: CID) {
       id: 'atproto_pds',
       type: 'AtprotoPersonalDataServer',
     })
+
   if (!pds) {
     throw createError(404, 'Origin not found')
   }
-  if (takenDown) {
-    throw createError(404, 'Blob not found')
-  }
 
-  const blobResult = await retryHttp(() =>
-    getBlob(ctx, { pds, did, cid: cidStr }),
-  )
-  const imageStream: Readable = blobResult.data
-  const verifyCid = new VerifyCidTransform(cid)
-
-  forwardStreamErrors(imageStream, verifyCid)
-  return {
-    pds,
-    contentType:
-      blobResult.headers['content-type'] || 'application/octet-stream',
-    stream: imageStream.pipe(verifyCid),
-  }
-}
-
-async function getBlob(
-  ctx: AppContext,
-  opts: { pds: string; did: string; cid: string },
-) {
-  const { pds, did, cid } = opts
-  return axios.get(`${pds}/xrpc/com.atproto.sync.getBlob`, {
-    params: { did, cid },
-    decompress: true,
-    responseType: 'stream',
-    timeout: 5000, // 5sec of inactivity on the connection
-    headers: getRateLimitBypassHeaders(ctx, pds),
-  })
+  return pds
 }
 
-function getRateLimitBypassHeaders(
-  ctx: AppContext,
-  pds: string,
-): { 'x-ratelimit-bypass'?: string } {
-  const {
+function getBlobHeaders(
+  {
     blobRateLimitBypassKey: bypassKey,
     blobRateLimitBypassHostname: bypassHostname,
-  } = ctx.cfg
-  if (!bypassKey || !bypassHostname) {
-    return {}
-  }
-  const url = new URL(pds)
-  if (bypassHostname.startsWith('.')) {
-    if (url.hostname.endsWith(bypassHostname)) {
-      return { 'x-ratelimit-bypass': bypassKey }
-    }
-  } else {
-    if (url.hostname === bypassHostname) {
-      return { 'x-ratelimit-bypass': bypassKey }
+  }: ServerConfig,
+  url: URL,
+): Map<string, string> {
+  const headers = new Map<string, string>()
+
+  if (bypassKey && bypassHostname) {
+    const matchesUrl = bypassHostname.startsWith('.')
+      ? url.hostname.endsWith(bypassHostname)
+      : url.hostname === bypassHostname
+
+    if (matchesUrl) {
+      headers.set('x-ratelimit-bypass', bypassKey)
     }
   }
-  return {}
+
+  return headers
+}
+
+/**
+ * This function creates a passthrough stream that will decompress (if needed)
+ * and verify the CID of the input stream. The output data will be identical to
+ * the input data.
+ *
+ * If you need the un-compressed data, you should use a decompress + verify
+ * pipeline instead.
+ */
+function createCidVerifier(cid: CID, encoding?: string | string[]): Duplex {
+  // If the upstream content is compressed, we do not want to return a
+  // de-compressed stream here. Indeed, the "compression" middleware will
+  // compress the response before it is sent downstream, if it is not already
+  // compressed. Because of this, it is preferable to return the content as-is
+  // to avoid re-compressing it.
+  //
+  // We do still want to be able to verify the CID, which requires
+  // decompressing the input bytes.
+  //
+  // To that end, we create a passthrough in order to "tee" the stream into two
+  // streams: one that will be sent, unaltered, downstream, and a pipeline that
+  // will be used to decompress & verify the CID (discarding de-compressed
+  // data).
+
+  const decoders = createDecoders(encoding)
+  const verifier = new VerifyCidTransform(cid)
+
+  // Optimization: If the content is not compressed, we don't need to "tee" the
+  // stream, we can use the verifier as a simple passthrough.
+  if (!decoders.length) return verifier
+
+  const pipelineController = new AbortController()
+  const pipelineStreams: Duplex[] = [...decoders, verifier]
+  const pipelineInput = pipelineStreams[0]!
+
+  // Create a promise that will resolve if, and only if, the decoding and
+  // verification succeed.
+  const pipelinePromise: Promise<null | Error> = pipeline(pipelineStreams, {
+    signal: pipelineController.signal,
+  }).then(
+    () => null,
+    (err) => {
+      const error = asError(err)
+
+      // The data being processed by the pipeline is invalid (e.g. invalid
+      // compressed content, content not matching the CID, ...). If that
+      // occurs, we can destroy the passthrough (this allows not to wait for
+      // the "flush" event to propagate the error).
+      passthrough.destroy(error)
+
+      return error
+    },
+  )
+
+  // We don't care about the un-compressed data, we only use the verifier to
+  // detect any error through the pipelinePromise. We still need to put the
+  // verifier into flowing mode to ensure that the pipelinePromise resolves.
+  verifier.resume()
+
+  const passthrough = new Transform({
+    transform(chunk, encoding, callback) {
+      pipelineInput.write(chunk, encoding)
+      callback(null, chunk)
+    },
+    flush(callback) {
+      // End the input stream, which will resolve the pipeline promise
+      pipelineInput.end()
+      // End the pass-through stream according to the result of the pipeline
+      pipelinePromise.then(callback)
+    },
+    destroy(err, callback) {
+      pipelineController.abort() // Causes pipeline() to destroy all streams
+      callback(err)
+    },
+  })
+
+  return passthrough
+}
+
+function asError(err: unknown): Error {
+  return err instanceof Error
+    ? err
+    : new Error('Processing failed', { cause: err })
 }
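
The most intricate piece of this rewrite is the "tee" inside createCidVerifier(): compressed bytes flow downstream untouched, while a side pipeline decompresses them and checks the CID, and the passthrough only finishes cleanly if that side pipeline succeeds. Below is a minimal, self-contained sketch of the same pattern, with gzip standing in for createDecoders() and a sha256 digest check standing in for VerifyCidTransform; createDigestVerifier and expectedHex are illustrative names, not part of the package.

import { createHash } from 'node:crypto'
import { Duplex, Transform } from 'node:stream'
import { pipeline } from 'node:stream/promises'
import { createGunzip } from 'node:zlib'

// Passes bytes through unchanged while a side pipeline (gunzip + digest
// check) decides whether the stream as a whole is valid.
function createDigestVerifier(expectedHex: string, gzipped: boolean): Duplex {
  const hash = createHash('sha256')
  const checker = new Transform({
    transform(chunk, _enc, cb) {
      hash.update(chunk)
      cb(null, chunk)
    },
    flush(cb) {
      cb(
        hash.digest('hex') === expectedHex
          ? null
          : new Error('digest mismatch'),
      )
    },
  })

  // Uncompressed input: the checker alone can act as the passthrough.
  if (!gzipped) return checker

  const controller = new AbortController()
  const gunzip = createGunzip()

  // Resolves to null on success, or to the error that broke decoding or
  // verification.
  const sidePipeline: Promise<null | Error> = pipeline([gunzip, checker], {
    signal: controller.signal,
  }).then(
    () => null,
    (err) => {
      const error =
        err instanceof Error ? err : new Error('failed', { cause: err })
      passthrough.destroy(error) // fail fast rather than waiting for flush
      return error
    },
  )
  checker.resume() // discard decompressed bytes, keep the side pipeline flowing

  const passthrough = new Transform({
    transform(chunk, enc, cb) {
      gunzip.write(chunk, enc) // feed the side pipeline...
      cb(null, chunk) // ...while emitting the original bytes downstream
    },
    flush(cb) {
      gunzip.end()
      sidePipeline.then(cb) // finish cleanly only if verification passed
    },
    destroy(err, cb) {
      controller.abort() // tear down the side pipeline too
      cb(err)
    },
  })
  return passthrough
}

As in the real createCidVerifier(), failure propagates both ways: an error in the side pipeline destroys the passthrough immediately, and destroying the passthrough aborts the side pipeline.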
package/src/api/well-known.ts CHANGED
@@ -4,31 +4,38 @@ import AppContext from '../context'
 export const createRouter = (ctx: AppContext): express.Router => {
   const router = express.Router()
 
-  router.get('/.well-known/did.json', (_req, res) => {
-    const hostname = ctx.cfg.publicUrl && new URL(ctx.cfg.publicUrl).hostname
-    if (!hostname || ctx.cfg.serverDid !== `did:web:${hostname}`) {
-      return res.sendStatus(404)
-    }
-    res.json({
-      '@context': ['https://www.w3.org/ns/did/v1'],
-      id: ctx.cfg.serverDid,
-      verificationMethod: [
-        {
-          id: `${ctx.cfg.serverDid}#atproto`,
-          type: 'Multikey',
-          controller: ctx.cfg.serverDid,
-          publicKeyMultibase: ctx.signingKey.did().replace('did:key:', ''),
-        },
-      ],
-      service: [
-        {
-          id: '#bsky_notif',
-          type: 'BskyNotificationService',
-          serviceEndpoint: `https://${hostname}`,
-        },
-      ],
+  const did = ctx.cfg.serverDid
+  if (did.startsWith('did:web:')) {
+    const hostname = did.slice('did:web:'.length)
+    const serviceEndpoint = `https://${hostname}`
+
+    router.get('/.well-known/did.json', (_req, res) => {
+      res.json({
+        '@context': ['https://www.w3.org/ns/did/v1'],
+        id: did,
+        verificationMethod: [
+          {
+            id: `${did}#atproto`,
+            type: 'Multikey',
+            controller: did,
+            publicKeyMultibase: ctx.signingKey.did().replace('did:key:', ''),
+          },
+        ],
+        service: [
+          {
+            id: '#bsky_notif',
+            type: 'BskyNotificationService',
+            serviceEndpoint,
+          },
+          {
+            id: '#bsky_appview',
+            type: 'BskyAppView',
+            serviceEndpoint,
+          },
+        ],
+      })
     })
-  })
+  }
 
   return router
 }
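
The rewritten route registers only when serverDid is a did:web DID, deriving the hostname from the DID itself rather than from publicUrl (previously a publicUrl mismatch produced a 404 handler; now no route is mounted at all), and it adds a second service entry, #bsky_appview. For a hypothetical serverDid of did:web:appview.example.com and a placeholder signing key, the served /.well-known/did.json would look like:

{
  "@context": ["https://www.w3.org/ns/did/v1"],
  "id": "did:web:appview.example.com",
  "verificationMethod": [
    {
      "id": "did:web:appview.example.com#atproto",
      "type": "Multikey",
      "controller": "did:web:appview.example.com",
      "publicKeyMultibase": "zQ3sh...placeholder"
    }
  ],
  "service": [
    {
      "id": "#bsky_notif",
      "type": "BskyNotificationService",
      "serviceEndpoint": "https://appview.example.com"
    },
    {
      "id": "#bsky_appview",
      "type": "BskyAppView",
      "serviceEndpoint": "https://appview.example.com"
    }
  ]
}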
package/src/config.ts CHANGED
@@ -50,6 +50,14 @@ export interface ServerConfigValues {
   // client config
   clientCheckEmailConfirmed?: boolean
   topicsEnabled?: boolean
+  // http proxy agent
+  disableSsrfProtection?: boolean
+  proxyAllowHTTP2?: boolean
+  proxyHeadersTimeout?: number
+  proxyBodyTimeout?: number
+  proxyMaxResponseSize?: number
+  proxyMaxRetries?: number
+  proxyPreferCompressed?: boolean
 }
 
 export class ServerConfig {
@@ -58,7 +66,9 @@ export class ServerConfig {
 
   static readEnv(overrides?: Partial<ServerConfigValues>) {
     const version = process.env.BSKY_VERSION || undefined
-    const debugMode = process.env.NODE_ENV !== 'production'
+    const debugMode =
+      // Because security related features are disabled in development mode, this requires explicit opt-in.
+      process.env.NODE_ENV === 'development' || process.env.NODE_ENV === 'test'
     const publicUrl = process.env.BSKY_PUBLIC_URL || undefined
     const serverDid = process.env.BSKY_SERVER_DID || 'did:example:test'
     const envPort = parseInt(process.env.BSKY_PORT || '', 10)
@@ -150,6 +160,23 @@ export class ServerConfig {
     const maxThreadDepth = process.env.BSKY_MAX_THREAD_DEPTH
       ? parseInt(process.env.BSKY_MAX_THREAD_DEPTH || '', 10)
       : undefined
+
+    const disableSsrfProtection = process.env.BSKY_DISABLE_SSRF_PROTECTION
+      ? process.env.BSKY_DISABLE_SSRF_PROTECTION === 'true'
+      : debugMode
+
+    const proxyAllowHTTP2 = process.env.BSKY_PROXY_ALLOW_HTTP2 === 'true'
+    const proxyHeadersTimeout =
+      parseInt(process.env.BSKY_PROXY_HEADERS_TIMEOUT || '', 10) || undefined
+    const proxyBodyTimeout =
+      parseInt(process.env.BSKY_PROXY_BODY_TIMEOUT || '', 10) || undefined
+    const proxyMaxResponseSize =
+      parseInt(process.env.BSKY_PROXY_MAX_RESPONSE_SIZE || '', 10) || undefined
+    const proxyMaxRetries =
+      parseInt(process.env.BSKY_PROXY_MAX_RETRIES || '', 10) || undefined
+    const proxyPreferCompressed =
+      process.env.BSKY_PROXY_PREFER_COMPRESSED === 'true'
+
     return new ServerConfig({
       version,
       debugMode,
@@ -193,6 +220,13 @@ export class ServerConfig {
       bigThreadUris,
       bigThreadDepth,
       maxThreadDepth,
+      disableSsrfProtection,
+      proxyAllowHTTP2,
+      proxyHeadersTimeout,
+      proxyBodyTimeout,
+      proxyMaxResponseSize,
+      proxyMaxRetries,
+      proxyPreferCompressed,
       ...stripUndefineds(overrides ?? {}),
     })
   }
@@ -217,11 +251,6 @@ export class ServerConfig {
     return this.assignedPort || this.cfg.port
   }
 
-  get localUrl() {
-    assert(this.port, 'No port assigned')
-    return `http://localhost:${this.port}`
-  }
-
   get publicUrl() {
     return this.cfg.publicUrl
   }
@@ -377,6 +406,34 @@ export class ServerConfig {
   get maxThreadDepth() {
     return this.cfg.maxThreadDepth
   }
+
+  get disableSsrfProtection(): boolean {
+    return this.cfg.disableSsrfProtection ?? false
+  }
+
+  get proxyAllowHTTP2(): boolean {
+    return this.cfg.proxyAllowHTTP2 ?? false
+  }
+
+  get proxyHeadersTimeout(): number {
+    return this.cfg.proxyHeadersTimeout ?? 30e3
+  }
+
+  get proxyBodyTimeout(): number {
+    return this.cfg.proxyBodyTimeout ?? 30e3
+  }
+
+  get proxyMaxResponseSize(): number {
+    return this.cfg.proxyMaxResponseSize ?? 10 * 1024 * 1024 // 10mb
+  }
+
+  get proxyMaxRetries(): number {
+    return this.cfg.proxyMaxRetries ?? 3
+  }
+
+  get proxyPreferCompressed(): boolean {
+    return this.cfg.proxyPreferCompressed ?? true
+  }
 }
 
 function stripUndefineds(
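
These settings feed the undici dispatcher that blob-resolver.ts now streams through (ctx.blobDispatcher); each value is read from a BSKY_PROXY_* environment variable in readEnv, with the defaults shown in the getters. The actual construction lives in the new src/api/blob-dispatcher.ts (+38 lines, not expanded in this view), so the sketch below is an assumption, not the package's code: allowH2, headersTimeout, bodyTimeout, and maxResponseSize are real undici Agent options and RetryAgent is a real undici class, but how this package combines them, and how disableSsrfProtection gates outbound address filtering, is inferred from the config surface alone.

import { Agent, Dispatcher, RetryAgent } from 'undici'
import { ServerConfig } from './config'

// Hypothetical factory mirroring what blob-dispatcher.ts plausibly does.
export function createBlobDispatcher(cfg: ServerConfig): Dispatcher {
  const agent = new Agent({
    allowH2: cfg.proxyAllowHTTP2, // default false
    headersTimeout: cfg.proxyHeadersTimeout, // default 30e3 (ms)
    bodyTimeout: cfg.proxyBodyTimeout, // default 30e3 (ms)
    maxResponseSize: cfg.proxyMaxResponseSize, // default 10 MiB
  })
  // Retrying at the dispatcher level would take over from the retryHttp()
  // wrapper whose import this diff removes from blob-resolver.ts.
  return new RetryAgent(agent, { maxRetries: cfg.proxyMaxRetries }) // default 3
}

Dispatcher-level retries line up with the shrinking of src/util/retry.ts (+8 -32) elsewhere in this release.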