@atproto/pds 0.4.60 → 0.4.61

Sign up to get free protection for your applications and to get access to all the features.
Files changed (93)
  1. package/CHANGELOG.md +17 -0
  2. package/dist/account-manager/helpers/account.d.ts +1 -0
  3. package/dist/account-manager/helpers/account.d.ts.map +1 -1
  4. package/dist/account-manager/helpers/account.js +15 -1
  5. package/dist/account-manager/helpers/account.js.map +1 -1
  6. package/dist/account-manager/helpers/invite.d.ts +1 -1
  7. package/dist/account-manager/helpers/invite.d.ts.map +1 -1
  8. package/dist/account-manager/helpers/invite.js +20 -9
  9. package/dist/account-manager/helpers/invite.js.map +1 -1
  10. package/dist/account-manager/index.d.ts +2 -0
  11. package/dist/account-manager/index.d.ts.map +1 -1
  12. package/dist/account-manager/index.js +8 -1
  13. package/dist/account-manager/index.js.map +1 -1
  14. package/dist/api/com/atproto/admin/getAccountInfo.d.ts.map +1 -1
  15. package/dist/api/com/atproto/admin/getAccountInfo.js +6 -14
  16. package/dist/api/com/atproto/admin/getAccountInfo.js.map +1 -1
  17. package/dist/api/com/atproto/admin/getAccountInfos.d.ts +4 -0
  18. package/dist/api/com/atproto/admin/getAccountInfos.d.ts.map +1 -0
  19. package/dist/api/com/atproto/admin/getAccountInfos.js +32 -0
  20. package/dist/api/com/atproto/admin/getAccountInfos.js.map +1 -0
  21. package/dist/api/com/atproto/admin/index.d.ts.map +1 -1
  22. package/dist/api/com/atproto/admin/index.js +2 -0
  23. package/dist/api/com/atproto/admin/index.js.map +1 -1
  24. package/dist/api/com/atproto/admin/util.d.ts +17 -0
  25. package/dist/api/com/atproto/admin/util.d.ts.map +1 -1
  26. package/dist/api/com/atproto/admin/util.js +27 -1
  27. package/dist/api/com/atproto/admin/util.js.map +1 -1
  28. package/dist/api/com/atproto/repo/getRecord.d.ts.map +1 -1
  29. package/dist/api/com/atproto/repo/getRecord.js +1 -1
  30. package/dist/api/com/atproto/repo/getRecord.js.map +1 -1
  31. package/dist/config/config.d.ts +8 -0
  32. package/dist/config/config.d.ts.map +1 -1
  33. package/dist/config/config.js +1 -0
  34. package/dist/config/config.js.map +1 -1
  35. package/dist/config/env.d.ts +1 -0
  36. package/dist/config/env.d.ts.map +1 -1
  37. package/dist/config/env.js +1 -0
  38. package/dist/config/env.js.map +1 -1
  39. package/dist/lexicon/index.d.ts +4 -0
  40. package/dist/lexicon/index.d.ts.map +1 -1
  41. package/dist/lexicon/index.js +8 -0
  42. package/dist/lexicon/index.js.map +1 -1
  43. package/dist/lexicon/lexicons.d.ts +85 -0
  44. package/dist/lexicon/lexicons.d.ts.map +1 -1
  45. package/dist/lexicon/lexicons.js +93 -0
  46. package/dist/lexicon/lexicons.js.map +1 -1
  47. package/dist/lexicon/types/com/atproto/repo/getRecord.d.ts +1 -0
  48. package/dist/lexicon/types/com/atproto/repo/getRecord.d.ts.map +1 -1
  49. package/dist/lexicon/types/tools/ozone/moderation/getRecords.d.ts +39 -0
  50. package/dist/lexicon/types/tools/ozone/moderation/getRecords.d.ts.map +1 -0
  51. package/dist/lexicon/types/tools/ozone/moderation/getRecords.js +3 -0
  52. package/dist/lexicon/types/tools/ozone/moderation/getRecords.js.map +1 -0
  53. package/dist/lexicon/types/tools/ozone/moderation/getRepos.d.ts +39 -0
  54. package/dist/lexicon/types/tools/ozone/moderation/getRepos.d.ts.map +1 -0
  55. package/dist/lexicon/types/tools/ozone/moderation/getRepos.js +3 -0
  56. package/dist/lexicon/types/tools/ozone/moderation/getRepos.js.map +1 -0
  57. package/dist/mailer/templates/confirm-email.js +1 -1
  58. package/dist/mailer/templates/confirm-email.js.map +1 -1
  59. package/dist/mailer/templates/delete-account.js +1 -1
  60. package/dist/mailer/templates/delete-account.js.map +1 -1
  61. package/dist/mailer/templates/plc-operation.js +1 -1
  62. package/dist/mailer/templates/plc-operation.js.map +1 -1
  63. package/dist/mailer/templates/reset-password.js +1 -1
  64. package/dist/mailer/templates/reset-password.js.map +1 -1
  65. package/dist/mailer/templates/update-email.js +1 -1
  66. package/dist/mailer/templates/update-email.js.map +1 -1
  67. package/dist/pipethrough.d.ts +1 -1
  68. package/dist/pipethrough.d.ts.map +1 -1
  69. package/dist/pipethrough.js +105 -73
  70. package/dist/pipethrough.js.map +1 -1
  71. package/package.json +11 -11
  72. package/src/account-manager/helpers/account.ts +22 -0
  73. package/src/account-manager/helpers/invite.ts +19 -9
  74. package/src/account-manager/index.ts +13 -1
  75. package/src/api/com/atproto/admin/getAccountInfo.ts +6 -13
  76. package/src/api/com/atproto/admin/getAccountInfos.ts +33 -0
  77. package/src/api/com/atproto/admin/index.ts +2 -0
  78. package/src/api/com/atproto/admin/util.ts +38 -0
  79. package/src/api/com/atproto/repo/getRecord.ts +4 -1
  80. package/src/config/config.ts +10 -0
  81. package/src/config/env.ts +2 -0
  82. package/src/lexicon/index.ts +24 -0
  83. package/src/lexicon/lexicons.ts +93 -0
  84. package/src/lexicon/types/com/atproto/repo/getRecord.ts +1 -0
  85. package/src/lexicon/types/tools/ozone/moderation/getRecords.ts +50 -0
  86. package/src/lexicon/types/tools/ozone/moderation/getRepos.ts +50 -0
  87. package/src/mailer/templates/confirm-email.hbs +1 -1
  88. package/src/mailer/templates/delete-account.hbs +1 -1
  89. package/src/mailer/templates/plc-operation.hbs +1 -1
  90. package/src/mailer/templates/reset-password.hbs +1 -1
  91. package/src/mailer/templates/update-email.hbs +1 -1
  92. package/src/pipethrough.ts +131 -92
  93. package/tests/proxied/read-after-write.test.ts +77 -0
@@ -60,7 +60,7 @@ export const proxyHandler = (ctx: AppContext): CatchallHandler => {
60
60
  const { url: origin, did: aud } = await parseProxyInfo(ctx, req, lxm)
61
61
 
62
62
  const headers: IncomingHttpHeaders = {
63
- 'accept-encoding': req.headers['accept-encoding'],
63
+ 'accept-encoding': req.headers['accept-encoding'] || 'identity',
64
64
  'accept-language': req.headers['accept-language'],
65
65
  'atproto-accept-labelers': req.headers['atproto-accept-labelers'],
66
66
  'x-bsky-topics': req.headers['x-bsky-topics'],
@@ -102,6 +102,20 @@ export const proxyHandler = (ctx: AppContext): CatchallHandler => {
102
102
  }
103
103
  }
104
104
 
105
+ const ACCEPT_ENCODING_COMPRESSED = [
106
+ ['gzip', { q: 1.0 }],
107
+ ['deflate', { q: 0.9 }],
108
+ ['br', { q: 0.8 }],
109
+ ['identity', { q: 0.1 }],
110
+ ] as const satisfies Accept[]
111
+
112
+ const ACCEPT_ENCODING_UNCOMPRESSED = [
113
+ ['identity', { q: 1.0 }],
114
+ ['gzip', { q: 0.3 }],
115
+ ['deflate', { q: 0.2 }],
116
+ ['br', { q: 0.1 }],
117
+ ] as const satisfies Accept[]
118
+
105
119
  export type PipethroughOptions = {
106
120
  /**
107
121
  * Specify the issuer (requester) for service auth. If not provided, no
@@ -122,28 +136,17 @@ export type PipethroughOptions = {
122
136
  lxm?: string
123
137
  }
124
138
 
125
- // List of content encodings that are supported by the PDS. Because proxying
126
- // occurs between data centers, where connectivity is supposedly stable & good,
127
- // and because payloads are small, we prefer encoding that are fast (gzip,
128
- // deflate, identity) over heavier encodings (Brotli). Upstream servers should
129
- // be configured to prefer any encoding over identity in case of big,
130
- // uncompressed payloads.
131
- const SUPPORTED_ENCODINGS = [
132
- ['gzip', { q: '1.0' }],
133
- ['deflate', { q: '0.9' }],
134
- ['identity', { q: '0.3' }],
135
- ['br', { q: '0.1' }],
136
- ] as const satisfies Accept[]
137
-
138
139
  export async function pipethrough(
139
140
  ctx: AppContext,
140
141
  req: express.Request,
141
142
  options?: PipethroughOptions,
142
- ): Promise<{
143
- stream: Readable
144
- headers: Record<string, string>
145
- encoding: string
146
- }> {
143
+ ): Promise<
144
+ HandlerPipeThroughStream & {
145
+ stream: Readable
146
+ headers: Record<string, string>
147
+ encoding: string
148
+ }
149
+ > {
147
150
  if (req.method !== 'GET' && req.method !== 'HEAD') {
148
151
  // pipethrough() is used from within xrpcServer handlers, which means that
149
152
  // the request body either has been parsed or is a readable stream that has
@@ -160,32 +163,31 @@ export async function pipethrough(
160
163
 
161
164
  const { url: origin, did: aud } = await parseProxyInfo(ctx, req, lxm)
162
165
 
163
- // Because we sometimes need to interpret the response (e.g. during
164
- // read-after-write, through asPipeThroughBuffer()), we need to ask the
165
- // upstream server for an encoding that both the requester and the PDS can
166
- // understand.
167
- const acceptEncoding = negotiateAccept(
168
- req.headers['accept-encoding'],
169
- SUPPORTED_ENCODINGS,
170
- )
171
-
172
- const headers: IncomingHttpHeaders = {
173
- 'accept-language': req.headers['accept-language'],
174
- 'atproto-accept-labelers': req.headers['atproto-accept-labelers'],
175
- 'x-bsky-topics': req.headers['x-bsky-topics'],
176
-
177
- 'accept-encoding': `${formatAccepted(acceptEncoding)}, *;q=0`, // Reject anything else (q=0)
178
-
179
- authorization: options?.iss
180
- ? `Bearer ${await ctx.serviceAuthJwt(options.iss, options.aud ?? aud, options.lxm ?? lxm)}`
181
- : undefined,
182
- }
183
-
184
166
  const dispatchOptions: Dispatcher.RequestOptions = {
185
167
  origin,
186
168
  method: req.method,
187
169
  path: req.originalUrl,
188
- headers,
170
+ headers: {
171
+ 'accept-language': req.headers['accept-language'],
172
+ 'atproto-accept-labelers': req.headers['atproto-accept-labelers'],
173
+ 'x-bsky-topics': req.headers['x-bsky-topics'],
174
+
175
+ // Because we sometimes need to interpret the response (e.g. during
176
+ // read-after-write, through asPipeThroughBuffer()), we need to ask the
177
+ // upstream server for an encoding that both the requester and the PDS can
178
+ // understand. Since we might have to do the decoding ourselves, we will
179
+ // use our own preferences (and weight) to negotiate the encoding.
180
+ 'accept-encoding': negotiateContentEncoding(
181
+ req.headers['accept-encoding'],
182
+ ctx.cfg.proxy.preferCompressed
183
+ ? ACCEPT_ENCODING_COMPRESSED
184
+ : ACCEPT_ENCODING_UNCOMPRESSED,
185
+ ),
186
+
187
+ authorization: options?.iss
188
+ ? `Bearer ${await ctx.serviceAuthJwt(options.iss, options.aud ?? aud, options.lxm ?? lxm)}`
189
+ : undefined,
190
+ },
189
191
 
190
192
  // Use a high water mark to buffer more data while performing async
191
193
  // operations before this stream is consumed. This is especially useful
@@ -193,14 +195,13 @@ export async function pipethrough(
193
195
  highWaterMark: 2 * 65536, // twice the default (64KiB)
194
196
  }
195
197
 
196
- const upstream = await pipethroughRequest(ctx, dispatchOptions)
198
+ const { headers, body } = await pipethroughRequest(ctx, dispatchOptions)
197
199
 
198
200
  return {
199
- stream: upstream.body,
200
- headers: Object.fromEntries(responseHeaders(upstream.headers)),
201
- encoding:
202
- safeString(upstream.headers['content-type']) ?? 'application/json',
203
- } satisfies HandlerPipeThroughStream
201
+ encoding: safeString(headers['content-type']) ?? 'application/json',
202
+ headers: Object.fromEntries(responseHeaders(headers)),
203
+ stream: body,
204
+ }
204
205
  }
205
206
 
206
207
  // Request setup/formatting
@@ -367,80 +368,118 @@ function handleUpstreamRequestError(
367
368
  // Request parsing/forwarding
368
369
  // -------------------
369
370
 
370
- type Accept = [name: string, flags: Record<string, string>]
371
+ type AcceptFlags = { q: number }
372
+ type Accept = [name: string, flags: AcceptFlags]
371
373
 
372
- function negotiateAccept(
374
+ // accept-encoding defaults to "identity with lowest priority"
375
+ const ACCEPT_ENC_DEFAULT = ['identity', { q: 0.001 }] as const satisfies Accept
376
+ const ACCEPT_FORBID_STAR = ['*', { q: 0 }] as const satisfies Accept
377
+
378
+ function negotiateContentEncoding(
373
379
  acceptHeader: undefined | string | string[],
374
- supported: readonly Accept[],
375
- ): readonly Accept[] {
376
- // Optimization: if no accept-encoding header is present, skip negotiation
377
- if (!acceptHeader?.length) {
378
- return supported
380
+ preferences: readonly Accept[],
381
+ ): string {
382
+ const acceptMap = Object.fromEntries<undefined | AcceptFlags>(
383
+ parseAcceptEncoding(acceptHeader),
384
+ )
385
+
386
+ // Make sure the default (identity) is covered by the preferences
387
+ if (!preferences.some(coversIdentityAccept)) {
388
+ preferences = [...preferences, ACCEPT_ENC_DEFAULT]
379
389
  }
380
390
 
381
- const acceptNames = extractAcceptedNames(acceptHeader)
382
- const common = acceptNames.includes('*')
383
- ? supported
384
- : supported.filter(nameIncludedIn, acceptNames)
391
+ const common = preferences.filter(([name]) => {
392
+ const acceptQ = (acceptMap[name] ?? acceptMap['*'])?.q
393
+ // Per HTTP/1.1, "identity" is always acceptable unless explicitly rejected
394
+ if (name === 'identity') {
395
+ return acceptQ == null || acceptQ > 0
396
+ } else {
397
+ return acceptQ != null && acceptQ > 0
398
+ }
399
+ })
400
+
401
+ // Since "identity" was present in the preferences, a missing "identity" in
402
+ // the common array means that the client explicitly rejected it. Let's reflect
403
+ // this by adding it to the common array.
404
+ if (!common.some(coversIdentityAccept)) {
405
+ common.push(ACCEPT_FORBID_STAR)
406
+ }
385
407
 
386
- // There must be at least one common encoding with a non-zero q value
387
- if (!common.some(isNotRejected)) {
408
+ // If no common encodings are acceptable, throw a 406 Not Acceptable error
409
+ if (!common.some(isAllowedAccept)) {
388
410
  throw new XRPCServerError(
389
411
  ResponseType.NotAcceptable,
390
412
  'this service does not support any of the requested encodings',
391
413
  )
392
414
  }
393
415
 
394
- return common
416
+ return formatAcceptHeader(common as [Accept, ...Accept[]])
395
417
  }
396
418
 
397
- function formatAccepted(accept: readonly Accept[]): string {
398
- return accept.map(formatEncodingDev).join(', ')
419
+ function coversIdentityAccept([name]: Accept): boolean {
420
+ return name === 'identity' || name === '*'
399
421
  }
400
422
 
401
- function formatEncodingDev([enc, flags]: Accept): string {
402
- let ret = enc
403
- for (const name in flags) ret += `;${name}=${flags[name]}`
404
- return ret
423
+ function isAllowedAccept([, flags]: Accept): boolean {
424
+ return flags.q > 0
405
425
  }
406
426
 
407
- function nameIncludedIn(this: readonly string[], accept: Accept): boolean {
408
- return this.includes(accept[0])
427
+ /**
428
+ * @see {@link https://developer.mozilla.org/en-US/docs/Glossary/Quality_values}
429
+ */
430
+ function formatAcceptHeader(accept: readonly [Accept, ...Accept[]]): string {
431
+ return accept.map(formatAcceptPart).join(',')
409
432
  }
410
433
 
411
- function isNotRejected(accept: Accept): boolean {
412
- return accept[1]['q'] !== '0'
434
+ function formatAcceptPart([name, flags]: Accept): string {
435
+ return `${name};q=${flags.q}`
413
436
  }
414
437
 
415
- function extractAcceptedNames(
416
- acceptHeader: undefined | string | string[],
417
- ): string[] {
418
- if (!acceptHeader?.length) {
419
- return ['*']
420
- }
438
+ function parseAcceptEncoding(
439
+ acceptEncodings: undefined | string | string[],
440
+ ): Accept[] {
441
+ if (!acceptEncodings?.length) return []
421
442
 
422
- return Array.isArray(acceptHeader)
423
- ? acceptHeader.flatMap(extractAcceptedNames)
424
- : acceptHeader.split(',').map(extractAcceptedName).filter(isNonNullable)
443
+ return Array.isArray(acceptEncodings)
444
+ ? acceptEncodings.flatMap(parseAcceptEncoding)
445
+ : acceptEncodings.split(',').map(parseAcceptEncodingDefinition)
425
446
  }
426
447
 
427
- function extractAcceptedName(def: string): string | undefined {
428
- // No need to fully parse since we only care about allowed values
429
- const parts = def.split(';')
430
- if (parts.some(isQzero)) return undefined
431
- return parts[0].trim()
432
- }
448
+ function parseAcceptEncodingDefinition(def: string): Accept {
449
+ const { length, 0: encoding, 1: params } = def.trim().split(';', 3)
433
450
 
434
- function isQzero(def: string): boolean {
435
- return def.trim() === 'q=0'
436
- }
451
+ if (length > 2) {
452
+ throw new InvalidRequestError(`Invalid accept-encoding: "${def}"`)
453
+ }
454
+
455
+ if (!encoding || encoding.includes('=')) {
456
+ throw new InvalidRequestError(`Invalid accept-encoding: "${def}"`)
457
+ }
458
+
459
+ const flags = { q: 1 }
460
+ if (length === 2) {
461
+ const { length, 0: key, 1: value } = params.split('=', 3)
462
+ if (length !== 2) {
463
+ throw new InvalidRequestError(`Invalid accept-encoding: "${def}"`)
464
+ }
465
+
466
+ if (key === 'q' || key === 'Q') {
467
+ const q = parseFloat(value)
468
+ if (q === 0 || (Number.isFinite(q) && q <= 1 && q >= 0.001)) {
469
+ flags.q = q
470
+ } else {
471
+ throw new InvalidRequestError(`Invalid accept-encoding: "${def}"`)
472
+ }
473
+ } else {
474
+ throw new InvalidRequestError(`Invalid accept-encoding: "${def}"`)
475
+ }
476
+ }
437
477
 
438
- function isNonNullable<T>(val: T): val is NonNullable<T> {
439
- return val != null
478
+ return [encoding.toLowerCase(), flags]
440
479
  }
441
480
 
442
481
  export function isJsonContentType(contentType?: string): boolean | undefined {
443
- if (contentType == null) return undefined
482
+ if (!contentType) return undefined
444
483
  return /application\/(?:\w+\+)?json/i.test(contentType)
445
484
  }
446
485
 
@@ -1,6 +1,7 @@
1
1
  import util from 'node:util'
2
2
  import assert from 'node:assert'
3
3
  import { AtpAgent } from '@atproto/api'
4
+ import { request } from 'undici'
4
5
  import { TestNetwork, SeedClient, RecordRef } from '@atproto/dev-env'
5
6
  import basicSeed from '../seeds/basic'
6
7
  import { ThreadViewPost } from '../../src/lexicon/types/app/bsky/feed/defs'
@@ -266,4 +267,80 @@ describe('proxy read after write', () => {
266
267
  const parsed = parseInt(lag)
267
268
  expect(parsed > 0).toBe(true)
268
269
  })
270
+
271
+ it('negotiates encoding', async () => {
272
+ const identity = await agent.api.app.bsky.feed.getTimeline(
273
+ {},
274
+ { headers: { ...sc.getHeaders(alice), 'accept-encoding': 'identity' } },
275
+ )
276
+ expect(identity.headers['content-encoding']).toBeUndefined()
277
+
278
+ const gzip = await agent.api.app.bsky.feed.getTimeline(
279
+ {},
280
+ {
281
+ headers: { ...sc.getHeaders(alice), 'accept-encoding': 'gzip, *;q=0' },
282
+ },
283
+ )
284
+ expect(gzip.headers['content-encoding']).toBe('gzip')
285
+ })
286
+
287
+ it('defaults to identity encoding', async () => {
288
+ // Not using the "agent" because "fetch()" will add "accept-encoding: gzip,
289
+ // deflate" if no "accept-encoding" header is provided
290
+ const res = await request(
291
+ new URL(`/xrpc/app.bsky.feed.getTimeline`, agent.dispatchUrl),
292
+ {
293
+ headers: { ...sc.getHeaders(alice) },
294
+ },
295
+ )
296
+ expect(res.statusCode).toBe(200)
297
+ expect(res.headers['content-encoding']).toBeUndefined()
298
+ })
299
+
300
+ it('falls back to identity encoding', async () => {
301
+ const invalid = await agent.api.app.bsky.feed.getTimeline(
302
+ {},
303
+ { headers: { ...sc.getHeaders(alice), 'accept-encoding': 'invalid' } },
304
+ )
305
+
306
+ expect(invalid.headers['content-encoding']).toBeUndefined()
307
+ })
308
+
309
+ it('errors when failing to negotiate encoding', async () => {
310
+ await expect(
311
+ agent.api.app.bsky.feed.getTimeline(
312
+ {},
313
+ {
314
+ headers: {
315
+ ...sc.getHeaders(alice),
316
+ 'accept-encoding': 'invalid, *;q=0',
317
+ },
318
+ },
319
+ ),
320
+ ).rejects.toThrow(
321
+ expect.objectContaining({
322
+ status: 406,
323
+ message: 'this service does not support any of the requested encodings',
324
+ }),
325
+ )
326
+ })
327
+
328
+ it('errors on invalid content-encoding format', async () => {
329
+ await expect(
330
+ agent.api.app.bsky.feed.getTimeline(
331
+ {},
332
+ {
333
+ headers: {
334
+ ...sc.getHeaders(alice),
335
+ 'accept-encoding': ';q=1',
336
+ },
337
+ },
338
+ ),
339
+ ).rejects.toThrow(
340
+ expect.objectContaining({
341
+ status: 400,
342
+ message: 'Invalid accept-encoding: ";q=1"',
343
+ }),
344
+ )
345
+ })
269
346
  })