@atproto/bsky 0.0.81 → 0.0.83
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/dist/api/app/bsky/graph/getSuggestedFollowsByActor.js +1 -3
- package/dist/api/app/bsky/graph/getSuggestedFollowsByActor.js.map +1 -1
- package/dist/auth-verifier.d.ts +6 -0
- package/dist/auth-verifier.d.ts.map +1 -1
- package/dist/auth-verifier.js +80 -1
- package/dist/auth-verifier.js.map +1 -1
- package/dist/config.d.ts +2 -0
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +5 -0
- package/dist/config.js.map +1 -1
- package/dist/data-plane/client.d.ts.map +1 -1
- package/dist/data-plane/client.js +2 -1
- package/dist/data-plane/client.js.map +1 -1
- package/dist/data-plane/server/db/migrations/20240829T211238293Z-simplify-actor-sync.d.ts +4 -0
- package/dist/data-plane/server/db/migrations/20240829T211238293Z-simplify-actor-sync.d.ts.map +1 -0
- package/dist/data-plane/server/db/migrations/20240829T211238293Z-simplify-actor-sync.js +26 -0
- package/dist/data-plane/server/db/migrations/20240829T211238293Z-simplify-actor-sync.js.map +1 -0
- package/dist/data-plane/server/db/migrations/index.d.ts +1 -0
- package/dist/data-plane/server/db/migrations/index.d.ts.map +1 -1
- package/dist/data-plane/server/db/migrations/index.js +2 -1
- package/dist/data-plane/server/db/migrations/index.js.map +1 -1
- package/dist/data-plane/server/db/tables/actor-sync.d.ts +0 -3
- package/dist/data-plane/server/db/tables/actor-sync.d.ts.map +1 -1
- package/dist/data-plane/server/db/tables/actor-sync.js.map +1 -1
- package/dist/data-plane/server/indexing/index.d.ts +2 -7
- package/dist/data-plane/server/indexing/index.d.ts.map +1 -1
- package/dist/data-plane/server/indexing/index.js +4 -21
- package/dist/data-plane/server/indexing/index.js.map +1 -1
- package/dist/data-plane/server/subscription.d.ts +26 -0
- package/dist/data-plane/server/subscription.d.ts.map +1 -0
- package/dist/data-plane/server/subscription.js +115 -0
- package/dist/data-plane/server/subscription.js.map +1 -0
- package/dist/feature-gates.d.ts +5 -1
- package/dist/feature-gates.d.ts.map +1 -1
- package/dist/feature-gates.js +5 -1
- package/dist/feature-gates.js.map +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +4 -0
- package/dist/index.js.map +1 -1
- package/dist/lexicon/lexicons.d.ts +35 -0
- package/dist/lexicon/lexicons.d.ts.map +1 -1
- package/dist/lexicon/lexicons.js +35 -0
- package/dist/lexicon/lexicons.js.map +1 -1
- package/dist/lexicon/types/app/bsky/actor/defs.d.ts +14 -0
- package/dist/lexicon/types/app/bsky/actor/defs.d.ts.map +1 -1
- package/dist/lexicon/types/app/bsky/actor/defs.js +9 -1
- package/dist/lexicon/types/app/bsky/actor/defs.js.map +1 -1
- package/package.json +10 -8
- package/src/api/app/bsky/graph/getSuggestedFollowsByActor.ts +1 -8
- package/src/auth-verifier.ts +77 -0
- package/src/config.ts +8 -0
- package/src/data-plane/client.ts +4 -1
- package/src/data-plane/server/db/migrations/20240829T211238293Z-simplify-actor-sync.ts +23 -0
- package/src/data-plane/server/db/migrations/index.ts +1 -0
- package/src/data-plane/server/db/tables/actor-sync.ts +0 -3
- package/src/data-plane/server/indexing/index.ts +4 -25
- package/src/data-plane/server/subscription.ts +104 -0
- package/src/feature-gates.ts +5 -1
- package/src/index.ts +5 -1
- package/src/lexicon/lexicons.ts +37 -0
- package/src/lexicon/types/app/bsky/actor/defs.ts +23 -0
- package/tests/data-plane/indexing.test.ts +1 -1
- package/tests/data-plane/{subscription/repo.test.ts → subscription.test.ts} +4 -9
- package/tests/entryway-auth.test.ts +174 -0
- package/tests/views/actor-search.test.ts +1 -1
- package/dist/data-plane/server/subscription/index.d.ts +0 -33
- package/dist/data-plane/server/subscription/index.d.ts.map +0 -1
- package/dist/data-plane/server/subscription/index.js +0 -341
- package/dist/data-plane/server/subscription/index.js.map +0 -1
- package/dist/data-plane/server/subscription/util.d.ts +0 -65
- package/dist/data-plane/server/subscription/util.d.ts.map +0 -1
- package/dist/data-plane/server/subscription/util.js +0 -215
- package/dist/data-plane/server/subscription/util.js.map +0 -1
- package/src/data-plane/server/subscription/index.ts +0 -352
- package/src/data-plane/server/subscription/util.ts +0 -156
- package/tests/data-plane/subscription/util.test.ts +0 -185
package/src/lexicon/lexicons.ts
CHANGED

@@ -4572,6 +4572,15 @@ export const schemaDict = {
               maxLength: 100,
             },
           },
+          nuxs: {
+            description: 'Storage for NUXs the user has encountered.',
+            type: 'array',
+            maxLength: 100,
+            items: {
+              type: 'ref',
+              ref: 'lex:app.bsky.actor.defs#nux',
+            },
+          },
         },
       },
       bskyAppProgressGuide: {
@@ -4586,6 +4595,34 @@ export const schemaDict = {
           },
         },
       },
+      nux: {
+        type: 'object',
+        description: 'A new user experiences (NUX) storage object',
+        required: ['id', 'completed'],
+        properties: {
+          id: {
+            type: 'string',
+            maxLength: 100,
+          },
+          completed: {
+            type: 'boolean',
+            default: false,
+          },
+          data: {
+            description:
+              'Arbitrary data for the NUX. The structure is defined by the NUX itself. Limited to 300 characters.',
+            type: 'string',
+            maxLength: 3000,
+            maxGraphemes: 300,
+          },
+          expiresAt: {
+            type: 'string',
+            format: 'datetime',
+            description:
+              'The date and time at which the NUX will expire and should be considered completed.',
+          },
+        },
+      },
     },
   },
   AppBskyActorGetPreferences: {

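For reference, a preferences object carrying the new nuxs field could look like the sketch below. Only the field names and limits (array of at most 100 items, data capped at 300 graphemes) come from the schema above; the concrete ids and values are invented for illustration.

// Illustrative only: a bskyAppStatePref preference using the new `nuxs` field.
const bskyAppStatePref = {
  $type: 'app.bsky.actor.defs#bskyAppStatePref',
  queuedNudges: ['example-nudge'],
  nuxs: [
    {
      id: 'example-nux', // up to 100 chars
      completed: false,
      data: '{"step":1}', // arbitrary string payload, structure defined by the NUX itself
      expiresAt: '2024-12-31T00:00:00.000Z', // treated as completed after this time
    },
  ],
}
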
package/src/lexicon/types/app/bsky/actor/defs.ts
CHANGED

@@ -469,6 +469,8 @@ export interface BskyAppStatePref {
   activeProgressGuide?: BskyAppProgressGuide
   /** An array of tokens which identify nudges (modals, popups, tours, highlight dots) that should be shown to the user. */
   queuedNudges?: string[]
+  /** Storage for NUXs the user has encountered. */
+  nuxs?: Nux[]
   [k: string]: unknown
 }

@@ -501,3 +503,24 @@ export function isBskyAppProgressGuide(v: unknown): v is BskyAppProgressGuide {
 export function validateBskyAppProgressGuide(v: unknown): ValidationResult {
   return lexicons.validate('app.bsky.actor.defs#bskyAppProgressGuide', v)
 }
+
+/** A new user experiences (NUX) storage object */
+export interface Nux {
+  id: string
+  completed: boolean
+  /** Arbitrary data for the NUX. The structure is defined by the NUX itself. Limited to 300 characters. */
+  data?: string
+  /** The date and time at which the NUX will expire and should be considered completed. */
+  expiresAt?: string
+  [k: string]: unknown
+}
+
+export function isNux(v: unknown): v is Nux {
+  return (
+    isObj(v) && hasProp(v, '$type') && v.$type === 'app.bsky.actor.defs#nux'
+  )
+}
+
+export function validateNux(v: unknown): ValidationResult {
+  return lexicons.validate('app.bsky.actor.defs#nux', v)
+}

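A minimal sketch of how consuming code might use the generated Nux type and validateNux helper added above. The import path is illustrative (it assumes the generated module layout shown in the file list); the validator call itself is the one defined in the diff.

import { Nux, validateNux } from './src/lexicon/types/app/bsky/actor/defs' // path is illustrative

// Build a candidate object and run it through the generated validator.
const candidate: Nux = {
  $type: 'app.bsky.actor.defs#nux',
  id: 'example-nux',
  completed: true,
}

const result = validateNux(candidate)
if (!result.success) {
  throw result.error // ValidationError describing the schema violation
}
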
package/tests/data-plane/indexing.test.ts
CHANGED

@@ -429,7 +429,7 @@ describe('indexing', () => {

   describe('indexRepo', () => {
     beforeAll(async () => {
-      network.bsky.sub.
+      await network.bsky.sub.restart()
       await basicSeed(sc, false)
       await network.processAll()
       await network.bsky.sub.destroy()

package/tests/data-plane/{subscription/repo.test.ts → subscription.test.ts}
RENAMED

@@ -1,11 +1,11 @@
 import { AtpAgent } from '@atproto/api'
 import { cborDecode, cborEncode } from '@atproto/common'
-import { DatabaseSchemaType } from '
+import { DatabaseSchemaType } from '../../src/data-plane/server/db/database-schema'
 import { SeedClient, TestNetwork, basicSeed } from '@atproto/dev-env'
 import { PreparedWrite, sequencer } from '@atproto/pds'
 import { CommitData } from '@atproto/repo'
-import { ids } from '
-import { forSnapshot } from '
+import { ids } from '../../src/lexicon/lexicons'
+import { forSnapshot } from '../_util'

 type Database = TestNetwork['bsky']['db']

@@ -60,12 +60,7 @@ describe('sync', () => {
     const originalTableDump = await getTableDump()

     // Reprocess repos via sync subscription, on top of existing indices
-    await network.bsky.sub.
-    // Hard reset of state
-    network.bsky.sub.cursor = 0
-    network.bsky.sub.seenSeq = null
-    // Boot streams back up
-    network.bsky.sub.run()
+    await network.bsky.sub.restart()
     await network.processAll()

     // Permissive of indexedAt times changing

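The tests above replace the inline destroy/reset/run sequence with a single restart() call on the subscription. The actual method lives in the new src/data-plane/server/subscription.ts, which is not shown in this section; the sketch below is only an inference from the removed inline steps and may differ from the real implementation.

// Hypothetical sketch of a restart() helper, inferred from the inline steps it
// replaces in the tests above; the real subscription.ts implementation may differ.
class RepoSubscriptionSketch {
  cursor = 0
  seenSeq: number | null = null

  async restart(): Promise<void> {
    await this.destroy() // stop the running stream and drain queues
    this.cursor = 0 // hard reset of firehose state
    this.seenSeq = null
    this.run() // boot the stream back up
  }

  run(): void {
    /* start consuming the firehose */
  }

  async destroy(): Promise<void> {
    /* abort and await in-flight work */
  }
}
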
package/tests/entryway-auth.test.ts
ADDED

@@ -0,0 +1,174 @@
+import * as nodeCrypto from 'node:crypto'
+import KeyEncoder from 'key-encoder'
+import * as ui8 from 'uint8arrays'
+import * as jose from 'jose'
+import * as crypto from '@atproto/crypto'
+import { AtpAgent, AtUri } from '@atproto/api'
+import { basicSeed, SeedClient, TestNetwork } from '@atproto/dev-env'
+import assert from 'node:assert'
+import { MINUTE } from '@atproto/common'
+
+const keyEncoder = new KeyEncoder('secp256k1')
+
+const derivePrivKey = async (
+  keypair: crypto.ExportableKeypair,
+): Promise<nodeCrypto.KeyObject> => {
+  const privKeyRaw = await keypair.export()
+  const privKeyEncoded = keyEncoder.encodePrivate(
+    ui8.toString(privKeyRaw, 'hex'),
+    'raw',
+    'pem',
+  )
+  return nodeCrypto.createPrivateKey(privKeyEncoded)
+}
+
+// @NOTE temporary measure, see note on entrywaySession in bsky/src/auth-verifier.ts
+describe('entryway auth', () => {
+  let network: TestNetwork
+  let agent: AtpAgent
+  let sc: SeedClient
+  let alice: string
+  let jwtPrivKey: nodeCrypto.KeyObject
+
+  beforeAll(async () => {
+    const keypair = await crypto.Secp256k1Keypair.create({ exportable: true })
+    jwtPrivKey = await derivePrivKey(keypair)
+    const entrywayJwtPublicKeyHex = ui8.toString(
+      keypair.publicKeyBytes(),
+      'hex',
+    )
+
+    network = await TestNetwork.create({
+      dbPostgresSchema: 'bsky_entryway_auth',
+      bsky: {
+        entrywayJwtPublicKeyHex,
+      },
+    })
+    agent = network.bsky.getClient()
+    sc = network.getSeedClient()
+    await basicSeed(sc)
+    await network.processAll()
+    alice = sc.dids.alice
+  })
+
+  afterAll(async () => {
+    await network.close()
+  })
+
+  it('works', async () => {
+    const signer = new jose.SignJWT({ scope: 'com.atproto.access' })
+      .setSubject(alice)
+      .setIssuedAt()
+      .setExpirationTime('60mins')
+      .setAudience('did:web:fake.server.bsky.network')
+      .setProtectedHeader({
+        typ: 'at+jwt', // https://www.rfc-editor.org/rfc/rfc9068.html
+        alg: 'ES256K',
+      })
+    const token = await signer.sign(jwtPrivKey)
+    const res = await agent.app.bsky.actor.getProfile(
+      { actor: sc.dids.bob },
+      { headers: { authorization: `Bearer ${token}` } },
+    )
+    expect(res.data.did).toEqual(sc.dids.bob)
+    // ensure this request is personalized for alice
+    const followingUri = res.data.viewer?.following
+    assert(followingUri)
+    const parsed = new AtUri(followingUri)
+    expect(parsed.hostname).toEqual(alice)
+  })
+
+  it('does not work on bad scopes', async () => {
+    const signer = new jose.SignJWT({ scope: 'com.atproto.refresh' })
+      .setSubject(alice)
+      .setIssuedAt()
+      .setExpirationTime('60mins')
+      .setAudience('did:web:fake.server.bsky.network')
+      .setProtectedHeader({
+        typ: 'at+jwt', // https://www.rfc-editor.org/rfc/rfc9068.html
+        alg: 'ES256K',
+      })
+    const token = await signer.sign(jwtPrivKey)
+    const attempt = agent.app.bsky.actor.getProfile(
+      { actor: sc.dids.bob },
+      { headers: { authorization: `Bearer ${token}` } },
+    )
+    await expect(attempt).rejects.toThrow('Bad token scope')
+  })
+
+  it('does not work on expired tokens', async () => {
+    const time = Math.floor((Date.now() - 5 * MINUTE) / 1000)
+    const signer = new jose.SignJWT({ scope: 'com.atproto.access' })
+      .setSubject(alice)
+      .setIssuedAt()
+      .setExpirationTime(time)
+      .setAudience('did:web:fake.server.bsky.network')
+      .setProtectedHeader({
+        typ: 'at+jwt', // https://www.rfc-editor.org/rfc/rfc9068.html
+        alg: 'ES256K',
+      })
+    const token = await signer.sign(jwtPrivKey)
+    const attempt = agent.app.bsky.actor.getProfile(
+      { actor: sc.dids.bob },
+      { headers: { authorization: `Bearer ${token}` } },
+    )
+    await expect(attempt).rejects.toThrow('Token has expired')
+  })
+
+  it('does not work on bad auds', async () => {
+    const signer = new jose.SignJWT({ scope: 'com.atproto.access' })
+      .setSubject(alice)
+      .setIssuedAt()
+      .setExpirationTime('60mins')
+      .setAudience('did:web:my.personal.pds.com')
+      .setProtectedHeader({
+        typ: 'at+jwt', // https://www.rfc-editor.org/rfc/rfc9068.html
+        alg: 'ES256K',
+      })
+    const token = await signer.sign(jwtPrivKey)
+    const attempt = agent.app.bsky.actor.getProfile(
+      { actor: sc.dids.bob },
+      { headers: { authorization: `Bearer ${token}` } },
+    )
+    await expect(attempt).rejects.toThrow('Bad token aud')
+  })
+
+  it('does not work with bad signatures', async () => {
+    const fakeKey = await crypto.Secp256k1Keypair.create({ exportable: true })
+    const fakeJwtKey = await derivePrivKey(fakeKey)
+    const signer = new jose.SignJWT({ scope: 'com.atproto.access' })
+      .setSubject(alice)
+      .setIssuedAt()
+      .setExpirationTime('60mins')
+      .setAudience('did:web:my.personal.pds.com')
+      .setProtectedHeader({
+        typ: 'at+jwt', // https://www.rfc-editor.org/rfc/rfc9068.html
+        alg: 'ES256K',
+      })
+    const token = await signer.sign(fakeJwtKey)
+    const attempt = agent.app.bsky.actor.getProfile(
+      { actor: sc.dids.bob },
+      { headers: { authorization: `Bearer ${token}` } },
+    )
+    await expect(attempt).rejects.toThrow('Token could not be verified')
+  })
+
+  it('does not work on flexible aud routes', async () => {
+    const signer = new jose.SignJWT({ scope: 'com.atproto.access' })
+      .setSubject(alice)
+      .setIssuedAt()
+      .setExpirationTime('60mins')
+      .setAudience('did:web:fake.server.bsky.network')
+      .setProtectedHeader({
+        typ: 'at+jwt', // https://www.rfc-editor.org/rfc/rfc9068.html
+        alg: 'ES256K',
+      })
+    const token = await signer.sign(jwtPrivKey)
+    const feedUri = AtUri.make(alice, 'app.bsky.feed.generator', 'fake-feed')
+    const attempt = agent.app.bsky.feed.getFeed(
+      { feed: feedUri.toString() },
+      { headers: { authorization: `Bearer ${token}` } },
+    )
+    await expect(attempt).rejects.toThrow('Malformed token')
+  })
+})

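The new test exercises entryway-issued access tokens that the updated auth-verifier.ts accepts (see the +77 lines in the file list). That verification code is not shown in this section; the sketch below only illustrates how such an ES256K token could be checked using the same libraries the test uses for signing (key-encoder, node:crypto, jose), and is not the package's actual implementation.

import * as nodeCrypto from 'node:crypto'
import KeyEncoder from 'key-encoder'
import * as jose from 'jose'

const keyEncoder = new KeyEncoder('secp256k1')

// Hypothetical verifier sketch, mirroring the signing setup in the test above.
export const verifyEntrywayToken = async (
  token: string,
  entrywayJwtPublicKeyHex: string, // config value referenced by the test above
  serverDid: string, // e.g. 'did:web:fake.server.bsky.network'
) => {
  // Re-encode the raw secp256k1 public key as PEM so node:crypto can load it.
  const publicKeyPem = keyEncoder.encodePublic(entrywayJwtPublicKeyHex, 'raw', 'pem')
  const publicKey = nodeCrypto.createPublicKey(publicKeyPem)
  // jwtVerify checks signature and expiry; audience rejects the "bad auds" case.
  const { payload } = await jose.jwtVerify(token, publicKey, {
    audience: serverDid,
  })
  if (payload.scope !== 'com.atproto.access') {
    throw new Error('Bad token scope') // rejects refresh tokens, as tested above
  }
  return { did: payload.sub }
}
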
package/tests/views/actor-search.test.ts
CHANGED

@@ -39,7 +39,7 @@ describe.skip('pds actor search views', () => {
       .execute()

     // Process remaining profiles
-    network.bsky.sub.
+    await network.bsky.sub.restart()
    await network.processAll(50000)
     headers = await network.serviceHeaders(
       Object.values(sc.dids)[0],

package/dist/data-plane/server/subscription/index.d.ts
DELETED

@@ -1,33 +0,0 @@
-/// <reference types="node" />
-import { IdResolver } from '@atproto/identity';
-import { IndexingService } from '../indexing';
-import { Database } from '../db';
-import { ConsecutiveList, PartitionedQueue } from './util';
-import { BackgroundQueue } from '../background';
-export declare class RepoSubscription {
-    private opts;
-    ac: AbortController;
-    running: Promise<void> | undefined;
-    cursor: number;
-    seenSeq: number | null;
-    repoQueue: PartitionedQueue;
-    consecutive: ConsecutiveList<number>;
-    background: BackgroundQueue;
-    indexingSvc: IndexingService;
-    constructor(opts: {
-        service: string;
-        db: Database;
-        idResolver: IdResolver;
-        background: BackgroundQueue;
-    });
-    run(): void;
-    private process;
-    private handleMessage;
-    private handleCommit;
-    private handleUpdateHandle;
-    private handleIdentityEvt;
-    private handleAccountEvt;
-    private getSubscription;
-    destroy(): Promise<void>;
-}
-//# sourceMappingURL=index.d.ts.map

package/dist/data-plane/server/subscription/index.d.ts.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/data-plane/server/subscription/index.ts"],"names":[],"mappings":";AAMA,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAA;AAY9C,OAAO,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAA;AAChC,OAAO,EAEL,eAAe,EACf,gBAAgB,EAGjB,MAAM,QAAQ,CAAA;AACf,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAA;AAE/C,qBAAa,gBAAgB;IAWzB,OAAO,CAAC,IAAI;IAVd,EAAE,kBAAwB;IAC1B,OAAO,EAAE,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAA;IAClC,MAAM,SAAI;IACV,OAAO,EAAE,MAAM,GAAG,IAAI,CAAO;IAC7B,SAAS,mBAAkD;IAC3D,WAAW,0BAAgC;IAC3C,UAAU,EAAE,eAAe,CAAA;IAC3B,WAAW,EAAE,eAAe,CAAA;gBAGlB,IAAI,EAAE;QACZ,OAAO,EAAE,MAAM,CAAA;QACf,EAAE,EAAE,QAAQ,CAAA;QACZ,UAAU,EAAE,UAAU,CAAA;QACtB,UAAU,EAAE,eAAe,CAAA;KAC5B;IAUH,GAAG;YAgBW,OAAO;YAqBP,aAAa;YAqCb,YAAY;YAgEZ,kBAAkB;YAIlB,iBAAiB;YAIjB,gBAAgB;IAQ9B,OAAO,CAAC,eAAe;IAkCjB,OAAO;CAMd"}

package/dist/data-plane/server/subscription/index.js
DELETED

@@ -1,341 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.RepoSubscription = void 0;
-const node_assert_1 = __importDefault(require("node:assert"));
-const syntax_1 = require("@atproto/syntax");
-const xrpc_server_1 = require("@atproto/xrpc-server");
-const common_1 = require("@atproto/common");
-const lexicon_1 = require("@atproto/lexicon");
-const repo_1 = require("@atproto/repo");
-const lexicons_1 = require("../../../lexicon/lexicons");
-const message = __importStar(require("../../../lexicon/types/com/atproto/sync/subscribeRepos"));
-const logger_1 = require("../../../logger");
-const indexing_1 = require("../indexing");
-const util_1 = require("./util");
-const background_1 = require("../background");
-class RepoSubscription {
-    constructor(opts) {
-        Object.defineProperty(this, "opts", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: opts
-        });
-        Object.defineProperty(this, "ac", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: new AbortController()
-        });
-        Object.defineProperty(this, "running", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "cursor", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: 0
-        });
-        Object.defineProperty(this, "seenSeq", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: null
-        });
-        Object.defineProperty(this, "repoQueue", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: new util_1.PartitionedQueue({ concurrency: Infinity })
-        });
-        Object.defineProperty(this, "consecutive", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: new util_1.ConsecutiveList()
-        });
-        Object.defineProperty(this, "background", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "indexingSvc", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        this.background = new background_1.BackgroundQueue(this.opts.db);
-        this.indexingSvc = new indexing_1.IndexingService(this.opts.db, this.opts.idResolver, this.background);
-    }
-    run() {
-        if (this.running)
-            return;
-        this.ac = new AbortController();
-        this.repoQueue = new util_1.PartitionedQueue({ concurrency: Infinity });
-        this.consecutive = new util_1.ConsecutiveList();
-        this.running = this.process()
-            .catch((err) => {
-            if (err.name !== 'AbortError') {
-                // allow this to cause an unhandled rejection, let deployment handle the crash.
-                logger_1.subLogger.error({ err }, 'subscription crashed');
-                throw err;
-            }
-        })
-            .finally(() => (this.running = undefined));
-    }
-    async process() {
-        const sub = this.getSubscription();
-        for await (const msg of sub) {
-            const details = getMessageDetails(msg);
-            if ('info' in details) {
-                // These messages are not sequenced, we just log them and carry on
-                logger_1.subLogger.warn({ provider: this.opts.service, message: (0, util_1.loggableMessage)(msg) }, `sub ${details.info ? 'info' : 'unknown'} message`);
-                continue;
-            }
-            const item = this.consecutive.push(details.seq);
-            this.repoQueue.add(details.repo, async () => {
-                await this.handleMessage(item, details);
-            });
-            this.seenSeq = details.seq;
-            await this.repoQueue.main.onEmpty(); // backpressure
-        }
-    }
-    async handleMessage(item, envelope) {
-        const msg = envelope.message;
-        try {
-            if (message.isCommit(msg)) {
-                await this.handleCommit(msg);
-            }
-            else if (message.isHandle(msg)) {
-                await this.handleUpdateHandle(msg);
-            }
-            else if (message.isIdentity(msg)) {
-                await this.handleIdentityEvt(msg);
-            }
-            else if (message.isAccount(msg)) {
-                await this.handleAccountEvt(msg);
-            }
-            else if (message.isTombstone(msg)) {
-                // Ignore tombstones
-            }
-            else if (message.isMigrate(msg)) {
-                // Ignore migrations
-            }
-            else {
-                const exhaustiveCheck = msg;
-                throw new Error(`Unhandled message type: ${exhaustiveCheck['$type']}`);
-            }
-        }
-        catch (err) {
-            // We log messages we can't process and move on:
-            // otherwise the cursor would get stuck on a poison message.
-            logger_1.subLogger.error({ err, message: (0, util_1.loggableMessage)(msg) }, 'indexer message processing error');
-        }
-        finally {
-            const latest = item.complete().at(-1);
-            if (latest !== undefined) {
-                this.cursor = latest;
-            }
-        }
-    }
-    async handleCommit(msg) {
-        const indexRecords = async () => {
-            const { root, rootCid, ops } = await getOps(msg);
-            if (msg.tooBig) {
-                await this.indexingSvc.indexRepo(msg.repo, rootCid.toString());
-                await this.indexingSvc.setCommitLastSeen(root, msg);
-                return;
-            }
-            if (msg.rebase) {
-                const needsReindex = await this.indexingSvc.checkCommitNeedsIndexing(root);
-                if (needsReindex) {
-                    await this.indexingSvc.indexRepo(msg.repo, rootCid.toString());
-                }
-                await this.indexingSvc.setCommitLastSeen(root, msg);
-                return;
-            }
-            for (const op of ops) {
-                if (op.action === repo_1.WriteOpAction.Delete) {
-                    await this.indexingSvc.deleteRecord(op.uri);
-                }
-                else {
-                    try {
-                        await this.indexingSvc.indexRecord(op.uri, op.cid, op.record, op.action, // create or update
-                        msg.time);
-                    }
-                    catch (err) {
-                        if (err instanceof lexicon_1.ValidationError) {
-                            logger_1.subLogger.warn({
-                                did: msg.repo,
-                                commit: msg.commit.toString(),
-                                uri: op.uri.toString(),
-                                cid: op.cid.toString(),
-                            }, 'skipping indexing of invalid record');
-                        }
-                        else {
-                            logger_1.subLogger.error({
-                                err,
-                                did: msg.repo,
-                                commit: msg.commit.toString(),
-                                uri: op.uri.toString(),
-                                cid: op.cid.toString(),
-                            }, 'skipping indexing due to error processing record');
-                        }
-                    }
-                }
-            }
-            await this.indexingSvc.setCommitLastSeen(root, msg);
-        };
-        const results = await Promise.allSettled([
-            indexRecords(),
-            this.indexingSvc.indexHandle(msg.repo, msg.time),
-        ]);
-        (0, common_1.handleAllSettledErrors)(results);
-    }
-    async handleUpdateHandle(msg) {
-        await this.indexingSvc.indexHandle(msg.did, msg.time, true);
-    }
-    async handleIdentityEvt(msg) {
-        await this.indexingSvc.indexHandle(msg.did, msg.time, true);
-    }
-    async handleAccountEvt(msg) {
-        if (msg.active === false && msg.status === 'deleted') {
-            await this.indexingSvc.deleteActor(msg.did);
-        }
-        else {
-            await this.indexingSvc.updateActorStatus(msg.did, msg.active, msg.status);
-        }
-    }
-    getSubscription() {
-        return new xrpc_server_1.Subscription({
-            service: this.opts.service,
-            method: lexicons_1.ids.ComAtprotoSyncSubscribeRepos,
-            signal: this.ac.signal,
-            getParams: async () => {
-                return { cursor: this.cursor };
-            },
-            onReconnectError: (err, reconnects, initial) => {
-                logger_1.subLogger.warn({ err, reconnects, initial }, 'sub reconnect');
-            },
-            validate: (value) => {
-                try {
-                    return lexicons_1.lexicons.assertValidXrpcMessage(lexicons_1.ids.ComAtprotoSyncSubscribeRepos, value);
-                }
-                catch (err) {
-                    logger_1.subLogger.warn({
-                        err,
-                        seq: ifNumber(value?.['seq']),
-                        repo: ifString(value?.['repo']),
-                        commit: ifString(value?.['commit']?.toString()),
-                        time: ifString(value?.['time']),
-                        provider: this.opts.service,
-                    }, 'ingester sub skipped invalid message');
-                }
-            },
-        });
-    }
-    async destroy() {
-        this.ac.abort();
-        await this.running;
-        await this.repoQueue.destroy();
-        await this.background.processAll();
-    }
-}
-exports.RepoSubscription = RepoSubscription;
-function ifString(val) {
-    return typeof val === 'string' ? val : undefined;
-}
-function ifNumber(val) {
-    return typeof val === 'number' ? val : undefined;
-}
-function getMessageDetails(msg) {
-    if (message.isCommit(msg)) {
-        return { seq: msg.seq, repo: msg.repo, message: msg };
-    }
-    else if (message.isHandle(msg)) {
-        return { seq: msg.seq, repo: msg.did, message: msg };
-    }
-    else if (message.isIdentity(msg)) {
-        return { seq: msg.seq, repo: msg.did, message: msg };
-    }
-    else if (message.isAccount(msg)) {
-        return { seq: msg.seq, repo: msg.did, message: msg };
-    }
-    else if (message.isMigrate(msg)) {
-        return { seq: msg.seq, repo: msg.did, message: msg };
-    }
-    else if (message.isTombstone(msg)) {
-        return { seq: msg.seq, repo: msg.did, message: msg };
-    }
-    else if (message.isInfo(msg)) {
-        return { info: msg };
-    }
-    return { info: null };
-}
-async function getOps(msg) {
-    const car = await (0, repo_1.readCarWithRoot)(msg.blocks);
-    const rootBytes = car.blocks.get(car.root);
-    (0, node_assert_1.default)(rootBytes, 'Missing commit block in car slice');
-    const root = repo_1.def.commit.schema.parse((0, common_1.cborDecode)(rootBytes));
-    const ops = msg.ops.map((op) => {
-        const [collection, rkey] = op.path.split('/');
-        (0, node_assert_1.default)(collection && rkey);
-        if (op.action === repo_1.WriteOpAction.Create ||
-            op.action === repo_1.WriteOpAction.Update) {
-            (0, node_assert_1.default)(op.cid);
-            const record = car.blocks.get(op.cid);
-            (0, node_assert_1.default)(record);
-            return {
-                action: op.action === repo_1.WriteOpAction.Create
-                    ? repo_1.WriteOpAction.Create
-                    : repo_1.WriteOpAction.Update,
-                cid: op.cid,
-                record: (0, repo_1.cborToLexRecord)(record),
-                blobs: [],
-                uri: syntax_1.AtUri.make(msg.repo, collection, rkey),
-            };
-        }
-        else if (op.action === repo_1.WriteOpAction.Delete) {
-            return {
-                action: repo_1.WriteOpAction.Delete,
-                uri: syntax_1.AtUri.make(msg.repo, collection, rkey),
-            };
-        }
-        else {
-            throw new Error(`Unknown repo op action: ${op.action}`);
-        }
-    });
-    return { root, rootCid: car.root, ops };
-}
-//# sourceMappingURL=index.js.map