@atproto/bsky 0.0.16 → 0.0.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110)
  1. package/CHANGELOG.md +14 -0
  2. package/dist/cache/read-through.d.ts +30 -0
  3. package/dist/config.d.ts +18 -0
  4. package/dist/context.d.ts +6 -6
  5. package/dist/daemon/config.d.ts +15 -0
  6. package/dist/daemon/context.d.ts +15 -0
  7. package/dist/daemon/index.d.ts +23 -0
  8. package/dist/daemon/logger.d.ts +3 -0
  9. package/dist/daemon/notifications.d.ts +18 -0
  10. package/dist/daemon/services.d.ts +11 -0
  11. package/dist/db/database-schema.d.ts +1 -2
  12. package/dist/db/index.js +16 -1
  13. package/dist/db/index.js.map +3 -3
  14. package/dist/db/migrations/20231205T000257238Z-remove-did-cache.d.ts +3 -0
  15. package/dist/db/migrations/index.d.ts +1 -0
  16. package/dist/did-cache.d.ts +10 -7
  17. package/dist/index.d.ts +4 -0
  18. package/dist/index.js +1921 -938
  19. package/dist/index.js.map +3 -3
  20. package/dist/indexer/context.d.ts +2 -0
  21. package/dist/indexer/index.d.ts +1 -0
  22. package/dist/lexicon/index.d.ts +12 -0
  23. package/dist/lexicon/lexicons.d.ts +134 -0
  24. package/dist/lexicon/types/com/atproto/admin/deleteAccount.d.ts +25 -0
  25. package/dist/lexicon/types/com/atproto/temp/importRepo.d.ts +32 -0
  26. package/dist/lexicon/types/com/atproto/temp/pushBlob.d.ts +25 -0
  27. package/dist/lexicon/types/com/atproto/temp/transferAccount.d.ts +42 -0
  28. package/dist/logger.d.ts +1 -0
  29. package/dist/redis.d.ts +10 -1
  30. package/dist/services/actor/index.d.ts +18 -4
  31. package/dist/services/actor/views.d.ts +5 -7
  32. package/dist/services/feed/index.d.ts +6 -4
  33. package/dist/services/feed/views.d.ts +5 -4
  34. package/dist/services/index.d.ts +3 -7
  35. package/dist/services/label/index.d.ts +10 -4
  36. package/dist/services/moderation/index.d.ts +0 -1
  37. package/dist/services/types.d.ts +3 -0
  38. package/dist/services/util/notification.d.ts +5 -0
  39. package/dist/services/util/post.d.ts +6 -6
  40. package/dist/util/retry.d.ts +1 -6
  41. package/package.json +6 -6
  42. package/src/api/app/bsky/actor/searchActorsTypeahead.ts +1 -1
  43. package/src/cache/read-through.ts +151 -0
  44. package/src/config.ts +90 -1
  45. package/src/context.ts +7 -7
  46. package/src/daemon/config.ts +60 -0
  47. package/src/daemon/context.ts +27 -0
  48. package/src/daemon/index.ts +78 -0
  49. package/src/daemon/logger.ts +6 -0
  50. package/src/daemon/notifications.ts +54 -0
  51. package/src/daemon/services.ts +22 -0
  52. package/src/db/database-schema.ts +0 -2
  53. package/src/db/migrations/20231205T000257238Z-remove-did-cache.ts +14 -0
  54. package/src/db/migrations/index.ts +1 -0
  55. package/src/did-cache.ts +33 -56
  56. package/src/feed-gen/index.ts +0 -4
  57. package/src/index.ts +55 -16
  58. package/src/indexer/context.ts +5 -0
  59. package/src/indexer/index.ts +10 -7
  60. package/src/lexicon/index.ts +50 -0
  61. package/src/lexicon/lexicons.ts +156 -0
  62. package/src/lexicon/types/com/atproto/admin/deleteAccount.ts +38 -0
  63. package/src/lexicon/types/com/atproto/temp/importRepo.ts +45 -0
  64. package/src/lexicon/types/com/atproto/temp/pushBlob.ts +39 -0
  65. package/src/lexicon/types/com/atproto/temp/transferAccount.ts +62 -0
  66. package/src/logger.ts +2 -0
  67. package/src/redis.ts +43 -3
  68. package/src/services/actor/index.ts +55 -7
  69. package/src/services/actor/views.ts +16 -13
  70. package/src/services/feed/index.ts +27 -13
  71. package/src/services/feed/views.ts +20 -10
  72. package/src/services/index.ts +14 -14
  73. package/src/services/indexing/index.ts +7 -10
  74. package/src/services/indexing/plugins/post.ts +13 -0
  75. package/src/services/label/index.ts +66 -22
  76. package/src/services/moderation/index.ts +1 -1
  77. package/src/services/moderation/status.ts +1 -4
  78. package/src/services/types.ts +4 -0
  79. package/src/services/util/notification.ts +70 -0
  80. package/src/util/retry.ts +1 -44
  81. package/tests/admin/get-repo.test.ts +5 -3
  82. package/tests/admin/moderation.test.ts +2 -2
  83. package/tests/admin/repo-search.test.ts +1 -0
  84. package/tests/algos/hot-classic.test.ts +1 -2
  85. package/tests/auth.test.ts +1 -1
  86. package/tests/auto-moderator/labeler.test.ts +19 -20
  87. package/tests/auto-moderator/takedowns.test.ts +16 -10
  88. package/tests/blob-resolver.test.ts +4 -2
  89. package/tests/daemon.test.ts +191 -0
  90. package/tests/did-cache.test.ts +20 -5
  91. package/tests/handle-invalidation.test.ts +1 -5
  92. package/tests/indexing.test.ts +20 -13
  93. package/tests/redis-cache.test.ts +231 -0
  94. package/tests/seeds/basic.ts +3 -0
  95. package/tests/subscription/repo.test.ts +4 -7
  96. package/tests/views/profile.test.ts +0 -1
  97. package/tests/views/thread.test.ts +73 -78
  98. package/tests/views/threadgating.test.ts +38 -0
  99. package/dist/db/tables/did-cache.d.ts +0 -10
  100. package/dist/feed-gen/best-of-follows.d.ts +0 -29
  101. package/dist/feed-gen/whats-hot.d.ts +0 -29
  102. package/dist/feed-gen/with-friends.d.ts +0 -3
  103. package/dist/label-cache.d.ts +0 -19
  104. package/src/db/tables/did-cache.ts +0 -13
  105. package/src/feed-gen/best-of-follows.ts +0 -77
  106. package/src/feed-gen/whats-hot.ts +0 -101
  107. package/src/feed-gen/with-friends.ts +0 -43
  108. package/src/label-cache.ts +0 -90
  109. package/tests/algos/whats-hot.test.ts +0 -118
  110. package/tests/algos/with-friends.test.ts +0 -145
@@ -0,0 +1,191 @@
1
+ import assert from 'assert'
2
+ import { AtUri } from '@atproto/api'
3
+ import { TestNetwork } from '@atproto/dev-env'
4
+ import { BskyDaemon, DaemonConfig, PrimaryDatabase } from '../src'
5
+ import usersSeed from './seeds/users'
6
+ import { countAll, excluded } from '../src/db/util'
7
+ import { NotificationsDaemon } from '../src/daemon/notifications'
8
+ import {
9
+ BEFORE_LAST_SEEN_DAYS,
10
+ BEFORE_LATEST_UNREAD_DAYS,
11
+ UNREAD_KEPT_COUNT,
12
+ } from '../src/services/util/notification'
13
+
14
+ describe('daemon', () => {
15
+ let network: TestNetwork
16
+ let daemon: BskyDaemon
17
+ let db: PrimaryDatabase
18
+ let actors: { did: string }[] = []
19
+
20
+ beforeAll(async () => {
21
+ network = await TestNetwork.create({
22
+ dbPostgresSchema: 'bsky_daemon',
23
+ })
24
+ db = network.bsky.ctx.db.getPrimary()
25
+ daemon = BskyDaemon.create({
26
+ db,
27
+ cfg: new DaemonConfig({
28
+ version: network.bsky.ctx.cfg.version,
29
+ dbPostgresUrl: network.bsky.ctx.cfg.dbPrimaryPostgresUrl,
30
+ dbPostgresSchema: network.bsky.ctx.cfg.dbPostgresSchema,
31
+ }),
32
+ })
33
+ const sc = network.getSeedClient()
34
+ await usersSeed(sc)
35
+ await network.processAll()
36
+ actors = await db.db.selectFrom('actor').selectAll().execute()
37
+ })
38
+
39
+ afterAll(async () => {
40
+ await network.close()
41
+ })
42
+
43
+ describe('notifications daemon', () => {
44
+ it('processes all dids', async () => {
45
+ for (const { did } of actors) {
46
+ await Promise.all([
47
+ setLastSeen(daemon.ctx.db, { did }),
48
+ createNotifications(daemon.ctx.db, {
49
+ did,
50
+ daysAgo: 2 * BEFORE_LAST_SEEN_DAYS,
51
+ count: 1,
52
+ }),
53
+ ])
54
+ }
55
+ await expect(countNotifications(db)).resolves.toBe(actors.length)
56
+ await runNotifsOnce(daemon.notifications)
57
+ await expect(countNotifications(db)).resolves.toBe(0)
58
+ })
59
+
60
+ it('removes read notifications older than threshold.', async () => {
61
+ const { did } = actors[0]
62
+ const lastSeenDaysAgo = 10
63
+ await Promise.all([
64
+ setLastSeen(daemon.ctx.db, { did, daysAgo: lastSeenDaysAgo }),
65
+ // read, delete
66
+ createNotifications(daemon.ctx.db, {
67
+ did,
68
+ daysAgo: lastSeenDaysAgo + BEFORE_LAST_SEEN_DAYS + 1,
69
+ count: 2,
70
+ }),
71
+ // read, keep
72
+ createNotifications(daemon.ctx.db, {
73
+ did,
74
+ daysAgo: lastSeenDaysAgo + BEFORE_LAST_SEEN_DAYS - 1,
75
+ count: 3,
76
+ }),
77
+ // unread, keep
78
+ createNotifications(daemon.ctx.db, {
79
+ did,
80
+ daysAgo: lastSeenDaysAgo - 1,
81
+ count: 4,
82
+ }),
83
+ ])
84
+ await expect(countNotifications(db)).resolves.toBe(9)
85
+ await runNotifsOnce(daemon.notifications)
86
+ await expect(countNotifications(db)).resolves.toBe(7)
87
+ await clearNotifications(db)
88
+ })
89
+
90
+ it('removes unread notifications older than threshold.', async () => {
91
+ const { did } = actors[0]
92
+ await Promise.all([
93
+ setLastSeen(daemon.ctx.db, {
94
+ did,
95
+ daysAgo: 2 * BEFORE_LATEST_UNREAD_DAYS, // all are unread
96
+ }),
97
+ createNotifications(daemon.ctx.db, {
98
+ did,
99
+ daysAgo: 0,
100
+ count: 1,
101
+ }),
102
+ createNotifications(daemon.ctx.db, {
103
+ did,
104
+ daysAgo: BEFORE_LATEST_UNREAD_DAYS - 1,
105
+ count: 99,
106
+ }),
107
+ createNotifications(daemon.ctx.db, {
108
+ did,
109
+ daysAgo: BEFORE_LATEST_UNREAD_DAYS + 1,
110
+ count: 400,
111
+ }),
112
+ ])
113
+ await expect(countNotifications(db)).resolves.toBe(UNREAD_KEPT_COUNT)
114
+ await runNotifsOnce(daemon.notifications)
115
+ // none removed when within UNREAD_KEPT_COUNT
116
+ await expect(countNotifications(db)).resolves.toBe(UNREAD_KEPT_COUNT)
117
+ // add one more, tip over UNREAD_KEPT_COUNT
118
+ await createNotifications(daemon.ctx.db, {
119
+ did,
120
+ daysAgo: BEFORE_LATEST_UNREAD_DAYS + 1,
121
+ count: 1,
122
+ })
123
+ await runNotifsOnce(daemon.notifications)
124
+ // removed all older than BEFORE_LATEST_UNREAD_DAYS
125
+ await expect(countNotifications(db)).resolves.toBe(100)
126
+ await clearNotifications(db)
127
+ })
128
+ })
129
+
130
+ const runNotifsOnce = async (notifsDaemon: NotificationsDaemon) => {
131
+ assert(!notifsDaemon.running, 'notifications daemon is already running')
132
+ notifsDaemon.run({ forever: false, batchSize: 2 })
133
+ await notifsDaemon.running
134
+ }
135
+
136
+ const setLastSeen = async (
137
+ db: PrimaryDatabase,
138
+ opts: { did: string; daysAgo?: number },
139
+ ) => {
140
+ const { did, daysAgo = 0 } = opts
141
+ const lastSeenAt = new Date()
142
+ lastSeenAt.setDate(lastSeenAt.getDate() - daysAgo)
143
+ await db.db
144
+ .insertInto('actor_state')
145
+ .values({ did, lastSeenNotifs: lastSeenAt.toISOString() })
146
+ .onConflict((oc) =>
147
+ oc.column('did').doUpdateSet({
148
+ lastSeenNotifs: excluded(db.db, 'lastSeenNotifs'),
149
+ }),
150
+ )
151
+ .execute()
152
+ }
153
+
154
+ const createNotifications = async (
155
+ db: PrimaryDatabase,
156
+ opts: {
157
+ did: string
158
+ count: number
159
+ daysAgo: number
160
+ },
161
+ ) => {
162
+ const { did, count, daysAgo } = opts
163
+ const sortAt = new Date()
164
+ sortAt.setDate(sortAt.getDate() - daysAgo)
165
+ await db.db
166
+ .insertInto('notification')
167
+ .values(
168
+ [...Array(count)].map(() => ({
169
+ did,
170
+ author: did,
171
+ reason: 'none',
172
+ recordCid: 'bafycid',
173
+ recordUri: AtUri.make(did, 'invalid.collection', 'self').toString(),
174
+ sortAt: sortAt.toISOString(),
175
+ })),
176
+ )
177
+ .execute()
178
+ }
179
+
180
+ const clearNotifications = async (db: PrimaryDatabase) => {
181
+ await db.db.deleteFrom('notification').execute()
182
+ }
183
+
184
+ const countNotifications = async (db: PrimaryDatabase) => {
185
+ const { count } = await db.db
186
+ .selectFrom('notification')
187
+ .select(countAll.as('count'))
188
+ .executeTakeFirstOrThrow()
189
+ return count
190
+ }
191
+ })
@@ -1,14 +1,16 @@
1
1
  import { TestNetwork, SeedClient } from '@atproto/dev-env'
2
2
  import userSeed from './seeds/users'
3
3
  import { IdResolver } from '@atproto/identity'
4
- import DidSqlCache from '../src/did-cache'
4
+ import DidRedisCache from '../src/did-cache'
5
5
  import { wait } from '@atproto/common'
6
+ import { Redis } from '../src'
6
7
 
7
8
  describe('did cache', () => {
8
9
  let network: TestNetwork
9
10
  let sc: SeedClient
10
11
  let idResolver: IdResolver
11
- let didCache: DidSqlCache
12
+ let redis: Redis
13
+ let didCache: DidRedisCache
12
14
 
13
15
  let alice: string
14
16
  let bob: string
@@ -20,6 +22,7 @@ describe('did cache', () => {
20
22
  dbPostgresSchema: 'bsky_did_cache',
21
23
  })
22
24
  idResolver = network.bsky.indexer.ctx.idResolver
25
+ redis = network.bsky.indexer.ctx.redis
23
26
  didCache = network.bsky.indexer.ctx.didCache
24
27
  sc = network.getSeedClient()
25
28
  await userSeed(sc)
@@ -50,7 +53,12 @@ describe('did cache', () => {
50
53
  })
51
54
 
52
55
  it('clears cache and repopulates', async () => {
53
- await idResolver.did.cache?.clear()
56
+ await Promise.all([
57
+ idResolver.did.cache?.clearEntry(alice),
58
+ idResolver.did.cache?.clearEntry(bob),
59
+ idResolver.did.cache?.clearEntry(carol),
60
+ idResolver.did.cache?.clearEntry(dan),
61
+ ])
54
62
  const docsCleared = await Promise.all([
55
63
  idResolver.did.cache?.checkCache(alice),
56
64
  idResolver.did.cache?.checkCache(bob),
@@ -81,7 +89,10 @@ describe('did cache', () => {
81
89
  })
82
90
 
83
91
  it('accurately reports expired dids & refreshes the cache', async () => {
84
- const didCache = new DidSqlCache(network.bsky.ctx.db.getPrimary(), 1, 60000)
92
+ const didCache = new DidRedisCache(redis.withNamespace('did-doc'), {
93
+ staleTTL: 1,
94
+ maxTTL: 60000,
95
+ })
85
96
  const shortCacheResolver = new IdResolver({
86
97
  plcUrl: network.bsky.ctx.cfg.didPlcUrl,
87
98
  didCache,
@@ -110,7 +121,10 @@ describe('did cache', () => {
110
121
  })
111
122
 
112
123
  it('does not return expired dids & refreshes the cache', async () => {
113
- const didCache = new DidSqlCache(network.bsky.ctx.db.getPrimary(), 0, 1)
124
+ const didCache = new DidRedisCache(redis.withNamespace('did-doc'), {
125
+ staleTTL: 0,
126
+ maxTTL: 1,
127
+ })
114
128
  const shortExpireResolver = new IdResolver({
115
129
  plcUrl: network.bsky.ctx.cfg.didPlcUrl,
116
130
  didCache,
@@ -125,5 +139,6 @@ describe('did cache', () => {
125
139
  // see that the resolver does not return expired value & instead force refreshes
126
140
  const staleGet = await shortExpireResolver.did.resolve(alice)
127
141
  expect(staleGet?.id).toEqual(alice)
142
+ await didCache.destroy()
128
143
  })
129
144
  })
@@ -102,11 +102,7 @@ describe('handle invalidation', () => {
102
102
  it('deals with handle contention', async () => {
103
103
  await backdateIndexedAt(bob)
104
104
  // update alices handle so that the pds will let bob take her old handle
105
- await network.pds.ctx.db.db
106
- .updateTable('did_handle')
107
- .where('did', '=', alice)
108
- .set({ handle: 'not-alice.test' })
109
- .execute()
105
+ await network.pds.ctx.accountManager.updateHandle(alice, 'not-alice.test')
110
106
 
111
107
  await pdsAgent.api.com.atproto.identity.updateHandle(
112
108
  {
@@ -498,7 +498,8 @@ describe('indexing', () => {
498
498
 
499
499
  it('skips invalid records.', async () => {
500
500
  const { db, services } = network.bsky.indexer.ctx
501
- const { db: pdsDb, services: pdsServices } = network.pds.ctx
501
+ const { accountManager } = network.pds.ctx
502
+ // const { db: pdsDb, services: pdsServices } = network.pds.ctx
502
503
  // Create a good and a bad post record
503
504
  const writes = await Promise.all([
504
505
  pdsRepo.prepareCreate({
@@ -513,9 +514,20 @@ describe('indexing', () => {
513
514
  validate: false,
514
515
  }),
515
516
  ])
516
- await pdsServices
517
- .repo(pdsDb)
518
- .processWrites({ did: sc.dids.alice, writes }, 1)
517
+ const writeCommit = await network.pds.ctx.actorStore.transact(
518
+ sc.dids.alice,
519
+ (store) => store.repo.processWrites(writes),
520
+ )
521
+ await accountManager.updateRepoRoot(
522
+ sc.dids.alice,
523
+ writeCommit.cid,
524
+ writeCommit.rev,
525
+ )
526
+ await network.pds.ctx.sequencer.sequenceCommit(
527
+ sc.dids.alice,
528
+ writeCommit,
529
+ writes,
530
+ )
519
531
  // Index
520
532
  const { data: commit } =
521
533
  await pdsAgent.api.com.atproto.sync.getLatestCommit({
@@ -643,15 +655,10 @@ describe('indexing', () => {
643
655
  )
644
656
  await expect(getProfileBefore).resolves.toBeDefined()
645
657
  // Delete account on pds
646
- await pdsAgent.api.com.atproto.server.requestAccountDelete(undefined, {
647
- headers: sc.getHeaders(alice),
648
- })
649
- const { token } = await network.pds.ctx.db.db
650
- .selectFrom('email_token')
651
- .selectAll()
652
- .where('purpose', '=', 'delete_account')
653
- .where('did', '=', alice)
654
- .executeTakeFirstOrThrow()
658
+ const token = await network.pds.ctx.accountManager.createEmailToken(
659
+ alice,
660
+ 'delete_account',
661
+ )
655
662
  await pdsAgent.api.com.atproto.server.deleteAccount({
656
663
  token,
657
664
  did: alice,
@@ -0,0 +1,231 @@
1
+ import { wait } from '@atproto/common'
2
+ import { Redis } from '../src/'
3
+ import { ReadThroughCache } from '../src/cache/read-through'
4
+
5
+ describe('redis cache', () => {
6
+ let redis: Redis
7
+
8
+ beforeAll(async () => {
9
+ redis = new Redis({ host: process.env.REDIS_HOST || '' })
10
+ })
11
+
12
+ afterAll(async () => {
13
+ await redis.destroy()
14
+ })
15
+
16
+ it('caches according to namespace', async () => {
17
+ const ns1 = redis.withNamespace('ns1')
18
+ const ns2 = redis.withNamespace('ns2')
19
+ await Promise.all([
20
+ ns1.set('key', 'a'),
21
+ ns2.set('key', 'b'),
22
+ redis.set('key', 'c'),
23
+ ])
24
+ const got = await Promise.all([
25
+ ns1.get('key'),
26
+ ns2.get('key'),
27
+ redis.get('key'),
28
+ ])
29
+ expect(got[0]).toEqual('a')
30
+ expect(got[1]).toEqual('b')
31
+ expect(got[2]).toEqual('c')
32
+
33
+ await Promise.all([
34
+ ns1.setMulti({ key1: 'a', key2: 'b' }),
35
+ ns2.setMulti({ key1: 'c', key2: 'd' }),
36
+ redis.setMulti({ key1: 'e', key2: 'f' }),
37
+ ])
38
+ const gotMany = await Promise.all([
39
+ ns1.getMulti(['key1', 'key2']),
40
+ ns2.getMulti(['key1', 'key2']),
41
+ redis.getMulti(['key1', 'key2']),
42
+ ])
43
+ expect(gotMany[0]['key1']).toEqual('a')
44
+ expect(gotMany[0]['key2']).toEqual('b')
45
+ expect(gotMany[1]['key1']).toEqual('c')
46
+ expect(gotMany[1]['key2']).toEqual('d')
47
+ expect(gotMany[2]['key1']).toEqual('e')
48
+ expect(gotMany[2]['key2']).toEqual('f')
49
+ })
50
+
51
+ it('caches values when empty', async () => {
52
+ const vals = {
53
+ '1': 'a',
54
+ '2': 'b',
55
+ '3': 'c',
56
+ }
57
+ let hits = 0
58
+ const cache = new ReadThroughCache<string>(redis.withNamespace('test1'), {
59
+ staleTTL: 60000,
60
+ maxTTL: 60000,
61
+ fetchMethod: async (key) => {
62
+ hits++
63
+ return vals[key]
64
+ },
65
+ })
66
+ const got = await Promise.all([
67
+ cache.get('1'),
68
+ cache.get('2'),
69
+ cache.get('3'),
70
+ ])
71
+ expect(got[0]).toEqual('a')
72
+ expect(got[1]).toEqual('b')
73
+ expect(got[2]).toEqual('c')
74
+ expect(hits).toBe(3)
75
+
76
+ const refetched = await Promise.all([
77
+ cache.get('1'),
78
+ cache.get('2'),
79
+ cache.get('3'),
80
+ ])
81
+ expect(refetched[0]).toEqual('a')
82
+ expect(refetched[1]).toEqual('b')
83
+ expect(refetched[2]).toEqual('c')
84
+ expect(hits).toBe(3)
85
+ })
86
+
87
+ it('skips and refreshes cache when requested', async () => {
88
+ let val = 'a'
89
+ let hits = 0
90
+ const cache = new ReadThroughCache<string>(redis.withNamespace('test2'), {
91
+ staleTTL: 60000,
92
+ maxTTL: 60000,
93
+ fetchMethod: async () => {
94
+ hits++
95
+ return val
96
+ },
97
+ })
98
+
99
+ const try1 = await cache.get('1')
100
+ expect(try1).toEqual('a')
101
+ expect(hits).toBe(1)
102
+
103
+ val = 'b'
104
+
105
+ const try2 = await cache.get('1')
106
+ expect(try2).toEqual('a')
107
+ expect(hits).toBe(1)
108
+
109
+ const try3 = await cache.get('1', { revalidate: true })
110
+ expect(try3).toEqual('b')
111
+ expect(hits).toBe(2)
112
+
113
+ const try4 = await cache.get('1')
114
+ expect(try4).toEqual('b')
115
+ expect(hits).toBe(2)
116
+ })
117
+
118
+ it('accurately reports stale entries & refreshes the cache', async () => {
119
+ let val = 'a'
120
+ let hits = 0
121
+ const cache = new ReadThroughCache<string>(redis.withNamespace('test3'), {
122
+ staleTTL: 1,
123
+ maxTTL: 60000,
124
+ fetchMethod: async () => {
125
+ hits++
126
+ return val
127
+ },
128
+ })
129
+
130
+ const try1 = await cache.get('1')
131
+ expect(try1).toEqual('a')
132
+
133
+ await wait(5)
134
+
135
+ val = 'b'
136
+
137
+ const try2 = await cache.get('1')
138
+ // cache gives us stale value while it revalidates
139
+ expect(try2).toEqual('a')
140
+
141
+ await wait(5)
142
+
143
+ const try3 = await cache.get('1')
144
+ expect(try3).toEqual('b')
145
+ expect(hits).toEqual(3)
146
+ })
147
+
148
+ it('does not return expired dids & refreshes the cache', async () => {
149
+ let val = 'a'
150
+ let hits = 0
151
+ const cache = new ReadThroughCache<string>(redis.withNamespace('test4'), {
152
+ staleTTL: 0,
153
+ maxTTL: 1,
154
+ fetchMethod: async () => {
155
+ hits++
156
+ return val
157
+ },
158
+ })
159
+
160
+ const try1 = await cache.get('1')
161
+ expect(try1).toEqual('a')
162
+
163
+ await wait(5)
164
+
165
+ val = 'b'
166
+
167
+ const try2 = await cache.get('1')
168
+ expect(try2).toEqual('b')
169
+ expect(hits).toBe(2)
170
+ })
171
+
172
+ it('caches negative values', async () => {
173
+ let val: string | null = null
174
+ let hits = 0
175
+ const cache = new ReadThroughCache<string>(redis.withNamespace('test5'), {
176
+ staleTTL: 60000,
177
+ maxTTL: 60000,
178
+ fetchMethod: async () => {
179
+ hits++
180
+ return val
181
+ },
182
+ })
183
+
184
+ const try1 = await cache.get('1')
185
+ expect(try1).toEqual(null)
186
+ expect(hits).toBe(1)
187
+
188
+ val = 'b'
189
+
190
+ const try2 = await cache.get('1')
191
+ // returns cached negative value
192
+ expect(try2).toEqual(null)
193
+ expect(hits).toBe(1)
194
+
195
+ const try3 = await cache.get('1', { revalidate: true })
196
+ expect(try3).toEqual('b')
197
+ expect(hits).toEqual(2)
198
+
199
+ const try4 = await cache.get('1')
200
+ expect(try4).toEqual('b')
201
+ expect(hits).toEqual(2)
202
+ })
203
+
204
+ it('times out and fails open', async () => {
205
+ let val = 'a'
206
+ let hits = 0
207
+ const cache = new ReadThroughCache<string>(redis.withNamespace('test6'), {
208
+ staleTTL: 60000,
209
+ maxTTL: 60000,
210
+ fetchMethod: async () => {
211
+ hits++
212
+ return val
213
+ },
214
+ })
215
+
216
+ const try1 = await cache.get('1')
217
+ expect(try1).toEqual('a')
218
+
219
+ const orig = cache.redis.driver.get
220
+ cache.redis.driver.get = async (key) => {
221
+ await wait(600)
222
+ return orig(key)
223
+ }
224
+
225
+ val = 'b'
226
+
227
+ const try2 = await cache.get('1')
228
+ expect(try2).toEqual('b')
229
+ expect(hits).toBe(2)
230
+ })
231
+ })
@@ -103,6 +103,8 @@ export default async (sc: SeedClient, users = true) => {
103
103
  'tests/sample-img/key-landscape-small.jpg',
104
104
  'image/jpeg',
105
105
  )
106
+ // must ensure ordering of replies in indexing
107
+ await sc.network.processAll()
106
108
  await sc.reply(
107
109
  bob,
108
110
  sc.posts[alice][1].ref,
@@ -117,6 +119,7 @@ export default async (sc: SeedClient, users = true) => {
117
119
  sc.posts[alice][1].ref,
118
120
  replies.carol[0],
119
121
  )
122
+ await sc.network.processAll()
120
123
  const alicesReplyToBob = await sc.reply(
121
124
  alice,
122
125
  sc.posts[alice][1].ref,
@@ -1,7 +1,6 @@
1
1
  import AtpAgent from '@atproto/api'
2
2
  import { TestNetwork, SeedClient } from '@atproto/dev-env'
3
3
  import { CommitData } from '@atproto/repo'
4
- import { RepoService } from '@atproto/pds/src/services/repo'
5
4
  import { PreparedWrite } from '@atproto/pds/src/repo'
6
5
  import * as sequencer from '@atproto/pds/src/sequencer'
7
6
  import { cborDecode, cborEncode } from '@atproto/common'
@@ -84,9 +83,8 @@ describe('sync', () => {
84
83
 
85
84
  it('indexes actor when commit is unprocessable.', async () => {
86
85
  // mock sequencing to create an unprocessable commit event
87
- const afterWriteProcessingOriginal =
88
- RepoService.prototype.afterWriteProcessing
89
- RepoService.prototype.afterWriteProcessing = async function (
86
+ const sequenceCommitOrig = network.pds.ctx.sequencer.sequenceCommit
87
+ network.pds.ctx.sequencer.sequenceCommit = async function (
90
88
  did: string,
91
89
  commitData: CommitData,
92
90
  writes: PreparedWrite[],
@@ -95,7 +93,7 @@ describe('sync', () => {
95
93
  const evt = cborDecode(seqEvt.event) as sequencer.CommitEvt
96
94
  evt.blocks = new Uint8Array() // bad blocks
97
95
  seqEvt.event = cborEncode(evt)
98
- await sequencer.sequenceEvt(this.db, seqEvt)
96
+ await network.pds.ctx.sequencer.sequenceEvt(seqEvt)
99
97
  }
100
98
  // create account and index the initial commit event
101
99
  await sc.createAccount('jack', {
@@ -103,12 +101,11 @@ describe('sync', () => {
103
101
  email: 'jack@test.com',
104
102
  password: 'password',
105
103
  })
106
- await network.pds.ctx.sequencerLeader?.isCaughtUp()
107
104
  await network.processAll()
108
105
  // confirm jack was indexed as an actor despite the bad event
109
106
  const actors = await dumpTable(ctx.db.getPrimary(), 'actor', ['did'])
110
107
  expect(actors.map((a) => a.handle)).toContain('jack.test')
111
- RepoService.prototype.afterWriteProcessing = afterWriteProcessingOriginal
108
+ network.pds.ctx.sequencer.sequenceCommit = sequenceCommitOrig
112
109
  })
113
110
 
114
111
  async function updateProfile(
@@ -25,7 +25,6 @@ describe('pds profile views', () => {
25
25
  sc = network.getSeedClient()
26
26
  await basicSeed(sc)
27
27
  await network.processAll()
28
- await network.bsky.processAll()
29
28
  alice = sc.dids.alice
30
29
  bob = sc.dids.bob
31
30
  dan = sc.dids.dan