@mepuka/skygent 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +59 -0
- package/index.ts +146 -0
- package/package.json +56 -0
- package/src/cli/app.ts +75 -0
- package/src/cli/config-command.ts +140 -0
- package/src/cli/config.ts +91 -0
- package/src/cli/derive.ts +205 -0
- package/src/cli/doc/annotation.ts +36 -0
- package/src/cli/doc/filter.ts +69 -0
- package/src/cli/doc/index.ts +9 -0
- package/src/cli/doc/post.ts +155 -0
- package/src/cli/doc/primitives.ts +25 -0
- package/src/cli/doc/render.ts +18 -0
- package/src/cli/doc/table.ts +114 -0
- package/src/cli/doc/thread.ts +46 -0
- package/src/cli/doc/tree.ts +126 -0
- package/src/cli/errors.ts +59 -0
- package/src/cli/exit-codes.ts +52 -0
- package/src/cli/feed.ts +177 -0
- package/src/cli/filter-dsl.ts +1411 -0
- package/src/cli/filter-errors.ts +208 -0
- package/src/cli/filter-help.ts +70 -0
- package/src/cli/filter-input.ts +54 -0
- package/src/cli/filter.ts +435 -0
- package/src/cli/graph.ts +472 -0
- package/src/cli/help.ts +14 -0
- package/src/cli/interval.ts +35 -0
- package/src/cli/jetstream.ts +173 -0
- package/src/cli/layers.ts +180 -0
- package/src/cli/logging.ts +136 -0
- package/src/cli/output-format.ts +26 -0
- package/src/cli/output.ts +82 -0
- package/src/cli/parse.ts +80 -0
- package/src/cli/post.ts +193 -0
- package/src/cli/preferences.ts +11 -0
- package/src/cli/query-fields.ts +247 -0
- package/src/cli/query.ts +415 -0
- package/src/cli/range.ts +44 -0
- package/src/cli/search.ts +465 -0
- package/src/cli/shared-options.ts +169 -0
- package/src/cli/shared.ts +20 -0
- package/src/cli/store-errors.ts +80 -0
- package/src/cli/store-tree.ts +392 -0
- package/src/cli/store.ts +395 -0
- package/src/cli/sync-factory.ts +107 -0
- package/src/cli/sync.ts +366 -0
- package/src/cli/view-thread.ts +196 -0
- package/src/cli/view.ts +47 -0
- package/src/cli/watch.ts +344 -0
- package/src/db/migrations/store-catalog/001_init.ts +14 -0
- package/src/db/migrations/store-index/001_init.ts +34 -0
- package/src/db/migrations/store-index/002_event_log.ts +24 -0
- package/src/db/migrations/store-index/003_fts_and_derived.ts +52 -0
- package/src/db/migrations/store-index/004_query_indexes.ts +9 -0
- package/src/db/migrations/store-index/005_post_lang.ts +15 -0
- package/src/db/migrations/store-index/006_has_embed.ts +10 -0
- package/src/db/migrations/store-index/007_event_seq_and_checkpoints.ts +68 -0
- package/src/domain/bsky.ts +467 -0
- package/src/domain/config.ts +11 -0
- package/src/domain/credentials.ts +6 -0
- package/src/domain/defaults.ts +8 -0
- package/src/domain/derivation.ts +55 -0
- package/src/domain/errors.ts +71 -0
- package/src/domain/events.ts +55 -0
- package/src/domain/extract.ts +64 -0
- package/src/domain/filter-describe.ts +551 -0
- package/src/domain/filter-explain.ts +9 -0
- package/src/domain/filter.ts +797 -0
- package/src/domain/format.ts +91 -0
- package/src/domain/index.ts +13 -0
- package/src/domain/indexes.ts +17 -0
- package/src/domain/policies.ts +16 -0
- package/src/domain/post.ts +88 -0
- package/src/domain/primitives.ts +50 -0
- package/src/domain/raw.ts +140 -0
- package/src/domain/store.ts +103 -0
- package/src/domain/sync.ts +211 -0
- package/src/domain/text-width.ts +56 -0
- package/src/services/app-config.ts +278 -0
- package/src/services/bsky-client.ts +2113 -0
- package/src/services/credential-store.ts +408 -0
- package/src/services/derivation-engine.ts +502 -0
- package/src/services/derivation-settings.ts +61 -0
- package/src/services/derivation-validator.ts +68 -0
- package/src/services/filter-compiler.ts +269 -0
- package/src/services/filter-library.ts +371 -0
- package/src/services/filter-runtime.ts +821 -0
- package/src/services/filter-settings.ts +30 -0
- package/src/services/identity-resolver.ts +563 -0
- package/src/services/jetstream-sync.ts +636 -0
- package/src/services/lineage-store.ts +89 -0
- package/src/services/link-validator.ts +244 -0
- package/src/services/output-manager.ts +274 -0
- package/src/services/post-parser.ts +62 -0
- package/src/services/profile-resolver.ts +223 -0
- package/src/services/resource-monitor.ts +106 -0
- package/src/services/shared.ts +69 -0
- package/src/services/store-cleaner.ts +43 -0
- package/src/services/store-commit.ts +168 -0
- package/src/services/store-db.ts +248 -0
- package/src/services/store-event-log.ts +285 -0
- package/src/services/store-index-sql.ts +289 -0
- package/src/services/store-index.ts +1152 -0
- package/src/services/store-keys.ts +4 -0
- package/src/services/store-manager.ts +358 -0
- package/src/services/store-stats.ts +522 -0
- package/src/services/store-writer.ts +200 -0
- package/src/services/sync-checkpoint-store.ts +169 -0
- package/src/services/sync-engine.ts +547 -0
- package/src/services/sync-reporter.ts +16 -0
- package/src/services/sync-settings.ts +72 -0
- package/src/services/trending-topics.ts +226 -0
- package/src/services/view-checkpoint-store.ts +238 -0
- package/src/typeclass/chunk.ts +84 -0
|
@@ -0,0 +1,636 @@
|
|
|
1
|
+
import {
|
|
2
|
+
Chunk,
|
|
3
|
+
Clock,
|
|
4
|
+
Context,
|
|
5
|
+
Duration,
|
|
6
|
+
Effect,
|
|
7
|
+
Layer,
|
|
8
|
+
Option,
|
|
9
|
+
Ref,
|
|
10
|
+
Schema,
|
|
11
|
+
Stream
|
|
12
|
+
} from "effect";
|
|
13
|
+
import { Jetstream, JetstreamMessage } from "effect-jetstream";
|
|
14
|
+
import { messageFromCause } from "./shared.js";
|
|
15
|
+
import { FilterRuntime } from "./filter-runtime.js";
|
|
16
|
+
import { PostParser } from "./post-parser.js";
|
|
17
|
+
import { StoreCommitter } from "./store-commit.js";
|
|
18
|
+
import { SyncCheckpointStore } from "./sync-checkpoint-store.js";
|
|
19
|
+
import { SyncReporter } from "./sync-reporter.js";
|
|
20
|
+
import { ProfileResolver } from "./profile-resolver.js";
|
|
21
|
+
import { StoreIndex } from "./store-index.js";
|
|
22
|
+
import { EventMeta, PostDelete, PostUpsert } from "../domain/events.js";
|
|
23
|
+
import type { FilterExpr } from "../domain/filter.js";
|
|
24
|
+
import { filterExprSignature } from "../domain/filter.js";
|
|
25
|
+
import type { Post } from "../domain/post.js";
|
|
26
|
+
import { EventSeq, PostCid, PostUri, Timestamp } from "../domain/primitives.js";
|
|
27
|
+
import type { StoreRef } from "../domain/store.js";
|
|
28
|
+
import {
|
|
29
|
+
DataSource,
|
|
30
|
+
SyncCheckpoint,
|
|
31
|
+
SyncError,
|
|
32
|
+
SyncEvent,
|
|
33
|
+
SyncProgress,
|
|
34
|
+
SyncResult,
|
|
35
|
+
SyncResultMonoid,
|
|
36
|
+
SyncStage
|
|
37
|
+
} from "../domain/sync.js";
|
|
38
|
+
|
|
39
|
+
/**
 * The subset of Jetstream messages that mutate a record in a repository.
 * All other message variants are dropped by `isCommitMessage` before the
 * sync pipeline sees them.
 */
type CommitMessage =
  | JetstreamMessage.CommitCreate
  | JetstreamMessage.CommitUpdate
  | JetstreamMessage.CommitDelete;
|
|
43
|
+
|
|
44
|
+
/** Configuration for a Jetstream-backed sync (one-shot) or watch (continuous) run. */
export type JetstreamSyncConfig = {
  /** Jetstream variant of the data-source union; other variants are not accepted. */
  readonly source: Extract<DataSource, { _tag: "Jetstream" }>;
  /** Target store that accepted posts are committed to. */
  readonly store: StoreRef;
  /** Filter expression every parsed post must satisfy to be stored. */
  readonly filter: FilterExpr;
  /** CLI command name recorded in each event's metadata for provenance. */
  readonly command: string;
  /** Stop after this many post-commit messages (applied after filtering to post commits). */
  readonly limit?: number;
  /** Stop after this wall-clock duration (sync only; stripped from watch configs). */
  readonly duration?: Duration.Duration;
  /** Explicit starting cursor; takes precedence over a saved checkpoint's cursor. */
  readonly cursor?: string;
  /** When true, the first per-message error fails the whole run instead of being recorded. */
  readonly strict?: boolean;
  /** When set, the run fails once the cumulative error count exceeds this value. */
  readonly maxErrors?: number;
};
|
|
55
|
+
|
|
56
|
+
/**
 * Result of applying one prepared commit against the store: stored (carrying
 * the event-log sequence and whether it was an upsert or a delete), skipped,
 * or failed with a captured SyncError.
 */
type SyncOutcome =
  | { readonly _tag: "Stored"; readonly eventSeq: EventSeq; readonly kind: "upsert" | "delete" }
  | { readonly _tag: "Skipped" }
  | { readonly _tag: "Error"; readonly error: SyncError };
|
|
60
|
+
|
|
61
|
+
/**
 * Result of parsing and filtering one commit message, before any store write.
 * - Upsert: the post passed the filter. `checkExists` is true for creates,
 *   which take the append-if-missing path to avoid duplicate events.
 * - Delete: the record was deleted upstream; the cid may be absent.
 * - Skip: the post did not match the filter.
 * - Error: profile resolution, parsing, or filter evaluation failed
 *   (the error is captured, not thrown, so batch processing continues).
 */
type PreparedOutcome =
  | {
      readonly _tag: "Upsert";
      readonly post: Post;
      readonly checkExists: boolean;
    }
  | { readonly _tag: "Delete"; readonly uri: PostUri; readonly cid: PostCid | undefined }
  | { readonly _tag: "Skip" }
  | { readonly _tag: "Error"; readonly error: SyncError };
|
|
70
|
+
|
|
71
|
+
/**
 * Running totals for one processStream run, held in a Ref. Also carries the
 * most recent event sequence and Jetstream cursor so checkpoints can be saved
 * both per-batch and from the stream's ensuring finalizer.
 */
type SyncProgressState = {
  /** Commit messages consumed so far. */
  readonly processed: number;
  /** Upserts stored so far (deletes are not counted here). */
  readonly stored: number;
  /** Messages skipped so far (includes errored messages in non-strict mode). */
  readonly skipped: number;
  /** Per-message errors accumulated so far; compared against maxErrors. */
  readonly errors: number;
  /** Clock millis of the last progress report; used with the 5s report threshold. */
  readonly lastReportAt: number;
  /** Sequence of the most recently stored event, if any. */
  readonly lastEventSeq: Option.Option<EventSeq>;
  /** Most recent Jetstream cursor (max time_us seen), as a string. */
  readonly lastCursor: Option.Option<string>;
};
|
|
80
|
+
|
|
81
|
+
const skippedOutcome: SyncOutcome = { _tag: "Skipped" };
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
const toSyncError =
|
|
85
|
+
(stage: SyncStage, fallback: string) => (cause: unknown) =>
|
|
86
|
+
SyncError.make({
|
|
87
|
+
stage,
|
|
88
|
+
message: messageFromCause(fallback, cause),
|
|
89
|
+
cause
|
|
90
|
+
});
|
|
91
|
+
|
|
92
|
+
const isCommitMessage = (
|
|
93
|
+
message: JetstreamMessage.JetstreamMessage
|
|
94
|
+
): message is CommitMessage =>
|
|
95
|
+
message._tag === "CommitCreate" ||
|
|
96
|
+
message._tag === "CommitUpdate" ||
|
|
97
|
+
message._tag === "CommitDelete";
|
|
98
|
+
|
|
99
|
+
const isPostCommit = (message: CommitMessage) =>
|
|
100
|
+
message.commit.collection === "app.bsky.feed.post";
|
|
101
|
+
|
|
102
|
+
const postUriFor = (message: CommitMessage) =>
|
|
103
|
+
`at://${message.did}/${message.commit.collection}/${message.commit.rkey}`;
|
|
104
|
+
|
|
105
|
+
const indexedAtFor = (message: CommitMessage) =>
|
|
106
|
+
new Date(Math.floor(message.time_us / 1000)).toISOString();
|
|
107
|
+
|
|
108
|
+
/**
 * Service that ingests Bluesky posts from a Jetstream firehose into a store.
 * `sync` runs to completion (bounded by limit/duration) and returns an
 * aggregated SyncResult; `watch` runs unbounded and emits a SyncEvent per
 * processed batch.
 */
export class JetstreamSyncEngine extends Context.Tag("@skygent/JetstreamSyncEngine")<
  JetstreamSyncEngine,
  {
    readonly sync: (config: JetstreamSyncConfig) => Effect.Effect<SyncResult, SyncError>;
    readonly watch: (
      config: Omit<JetstreamSyncConfig, "limit" | "duration">
    ) => Stream.Stream<SyncEvent, SyncError>;
  }
>() {
  // Layer wiring: resolves all collaborating services once, then builds the
  // sync/watch implementations over them.
  static readonly layer = Layer.effect(
    JetstreamSyncEngine,
    Effect.gen(function* () {
      const jetstream = yield* Jetstream.Jetstream;
      const parser = yield* PostParser;
      const runtime = yield* FilterRuntime;
      const committer = yield* StoreCommitter;
      const index = yield* StoreIndex;
      const checkpoints = yield* SyncCheckpointStore;
      const reporter = yield* SyncReporter;
      const profiles = yield* ProfileResolver;
      // Best-effort shutdown: give the connection 5s to close, swallow any failure.
      const safeShutdown = jetstream.shutdown.pipe(
        Effect.timeout(Duration.seconds(5)),
        Effect.catchAll(() => Effect.void)
      );

      // Builds event metadata stamped with the current clock time.
      const makeMeta = (
        command: string,
        filterHash: string
      ) =>
        Clock.currentTimeMillis.pipe(
          Effect.flatMap((now) => Schema.decodeUnknown(Timestamp)(new Date(now).toISOString())),
          Effect.mapError(toSyncError("store", "Failed to create event metadata")),
          Effect.map((createdAt) =>
            EventMeta.make({
              source: "jetstream",
              command,
              filterExprHash: filterHash,
              createdAt
            })
          )
        );

      // Unconditionally appends an upsert event; returns the new event sequence.
      const storePost = (
        target: StoreRef,
        command: string,
        filterHash: string,
        post: Post
      ) =>
        Effect.gen(function* () {
          const meta = yield* makeMeta(command, filterHash);
          const event = PostUpsert.make({ post, meta });
          return yield* committer
            .appendUpsert(target, event)
            .pipe(
              Effect.mapError(
                toSyncError("store", "Failed to append event")
              ),
              Effect.map((record) => record.seq)
            );
        });

      // Appends an upsert only if the post is not already present; returns
      // Some(seq) when stored, None when it already existed.
      const storePostIfMissing = (
        target: StoreRef,
        command: string,
        filterHash: string,
        post: Post
      ) =>
        Effect.gen(function* () {
          const meta = yield* makeMeta(command, filterHash);
          const event = PostUpsert.make({ post, meta });
          const stored = yield* committer
            .appendUpsertIfMissing(target, event)
            .pipe(
              Effect.mapError(
                toSyncError("store", "Failed to append event")
              )
            );
          return Option.map(stored, (entry) => entry.seq);
        });

      // Appends a delete event; returns the new event sequence.
      const storeDelete = (
        target: StoreRef,
        command: string,
        filterHash: string,
        uri: PostUri,
        cid: PostCid | undefined
      ) =>
        Effect.gen(function* () {
          const meta = yield* makeMeta(command, filterHash);
          const event = PostDelete.make({ uri, cid, meta });
          return yield* committer
            .appendDelete(target, event)
            .pipe(
              Effect.mapError(
                toSyncError("store", "Failed to append event")
              ),
              Effect.map((record) => record.seq)
            );
        });

      // Core pipeline shared by sync and watch: builds a stream of per-batch
      // SyncResults from the Jetstream firehose, with progress reporting and
      // checkpointing. The returned stream saves a final checkpoint and shuts
      // the connection down via ensuring finalizers.
      const processStream = Effect.fn("JetstreamSyncEngine.processStream")(
        (
          config: JetstreamSyncConfig,
          predicate: (post: Post) => Effect.Effect<
            { readonly ok: boolean },
            unknown
          >,
          activeCheckpoint: Option.Option<SyncCheckpoint>
        ) =>
          Effect.gen(function* () {
            const filterHash = filterExprSignature(config.filter);
            const startTime = yield* Clock.currentTimeMillis;
            const strict = config.strict === true;
            const maxErrors = config.maxErrors;
            // Resume bookkeeping from the prior checkpoint when one matches.
            const initialLastEventSeq = Option.flatMap(activeCheckpoint, (value) =>
              Option.fromNullable(value.lastEventSeq)
            );
            // An explicit config.cursor wins over the checkpoint's cursor.
            const initialCursor = Option.orElse(
              Option.fromNullable(config.cursor),
              () => Option.flatMap(activeCheckpoint, (value) =>
                Option.fromNullable(value.cursor)
              )
            );
            const stateRef = yield* Ref.make<SyncProgressState>({
              processed: 0,
              stored: 0,
              skipped: 0,
              errors: 0,
              lastReportAt: startTime,
              lastEventSeq: initialLastEventSeq,
              lastCursor: initialCursor
            });

            // Persists a checkpoint, but only when there is a cursor to save
            // or a checkpoint already existed (avoids creating empty ones).
            const saveCheckpointFromState = (state: SyncProgressState) => {
              const cursorValue = Option.getOrUndefined(state.lastCursor);
              const shouldSave =
                cursorValue !== undefined || Option.isSome(activeCheckpoint);
              if (!shouldSave) {
                return Effect.void;
              }
              return Clock.currentTimeMillis.pipe(
                Effect.flatMap((now) => Schema.decodeUnknown(Timestamp)(new Date(now).toISOString())),
                Effect.mapError(
                  toSyncError("store", "Failed to create checkpoint timestamp")
                ),
                Effect.flatMap((updatedAt) => {
                  const checkpoint = SyncCheckpoint.make({
                    source: config.source,
                    cursor: cursorValue,
                    lastEventSeq: Option.getOrUndefined(state.lastEventSeq),
                    filterHash,
                    updatedAt
                  });
                  return checkpoints
                    .save(config.store, checkpoint)
                    .pipe(
                      Effect.mapError(
                        toSyncError("store", "Failed to save checkpoint")
                      )
                    );
                })
              );
            };

            // Firehose narrowed to post-collection commit messages only.
            const baseStream = jetstream.stream.pipe(
              Stream.mapError(toSyncError("source", "Jetstream stream failed")),
              Stream.filter(isCommitMessage),
              Stream.filter(isPostCommit)
            );

            // Optional message cap (applied after the filters above).
            const bounded = typeof config.limit === "number"
              ? baseStream.pipe(Stream.take(config.limit))
              : baseStream;

            // Parse + filter one message, with no store writes. All failures
            // are captured into the Error variant so the batch keeps going;
            // strict-mode escalation happens later in applyPrepared.
            const prepareCommit = (
              message: CommitMessage
            ): Effect.Effect<PreparedOutcome, SyncError> =>
              Effect.gen(function* () {
                const uri = postUriFor(message);
                switch (message._tag) {
                  case "CommitCreate":
                  case "CommitUpdate": {
                    const handle = yield* profiles
                      .handleForDid(message.did)
                      .pipe(
                        Effect.mapError(
                          toSyncError("source", "Failed to resolve author profile")
                        )
                      );
                    const raw = {
                      uri,
                      cid: message.commit.cid,
                      author: handle,
                      authorDid: message.did,
                      record: message.commit.record,
                      indexedAt: indexedAtFor(message)
                    };
                    const post = yield* parser
                      .parsePost(raw)
                      .pipe(
                        Effect.mapError(
                          toSyncError("parse", "Failed to parse post")
                        )
                      );
                    const evaluated = yield* predicate(post).pipe(
                      Effect.mapError(
                        toSyncError("filter", "Filter evaluation failed")
                      )
                    );
                    // Creates use the duplicate-checking path; updates overwrite.
                    return evaluated.ok
                      ? ({
                          _tag: "Upsert",
                          post,
                          checkExists: message._tag === "CommitCreate"
                        } as const)
                      : ({ _tag: "Skip" } as const);
                  }
                  case "CommitDelete": {
                    const parsedUri = yield* Schema.decodeUnknown(PostUri)(uri).pipe(
                      Effect.mapError(toSyncError("parse", "Invalid post uri"))
                    );
                    // Delete commits may omit the cid; validate it only if present.
                    const parsedCid =
                      "cid" in message.commit &&
                      typeof message.commit.cid === "string"
                        ? yield* Schema.decodeUnknown(PostCid)(message.commit.cid).pipe(
                            Effect.mapError(
                              toSyncError("parse", "Invalid post cid")
                            )
                          )
                        : undefined;
                    return {
                      _tag: "Delete",
                      uri: parsedUri,
                      cid: parsedCid
                    } as const;
                  }
                }
              }).pipe(
                Effect.catchAll((error) =>
                  Effect.succeed({ _tag: "Error", error } as const)
                )
              );

            // Apply one prepared outcome to the store. In strict mode a
            // captured error fails the whole run; otherwise it is recorded.
            const applyPrepared = (prepared: PreparedOutcome) => {
              switch (prepared._tag) {
                case "Skip":
                  return Effect.succeed(skippedOutcome);
                case "Error":
                  return strict
                    ? Effect.fail(prepared.error)
                    : Effect.succeed({ _tag: "Error", error: prepared.error } as const);
                case "Delete":
                  // Only append a delete event for posts we actually hold.
                  return index
                    .hasUri(config.store, prepared.uri)
                    .pipe(
                      Effect.mapError(
                        toSyncError("store", "Failed to check existing post")
                      ),
                      Effect.flatMap((exists) =>
                        exists
                          ? storeDelete(
                              config.store,
                              config.command,
                              filterHash,
                              prepared.uri,
                              prepared.cid
                            ).pipe(
                              Effect.map(
                                (eventSeq): SyncOutcome => ({
                                  _tag: "Stored",
                                  eventSeq,
                                  kind: "delete"
                                })
                              )
                            )
                          : Effect.succeed(skippedOutcome)
                      )
                    );
                case "Upsert":
                  // Creates: append-if-missing (None => already present => skipped).
                  // Updates: unconditional append.
                  return (prepared.checkExists
                    ? storePostIfMissing(
                        config.store,
                        config.command,
                        filterHash,
                        prepared.post
                      ).pipe(
                        Effect.map((eventSeq) =>
                          Option.match(eventSeq, {
                            onNone: () => skippedOutcome,
                            onSome: (value): SyncOutcome => ({
                              _tag: "Stored",
                              eventSeq: value,
                              kind: "upsert"
                            })
                          })
                        )
                      )
                    : storePost(
                        config.store,
                        config.command,
                        filterHash,
                        prepared.post
                      ).pipe(
                        Effect.map(
                          (eventSeq): SyncOutcome => ({
                            _tag: "Stored",
                            eventSeq,
                            kind: "upsert"
                          })
                        )
                      )
                  );
              }
            };

            // Process one grouped batch: prepare concurrently, apply
            // sequentially (store writes stay ordered), then tally, update
            // progress state, report, checkpoint, and emit a SyncResult.
            const processBatch = (batch: Chunk.Chunk<CommitMessage>) =>
              Effect.gen(function* () {
                const messages = Chunk.toReadonlyArray(batch);
                if (messages.length === 0) {
                  return SyncResultMonoid.empty;
                }

                // Preparation is read-only, so it may run fully concurrently
                // with request batching enabled (e.g. for profile lookups).
                const prepared = yield* Effect.forEach(messages, prepareCommit, {
                  concurrency: "unbounded",
                  batching: true
                }).pipe(Effect.withRequestBatching(true));

                const outcomes = yield* Effect.forEach(prepared, applyPrepared);

                let added = 0;
                let deleted = 0;
                let skipped = 0;
                const errors: Array<SyncError> = [];
                let lastEventSeq = Option.none<EventSeq>();
                for (const outcome of outcomes) {
                  switch (outcome._tag) {
                    case "Stored":
                      if (outcome.kind === "delete") {
                        deleted += 1;
                      } else {
                        added += 1;
                      }
                      lastEventSeq = Option.some(outcome.eventSeq);
                      break;
                    case "Skipped":
                      skipped += 1;
                      break;
                    case "Error":
                      // Errored messages also count toward the skipped total.
                      skipped += 1;
                      errors.push(outcome.error);
                      break;
                  }
                }

                // Cursor advances to the largest time_us seen in this batch;
                // the checkpoint stores that raw value as a string.
                let maxCursor = 0;
                for (const message of messages) {
                  if (message.time_us > maxCursor) {
                    maxCursor = message.time_us;
                  }
                }
                const cursor = String(Math.max(0, Math.trunc(maxCursor)));
                const now = yield* Clock.currentTimeMillis;
                // Atomically merge the batch tallies into the running state
                // and decide whether a progress report is due.
                const update = yield* Ref.modify(
                  stateRef,
                  (state): readonly [
                    { readonly nextState: SyncProgressState; readonly shouldReport: boolean },
                    SyncProgressState
                  ] => {
                    const processed = state.processed + messages.length;
                    const stored = state.stored + added;
                    const skippedTotal = state.skipped + skipped;
                    const errorsTotal = state.errors + errors.length;
                    // NOTE(review): the %100 clause only fires when the running
                    // total lands exactly on a multiple of 100 (batches can be
                    // any size up to 100); the 5s clause is the reliable trigger.
                    const shouldReport =
                      processed % 100 === 0 || now - state.lastReportAt >= 5000;
                    const nextState: SyncProgressState = {
                      processed,
                      stored,
                      skipped: skippedTotal,
                      errors: errorsTotal,
                      lastReportAt: shouldReport ? now : state.lastReportAt,
                      lastEventSeq: Option.isSome(lastEventSeq)
                        ? lastEventSeq
                        : state.lastEventSeq,
                      lastCursor: Option.some(cursor)
                    };
                    return [{ nextState, shouldReport }, nextState];
                  }
                );

                const state = update.nextState;
                const shouldReport = update.shouldReport;
                // Fail the run once cumulative errors exceed the configured cap.
                const exceedsMaxErrors =
                  typeof maxErrors === "number" && state.errors > maxErrors;
                if (exceedsMaxErrors) {
                  const lastError = errors[errors.length - 1];
                  return yield* SyncError.make({
                    stage: lastError?.stage ?? "source",
                    message: `Stopped after exceeding max errors (${maxErrors}).`,
                    cause: lastError ?? { maxErrors }
                  });
                }
                if (shouldReport) {
                  const elapsedMs = now - startTime;
                  const rate =
                    elapsedMs > 0 ? state.processed / (elapsedMs / 1000) : 0;
                  yield* reporter.report(
                    SyncProgress.make({
                      processed: state.processed,
                      stored: state.stored,
                      skipped: state.skipped,
                      errors: state.errors,
                      elapsedMs,
                      rate
                    })
                  );
                }

                // Checkpoint after every batch so a crash loses at most one batch.
                yield* saveCheckpointFromState(state);

                return SyncResult.make({
                  postsAdded: added,
                  postsDeleted: deleted,
                  postsSkipped: skipped,
                  errors
                });
              });

            // Batch by up to 100 messages or a 1-second window, whichever first.
            const stream = bounded.pipe(
              Stream.groupedWithin(100, Duration.seconds(1)),
              Stream.mapEffect(processBatch)
            );

            // Finalizers: persist a final checkpoint (best-effort), then
            // shut the Jetstream connection down.
            return stream.pipe(
              Stream.ensuring(
                Ref.get(stateRef).pipe(
                  Effect.flatMap(saveCheckpointFromState),
                  Effect.catchAll(() => Effect.void)
                )
              ),
              Stream.ensuring(safeShutdown)
            );
          })
      );

      // One-shot sync: drain the batch stream, folding each SyncResult into
      // an accumulator, optionally interrupting after config.duration.
      const sync = Effect.fn("JetstreamSyncEngine.sync")((config: JetstreamSyncConfig) =>
        Effect.gen(function* () {
          const predicate = yield* runtime
            .evaluateWithMetadata(config.filter)
            .pipe(
              Effect.mapError(
                toSyncError("filter", "Filter compilation failed")
              )
            );

          const filterHash = filterExprSignature(config.filter);
          const previousCheckpoint = yield* checkpoints
            .load(config.store, config.source)
            .pipe(
              Effect.mapError(toSyncError("store", "Failed to load sync checkpoint"))
            );
          // Discard the checkpoint if it was written under a different filter
          // (a hash-less legacy checkpoint is accepted as-is).
          const activeCheckpoint = Option.filter(previousCheckpoint, (value) =>
            value.filterHash ? value.filterHash === filterHash : true
          );

          const stream = yield* processStream(config, predicate, activeCheckpoint);
          const resultRef = yield* Ref.make(SyncResultMonoid.empty);
          const tagged = stream.pipe(
            Stream.tap((result) =>
              Ref.update(resultRef, (current) =>
                SyncResultMonoid.combine(current, result)
              )
            )
          );
          // Duration cap: sleep, warn, shut down the connection, then
          // interrupt the stream.
          const withTimeout = config.duration
            ? tagged.pipe(
                Stream.interruptWhen(
                  Effect.sleep(config.duration).pipe(
                    Effect.zipRight(
                      Effect.logWarning(
                        "Jetstream sync exceeded duration; shutting down.",
                        { durationMs: Duration.toMillis(config.duration) }
                      )
                    ),
                    Effect.zipRight(safeShutdown)
                  )
                )
              )
            : tagged;
          yield* Stream.runDrain(withTimeout);
          return yield* Ref.get(resultRef);
        })
      );

      // Continuous watch: same pipeline as sync but unbounded (config has no
      // limit/duration), emitting one SyncEvent per processed batch.
      const watch = (config: Omit<JetstreamSyncConfig, "limit" | "duration">) =>
        Stream.unwrap(
          Effect.gen(function* () {
            const predicate = yield* runtime
              .evaluateWithMetadata(config.filter)
              .pipe(
                Effect.mapError(
                  toSyncError("filter", "Filter compilation failed")
                )
              );

            const filterHash = filterExprSignature(config.filter);
            const previousCheckpoint = yield* checkpoints
              .load(config.store, config.source)
              .pipe(
                Effect.mapError(toSyncError("store", "Failed to load sync checkpoint"))
              );
            const activeCheckpoint = Option.filter(previousCheckpoint, (value) =>
              value.filterHash ? value.filterHash === filterHash : true
            );

            // NOTE(review): the spread already copies `command`; re-specifying
            // it is redundant (kept as-is — behavior is identical).
            const stream = yield* processStream(
              { ...config, command: config.command },
              predicate,
              activeCheckpoint
            );
            return stream.pipe(
              Stream.map((result) => SyncEvent.make({ result }))
            );
          })
        );

      return JetstreamSyncEngine.of({ sync, watch });
    })
  );
}
|