@graphrefly/graphrefly 0.4.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -34,6 +34,8 @@ __export(extra_exports, {
34
34
  cached: () => cached,
35
35
  catchError: () => catchError,
36
36
  checkpointNodeValue: () => checkpointNodeValue,
37
+ checkpointToRedis: () => checkpointToRedis,
38
+ checkpointToS3: () => checkpointToS3,
37
39
  circuitBreaker: () => circuitBreaker,
38
40
  combine: () => combine,
39
41
  combineLatest: () => combineLatest,
@@ -75,10 +77,13 @@ __export(extra_exports, {
75
77
  fromIter: () => fromIter,
76
78
  fromKafka: () => fromKafka,
77
79
  fromMCP: () => fromMCP,
80
+ fromNATS: () => fromNATS,
78
81
  fromNDJSON: () => fromNDJSON,
79
82
  fromOTel: () => fromOTel,
80
83
  fromPrometheus: () => fromPrometheus,
81
84
  fromPromise: () => fromPromise,
85
+ fromPulsar: () => fromPulsar,
86
+ fromRabbitMQ: () => fromRabbitMQ,
82
87
  fromRedisStream: () => fromRedisStream,
83
88
  fromStatsD: () => fromStatsD,
84
89
  fromSyslog: () => fromSyslog,
@@ -142,11 +147,22 @@ __export(extra_exports, {
142
147
  throwError: () => throwError,
143
148
  timeout: () => timeout,
144
149
  toArray: () => toArray,
150
+ toCSV: () => toCSV,
151
+ toClickHouse: () => toClickHouse,
152
+ toFile: () => toFile,
145
153
  toKafka: () => toKafka,
154
+ toLoki: () => toLoki,
146
155
  toMessages$: () => toMessages$,
156
+ toMongo: () => toMongo,
157
+ toNATS: () => toNATS,
147
158
  toObservable: () => toObservable,
159
+ toPostgres: () => toPostgres,
160
+ toPulsar: () => toPulsar,
161
+ toRabbitMQ: () => toRabbitMQ,
148
162
  toRedisStream: () => toRedisStream,
163
+ toS3: () => toS3,
149
164
  toSSE: () => toSSE,
165
+ toTempo: () => toTempo,
150
166
  toWebSocket: () => toWebSocket,
151
167
  tokenBucket: () => tokenBucket,
152
168
  tokenTracker: () => tokenTracker,
@@ -3114,6 +3130,797 @@ function fromClickHouseWatch(client, query, opts) {
3114
3130
  };
3115
3131
  }, sourceOpts2(rest));
3116
3132
  }
3133
/**
 * Bridges a Pulsar consumer into a graph source.
 * Runs a receive loop that emits one record per message (topic, key,
 * decoded value, properties, publish/event timestamps, wall-clock ns)
 * and, when `autoAck` is true (the default), acknowledges each message
 * after it has been emitted. Any receive/decode/ack failure ends the
 * stream with ERROR. Teardown only stops the loop; the consumer itself
 * is not closed here.
 */
function fromPulsar(consumer, opts) {
  const {
    autoAck = true,
    // Default decoding: JSON when possible, otherwise the raw string.
    deserialize = (buf) => {
      const text = buf.toString();
      try {
        return JSON.parse(text);
      } catch {
        return text;
      }
    },
    ...rest
  } = opts ?? {};
  return producer((_d, emitter) => {
    let running = true;
    const pump = async () => {
      while (running) {
        try {
          const received = await consumer.receive();
          // Teardown may have happened while we were blocked in receive().
          if (!running) return;
          emitter.emit({
            topic: received.getTopicName(),
            messageId: received.getMessageId().toString(),
            key: received.getPartitionKey(),
            value: deserialize(received.getData()),
            properties: received.getProperties(),
            publishTime: received.getPublishTimestamp(),
            eventTime: received.getEventTimestamp(),
            timestampNs: wallClockNs()
          });
          if (autoAck) await consumer.acknowledge(received);
        } catch (err) {
          if (running) emitter.down([[ERROR, err]]);
          return;
        }
      }
    };
    void pump();
    return () => {
      running = false;
    };
  }, sourceOpts2(rest));
}
3175
/**
 * Sink that forwards every DATA value from `source` to a Pulsar
 * producer. Sends are fire-and-forget (not awaited); serialization and
 * send failures never propagate into the graph — they are reported via
 * `onTransportError` with stage "serialize" or "send". Returns the
 * inner effect node's unsubscribe function.
 */
function toPulsar(source, pulsarProducer, opts) {
  const {
    serialize = (v) => Buffer.from(JSON.stringify(v)),
    keyExtractor,
    propertiesExtractor,
    onTransportError,
    ...rest
  } = opts ?? {};
  // Normalize arbitrary thrown values into Error instances for reporting.
  const asError = (e) => (e instanceof Error ? e : new Error(String(e)));
  const inner = node([source], () => void 0, {
    describeKind: "effect",
    ...rest,
    onMessage(msg) {
      if (msg[0] !== DATA) return false;
      const value = msg[1];
      let data;
      try {
        data = serialize(value);
      } catch (err) {
        onTransportError?.({ stage: "serialize", error: asError(err), value });
        return true;
      }
      void pulsarProducer
        .send({
          data,
          partitionKey: keyExtractor?.(value),
          properties: propertiesExtractor?.(value)
        })
        .catch((err) => {
          onTransportError?.({ stage: "send", error: asError(err), value });
        });
      return true;
    }
  });
  return inner.subscribe(() => {
  });
}
3219
/**
 * Subscribes to a NATS subject (optionally in a queue group) and emits
 * one record per message: decoded data, headers flattened into a plain
 * object, reply subject, sid, and a wall-clock nanosecond timestamp.
 * Normal subscription end emits COMPLETE; an iteration failure emits
 * ERROR. Teardown only stops emission — the NATS subscription is not
 * unsubscribed here.
 */
function fromNATS(client, subject, opts) {
  const decoder = new TextDecoder();
  const {
    queue,
    // Default decoding: JSON when possible, otherwise the raw text.
    deserialize = (data) => {
      const text = decoder.decode(data);
      try {
        return JSON.parse(text);
      } catch {
        return text;
      }
    },
    ...rest
  } = opts ?? {};
  return producer((_d, emitter) => {
    let running = true;
    const sub = client.subscribe(subject, queue ? { queue } : void 0);
    const drain = async () => {
      try {
        for await (const message of sub) {
          if (!running) return;
          // Flatten the headers container into a plain object.
          const headers = {};
          if (message.headers) {
            for (const name of message.headers.keys()) {
              headers[name] = message.headers.get(name);
            }
          }
          emitter.emit({
            subject: message.subject,
            data: deserialize(message.data),
            headers,
            reply: message.reply,
            sid: message.sid,
            timestampNs: wallClockNs()
          });
        }
        if (running) emitter.down([[COMPLETE]]);
      } catch (err) {
        if (running) emitter.down([[ERROR, err]]);
      }
    };
    void drain();
    return () => {
      running = false;
    };
  }, sourceOpts2(rest));
}
3266
/**
 * Sink that publishes every DATA value from `source` to a NATS subject.
 * Serialization and publish failures are reported via `onTransportError`
 * (stage "serialize" / "send") instead of propagating. Returns the
 * inner effect node's unsubscribe function.
 */
function toNATS(source, client, subject, opts) {
  const encoder = new TextEncoder();
  const {
    serialize = (v) => encoder.encode(JSON.stringify(v)),
    onTransportError,
    ...rest
  } = opts ?? {};
  // Normalize arbitrary thrown values into Error instances for reporting.
  const asError = (e) => (e instanceof Error ? e : new Error(String(e)));
  const inner = node([source], () => void 0, {
    describeKind: "effect",
    ...rest,
    onMessage(msg) {
      if (msg[0] !== DATA) return false;
      const value = msg[1];
      let data;
      try {
        data = serialize(value);
      } catch (err) {
        onTransportError?.({ stage: "serialize", error: asError(err), value });
        return true;
      }
      try {
        // publish is synchronous on this client API; a throw is a send error.
        client.publish(subject, data);
      } catch (err) {
        onTransportError?.({ stage: "send", error: asError(err), value });
      }
      return true;
    }
  });
  return inner.subscribe(() => {
  });
}
3307
/**
 * Source that consumes a RabbitMQ queue via an amqplib-style channel.
 * Registers a consumer with `{ noAck: false }`; when `autoAck` is true
 * (the default) each message is acked after being emitted (callers who
 * disable it can ack via the emitted `deliveryTag`). A `null` delivery
 * (broker-cancelled consumer) ends the stream with ERROR. Teardown
 * cancels the consumer.
 *
 * Fix: previously, if teardown ran while `channel.consume` was still
 * pending, `consumerTag` was undefined at teardown time and the
 * consumer — registered moments later — was never cancelled, leaking a
 * live consumer on the channel. We now cancel immediately when the
 * registration resolves after teardown.
 */
function fromRabbitMQ(channel, queue, opts) {
  const {
    autoAck = true,
    // Default decoding: JSON when possible, otherwise the raw string.
    deserialize = (buf) => {
      try {
        return JSON.parse(buf.toString());
      } catch {
        return buf.toString();
      }
    },
    ...rest
  } = opts ?? {};
  return producer((_d, a) => {
    let active = true;
    let consumerTag;
    const start = async () => {
      try {
        const result = await channel.consume(
          queue,
          (msg) => {
            if (!active) return;
            if (msg === null) {
              // amqplib delivers null when the broker cancels the consumer.
              if (active) a.down([[ERROR, new Error("Consumer cancelled by broker")]]);
              return;
            }
            a.emit({
              queue,
              routingKey: msg.fields.routingKey,
              exchange: msg.fields.exchange,
              content: deserialize(msg.content),
              properties: msg.properties,
              deliveryTag: msg.fields.deliveryTag,
              redelivered: msg.fields.redelivered,
              timestampNs: wallClockNs()
            });
            if (autoAck) channel.ack(msg);
          },
          { noAck: false }
        );
        if (!active) {
          // Teardown raced the consume() registration: cancel right away so
          // the broker does not keep delivering to a dead consumer.
          void channel.cancel(result.consumerTag);
          return;
        }
        consumerTag = result.consumerTag;
      } catch (err) {
        if (active) a.down([[ERROR, err]]);
      }
    };
    void start();
    return () => {
      active = false;
      if (consumerTag !== void 0) {
        void channel.cancel(consumerTag);
      }
    };
  }, sourceOpts2(rest));
}
3360
/**
 * Sink that publishes every DATA value from `source` to a RabbitMQ
 * exchange. The per-value routing key comes from `routingKeyExtractor`
 * (default: empty string). Failures surface through `onTransportError`
 * with stage "routing_key", "serialize", or "send" — they never
 * propagate into the graph. Returns the inner effect node's
 * unsubscribe function.
 */
function toRabbitMQ(source, channel, exchange, opts) {
  const {
    serialize = (v) => Buffer.from(JSON.stringify(v)),
    routingKeyExtractor = () => "",
    onTransportError,
    ...rest
  } = opts ?? {};
  // Normalize arbitrary thrown values into Error instances for reporting.
  const asError = (e) => (e instanceof Error ? e : new Error(String(e)));
  const inner = node([source], () => void 0, {
    describeKind: "effect",
    ...rest,
    onMessage(msg) {
      if (msg[0] !== DATA) return false;
      const value = msg[1];
      let routingKey;
      try {
        routingKey = routingKeyExtractor(value);
      } catch (err) {
        onTransportError?.({ stage: "routing_key", error: asError(err), value });
        return true;
      }
      let content;
      try {
        content = serialize(value);
      } catch (err) {
        onTransportError?.({ stage: "serialize", error: asError(err), value });
        return true;
      }
      try {
        channel.publish(exchange, routingKey, content);
      } catch (err) {
        onTransportError?.({ stage: "send", error: asError(err), value });
      }
      return true;
    }
  });
  return inner.subscribe(() => {
  });
}
3412
/**
 * Sink that writes each DATA value from `source` to a writable target
 * (anything exposing `write(string)` / `end()`).
 *
 * With `flushIntervalMs > 0` or a finite `batchSize`, serialized lines
 * are buffered and written as joined chunks; otherwise each line is
 * written immediately. COMPLETE/TEARDOWN flushes whatever is buffered.
 * Serialization and write failures are routed to `onTransportError`
 * (stage "serialize" / "send") rather than thrown.
 *
 * Returns `{ dispose, flush }`: `dispose` flushes, ends the writer and
 * unsubscribes; `flush` forces a flush of the current buffer.
 *
 * Fix: the pending interval timer is now cleared on COMPLETE/TEARDOWN
 * (previously only `dispose` cleared it), so a stray timeout no longer
 * keeps the event loop alive after the stream has ended.
 */
function toFile(source, writer, opts) {
  const {
    serialize = (v) => `${JSON.stringify(v)}\n`,
    flushIntervalMs = 0,
    batchSize = Number.POSITIVE_INFINITY,
    onTransportError,
    mode: _mode,
    ...rest
  } = opts ?? {};
  let buffer2 = [];
  let timer;
  // Cancel a pending interval flush, if any.
  const clearTimer = () => {
    if (timer !== void 0) {
      clearTimeout(timer);
      timer = void 0;
    }
  };
  const doFlush = () => {
    if (buffer2.length === 0) return;
    const chunk = buffer2.join("");
    buffer2 = [];
    try {
      writer.write(chunk);
    } catch (err) {
      onTransportError?.({
        stage: "send",
        error: err instanceof Error ? err : new Error(String(err)),
        value: chunk
      });
    }
  };
  const scheduleFlush = () => {
    if (flushIntervalMs > 0 && timer === void 0) {
      timer = setTimeout(() => {
        timer = void 0;
        doFlush();
      }, flushIntervalMs);
    }
  };
  const buffered = flushIntervalMs > 0 || batchSize < Number.POSITIVE_INFINITY;
  const inner = node([source], () => void 0, {
    describeKind: "effect",
    ...rest,
    onMessage(msg) {
      if (msg[0] === DATA) {
        const value = msg[1];
        let line;
        try {
          line = serialize(value);
        } catch (err) {
          onTransportError?.({
            stage: "serialize",
            error: err instanceof Error ? err : new Error(String(err)),
            value
          });
          return true;
        }
        if (buffered) {
          buffer2.push(line);
          if (buffer2.length >= batchSize) doFlush();
          else scheduleFlush();
        } else {
          try {
            writer.write(line);
          } catch (err) {
            onTransportError?.({
              stage: "send",
              error: err instanceof Error ? err : new Error(String(err)),
              value
            });
          }
        }
        return true;
      }
      if (msg[0] === COMPLETE || msg[0] === TEARDOWN) {
        // The stream ended: cancel the pending timer so it cannot fire
        // later (and keep the process alive), then drain the buffer.
        clearTimer();
        doFlush();
      }
      return false;
    }
  });
  const unsub = inner.subscribe(() => {
  });
  const dispose = () => {
    clearTimer();
    doFlush();
    writer.end();
    unsub();
  };
  return {
    dispose,
    flush: async () => {
      doFlush();
    }
  };
}
3505
/**
 * Escapes a single CSV field per RFC 4180: a field containing the
 * delimiter, a double quote, or a line break must be wrapped in double
 * quotes with embedded quotes doubled.
 *
 * Fix: also quote fields containing a carriage return ("\r") — RFC 4180
 * treats CRLF line breaks inside fields the same as LF, and an unquoted
 * CR corrupts the row structure for strict parsers.
 *
 * @param {string} value     The raw cell text.
 * @param {string} delimiter The field separator in use (e.g. ",").
 * @returns {string} The field, quoted and escaped if necessary.
 */
function escapeCSVField(value, delimiter) {
  if (
    value.includes(delimiter) ||
    value.includes('"') ||
    value.includes("\n") ||
    value.includes("\r")
  ) {
    return `"${value.replace(/"/g, '""')}"`;
  }
  return value;
}
3511
/**
 * CSV sink built on `toFile`. Serializes each value to one CSV row over
 * the given `columns` (cells produced by `cellExtractor`, fields
 * escaped via `escapeCSVField`). When `writeHeader` is true (the
 * default), the header row is prepended to the first successfully
 * serialized data row. Buffering/flush options pass through to
 * `toFile`, which also receives `onTransportError`.
 *
 * Fix: `headerWritten` was previously set to true *before* the first
 * data row was built, so if `cellExtractor` threw on the first row the
 * output never received a header. The data row is now built first and
 * the flag set only on success.
 */
function toCSV(source, writer, opts) {
  const {
    columns,
    delimiter = ",",
    writeHeader = true,
    cellExtractor = (row, col) => String(row[col] ?? ""),
    flushIntervalMs = 0,
    batchSize = Number.POSITIVE_INFINITY,
    onTransportError,
    ...rest
  } = opts;
  let headerWritten = false;
  const serializeRow = (row) => {
    // Build the data row first: if cellExtractor throws here, the header
    // has not been marked written yet, so a later row still gets it.
    const data = columns.map((c) => escapeCSVField(cellExtractor(row, c), delimiter)).join(delimiter);
    if (!headerWritten && writeHeader) {
      headerWritten = true;
      const header = columns.map((c) => escapeCSVField(c, delimiter)).join(delimiter);
      return `${header}\n${data}\n`;
    }
    return `${data}\n`;
  };
  return toFile(source, writer, {
    serialize: serializeRow,
    flushIntervalMs,
    batchSize,
    onTransportError,
    ...rest
  });
}
3543
/**
 * Batching sink that inserts DATA values into a ClickHouse table via
 * `client.insert`. Values pass through `transform` into an in-memory
 * batch, flushed once it reaches `batchSize` or after `flushIntervalMs`
 * (whichever comes first); COMPLETE/TEARDOWN also flushes. Transform
 * failures report stage "serialize", insert failures stage "send", via
 * `onTransportError`. Returns `{ dispose, flush }` — `flush` resolves
 * after the most recent insert settles.
 */
function toClickHouse(source, client, table, opts) {
  const {
    batchSize = 1e3,
    flushIntervalMs = 5e3,
    format = "JSONEachRow",
    transform = (v) => v,
    onTransportError,
    ...rest
  } = opts ?? {};
  // Normalize arbitrary thrown values into Error instances for reporting.
  const asError = (e) => (e instanceof Error ? e : new Error(String(e)));
  const reportSendError = (err, batch) => {
    onTransportError?.({ stage: "send", error: asError(err), value: batch });
  };
  let pending = [];
  let timer;
  let lastFlush = Promise.resolve();
  const doFlush = () => {
    if (pending.length === 0) return Promise.resolve();
    const batch = pending;
    pending = [];
    try {
      const p = client
        .insert({ table, values: batch, format })
        .catch((err) => reportSendError(err, batch));
      lastFlush = p;
      return p;
    } catch (err) {
      // client.insert itself threw synchronously.
      reportSendError(err, batch);
      return Promise.resolve();
    }
  };
  const scheduleFlush = () => {
    if (timer === void 0) {
      timer = setTimeout(() => {
        timer = void 0;
        doFlush();
      }, flushIntervalMs);
    }
  };
  const inner = node([source], () => void 0, {
    describeKind: "effect",
    ...rest,
    onMessage(msg) {
      if (msg[0] === DATA) {
        const value = msg[1];
        try {
          pending.push(transform(value));
        } catch (err) {
          onTransportError?.({ stage: "serialize", error: asError(err), value });
          return true;
        }
        if (pending.length >= batchSize) doFlush();
        else scheduleFlush();
        return true;
      }
      if (msg[0] === COMPLETE || msg[0] === TEARDOWN) {
        doFlush();
      }
      return false;
    }
  });
  const unsub = inner.subscribe(() => {
  });
  const dispose = () => {
    if (timer !== void 0) {
      clearTimeout(timer);
      timer = void 0;
    }
    doFlush();
    unsub();
  };
  return {
    dispose,
    flush: () => doFlush().then(() => lastFlush)
  };
}
3627
/**
 * Batching sink that uploads DATA values to S3 as either NDJSON (the
 * default) or a JSON array. Each flush writes one object whose key is
 * produced by `keyGenerator(seq, wallClockNs())` (default:
 * `data/<iso-ts>-<seq>.<ext>`). Flushes occur at `batchSize`, every
 * `flushIntervalMs`, and on COMPLETE/TEARDOWN; failures surface via
 * `onTransportError`. Returns `{ dispose, flush }` — `flush` resolves
 * after the most recent upload settles.
 */
function toS3(source, client, bucket, opts) {
  const {
    format = "ndjson",
    keyGenerator = (seq2, timestampNs) => {
      const ms = Math.floor(timestampNs / 1e6);
      const ts = new Date(ms).toISOString().replace(/[:.]/g, "-");
      return `data/${ts}-${seq2}.${format === "ndjson" ? "ndjson" : "json"}`;
    },
    batchSize = 1e3,
    flushIntervalMs = 1e4,
    transform = (v) => v,
    onTransportError,
    ...rest
  } = opts ?? {};
  // Normalize arbitrary thrown values into Error instances for reporting.
  const asError = (e) => (e instanceof Error ? e : new Error(String(e)));
  const reportSendError = (err, batch) => {
    onTransportError?.({ stage: "send", error: asError(err), value: batch });
  };
  let pending = [];
  let timer;
  let seq = 0;
  let lastFlush = Promise.resolve();
  const doFlush = () => {
    if (pending.length === 0) return Promise.resolve();
    const batch = pending;
    pending = [];
    seq += 1;
    const body = format === "ndjson"
      ? `${batch.map((v) => JSON.stringify(v)).join("\n")}\n`
      : JSON.stringify(batch);
    const contentType = format === "ndjson" ? "application/x-ndjson" : "application/json";
    const key = keyGenerator(seq, wallClockNs());
    try {
      const upload = client
        .putObject({ Bucket: bucket, Key: key, Body: body, ContentType: contentType })
        .then(() => {
        })
        .catch((err) => reportSendError(err, batch));
      lastFlush = upload;
      return upload;
    } catch (err) {
      // putObject itself threw synchronously.
      reportSendError(err, batch);
      return Promise.resolve();
    }
  };
  const scheduleFlush = () => {
    if (timer === void 0) {
      timer = setTimeout(() => {
        timer = void 0;
        doFlush();
      }, flushIntervalMs);
    }
  };
  const inner = node([source], () => void 0, {
    describeKind: "effect",
    ...rest,
    onMessage(msg) {
      if (msg[0] === DATA) {
        const value = msg[1];
        try {
          pending.push(transform(value));
        } catch (err) {
          onTransportError?.({ stage: "serialize", error: asError(err), value });
          return true;
        }
        if (pending.length >= batchSize) doFlush();
        else scheduleFlush();
        return true;
      }
      if (msg[0] === COMPLETE || msg[0] === TEARDOWN) {
        doFlush();
      }
      return false;
    }
  });
  const unsub = inner.subscribe(() => {
  });
  const dispose = () => {
    if (timer !== void 0) {
      clearTimeout(timer);
      timer = void 0;
    }
    doFlush();
    unsub();
  };
  return {
    dispose,
    flush: () => doFlush().then(() => lastFlush)
  };
}
3723
/**
 * Sink that runs one INSERT per DATA value through `client.query`.
 * `toSQL(value, table)` yields `{ sql, params }`; the default stores
 * the JSON-serialized value into a `data` column of `table` (the table
 * name is quoted, embedded double quotes doubled). Queries are
 * fire-and-forget; failures go to `onTransportError` ("serialize" for
 * toSQL, "send" for the query). Returns the inner effect node's
 * unsubscribe function.
 */
function toPostgres(source, client, table, opts) {
  const {
    toSQL = (v, t) => ({
      sql: `INSERT INTO "${t.replace(/"/g, '""')}" (data) VALUES ($1)`,
      params: [JSON.stringify(v)]
    }),
    onTransportError,
    ...rest
  } = opts ?? {};
  // Normalize arbitrary thrown values into Error instances for reporting.
  const asError = (e) => (e instanceof Error ? e : new Error(String(e)));
  const inner = node([source], () => void 0, {
    describeKind: "effect",
    ...rest,
    onMessage(msg) {
      if (msg[0] !== DATA) return false;
      const value = msg[1];
      let query;
      try {
        query = toSQL(value, table);
      } catch (err) {
        onTransportError?.({ stage: "serialize", error: asError(err), value });
        return true;
      }
      void client.query(query.sql, query.params).catch((err) => {
        onTransportError?.({ stage: "send", error: asError(err), value });
      });
      return true;
    }
  });
  return inner.subscribe(() => {
  });
}
3764
/**
 * Sink that inserts one document per DATA value via
 * `collection.insertOne`. Documents come from `toDocument` (default:
 * the value itself). Inserts are fire-and-forget; failures are routed
 * to `onTransportError` ("serialize" for toDocument, "send" for the
 * insert). Returns the inner effect node's unsubscribe function.
 */
function toMongo(source, collection, opts) {
  const { toDocument = (v) => v, onTransportError, ...rest } = opts ?? {};
  // Normalize arbitrary thrown values into Error instances for reporting.
  const asError = (e) => (e instanceof Error ? e : new Error(String(e)));
  const inner = node([source], () => void 0, {
    describeKind: "effect",
    ...rest,
    onMessage(msg) {
      if (msg[0] !== DATA) return false;
      const value = msg[1];
      let doc;
      try {
        doc = toDocument(value);
      } catch (err) {
        onTransportError?.({ stage: "serialize", error: asError(err), value });
        return true;
      }
      void collection.insertOne(doc).catch((err) => {
        onTransportError?.({ stage: "send", error: asError(err), value });
      });
      return true;
    }
  });
  return inner.subscribe(() => {
  });
}
3798
/**
 * Sink that pushes each DATA value to Loki as a single-entry stream.
 * The log line comes from `toLine` (default: JSON) and the stream
 * labels from the static `labels` merged with per-value `toLabels`
 * output; the entry timestamp is the wall clock in nanoseconds.
 * Pushes are fire-and-forget; line/label failures report stage
 * "serialize" and push failures stage "send" via `onTransportError`.
 * Returns the inner effect node's unsubscribe function.
 */
function toLoki(source, client, opts) {
  const {
    labels = {},
    toLine = (v) => JSON.stringify(v),
    toLabels,
    onTransportError,
    ...rest
  } = opts ?? {};
  // Normalize arbitrary thrown values into Error instances for reporting.
  const asError = (e) => (e instanceof Error ? e : new Error(String(e)));
  const reportSerializeError = (err, value) => {
    onTransportError?.({ stage: "serialize", error: asError(err), value });
  };
  const inner = node([source], () => void 0, {
    describeKind: "effect",
    ...rest,
    onMessage(msg) {
      if (msg[0] !== DATA) return false;
      const value = msg[1];
      let line;
      try {
        line = toLine(value);
      } catch (err) {
        reportSerializeError(err, value);
        return true;
      }
      let streamLabels;
      try {
        streamLabels = toLabels ? { ...labels, ...toLabels(value) } : labels;
      } catch (err) {
        reportSerializeError(err, value);
        return true;
      }
      const ts = `${wallClockNs()}`;
      void client
        .push({ streams: [{ stream: streamLabels, values: [[ts, line]] }] })
        .catch((err) => {
          onTransportError?.({ stage: "send", error: asError(err), value });
        });
      return true;
    }
  });
  return inner.subscribe(() => {
  });
}
3850
/**
 * Sink that pushes `{ resourceSpans }` payloads to a Tempo client per
 * DATA value. `toResourceSpans` maps a value to the spans array
 * (default: wrap the value in a one-element array). Pushes are
 * fire-and-forget; failures surface via `onTransportError`
 * ("serialize" for mapping, "send" for the push). Returns the inner
 * effect node's unsubscribe function.
 */
function toTempo(source, client, opts) {
  const { toResourceSpans = (v) => [v], onTransportError, ...rest } = opts ?? {};
  // Normalize arbitrary thrown values into Error instances for reporting.
  const asError = (e) => (e instanceof Error ? e : new Error(String(e)));
  const inner = node([source], () => void 0, {
    describeKind: "effect",
    ...rest,
    onMessage(msg) {
      if (msg[0] !== DATA) return false;
      const value = msg[1];
      let spans;
      try {
        spans = toResourceSpans(value);
      } catch (err) {
        onTransportError?.({ stage: "serialize", error: asError(err), value });
        return true;
      }
      void client.push({ resourceSpans: spans }).catch((err) => {
        onTransportError?.({ stage: "send", error: asError(err), value });
      });
      return true;
    }
  });
  return inner.subscribe(() => {
  });
}
3884
/**
 * Wires a graph's auto-checkpointing to S3. Every save serializes the
 * checkpoint to JSON and uploads it (fire-and-forget) under
 * `<prefix><graph.name>/checkpoint-<epoch-ms>.json`. Serialization and
 * upload failures are reported to `onError`. Returns whatever
 * `graph.autoCheckpoint` returns.
 */
function checkpointToS3(graph, client, bucket, opts) {
  const { prefix = "checkpoints/", debounceMs, compactEvery, onError } = opts ?? {};
  const s3Adapter = {
    save(data) {
      // Key is timestamped in wall-clock milliseconds so saves never clobber.
      const nowMs = Math.floor(wallClockNs() / 1e6);
      const objectKey = `${prefix}${graph.name}/checkpoint-${nowMs}.json`;
      let payload;
      try {
        payload = JSON.stringify(data);
      } catch (err) {
        onError?.(err);
        return;
      }
      void client
        .putObject({
          Bucket: bucket,
          Key: objectKey,
          Body: payload,
          ContentType: "application/json"
        })
        .catch((err) => onError?.(err));
    }
  };
  return graph.autoCheckpoint(s3Adapter, { debounceMs, compactEvery, onError });
}
3907
/**
 * Wires a graph's auto-checkpointing to Redis. Every save overwrites
 * the single key `<prefix><graph.name>` with the JSON-serialized
 * checkpoint via a fire-and-forget SET. Serialization and SET failures
 * are reported to `onError`. Returns whatever `graph.autoCheckpoint`
 * returns.
 */
function checkpointToRedis(graph, client, opts) {
  const { prefix = "graphrefly:checkpoint:", debounceMs, compactEvery, onError } = opts ?? {};
  const redisKey = `${prefix}${graph.name}`;
  const redisAdapter = {
    save(data) {
      let payload;
      try {
        payload = JSON.stringify(data);
      } catch (err) {
        onError?.(err);
        return;
      }
      void client.set(redisKey, payload).catch((err) => onError?.(err));
    }
  };
  return graph.autoCheckpoint(redisAdapter, { debounceMs, compactEvery, onError });
}
3117
3924
 
3118
3925
  // src/extra/backpressure.ts
3119
3926
  var nextLockId = 0;
@@ -6754,6 +7561,8 @@ function workerSelf(target, opts) {
6754
7561
  cached,
6755
7562
  catchError,
6756
7563
  checkpointNodeValue,
7564
+ checkpointToRedis,
7565
+ checkpointToS3,
6757
7566
  circuitBreaker,
6758
7567
  combine,
6759
7568
  combineLatest,
@@ -6795,10 +7604,13 @@ function workerSelf(target, opts) {
6795
7604
  fromIter,
6796
7605
  fromKafka,
6797
7606
  fromMCP,
7607
+ fromNATS,
6798
7608
  fromNDJSON,
6799
7609
  fromOTel,
6800
7610
  fromPrometheus,
6801
7611
  fromPromise,
7612
+ fromPulsar,
7613
+ fromRabbitMQ,
6802
7614
  fromRedisStream,
6803
7615
  fromStatsD,
6804
7616
  fromSyslog,
@@ -6862,11 +7674,22 @@ function workerSelf(target, opts) {
6862
7674
  throwError,
6863
7675
  timeout,
6864
7676
  toArray,
7677
+ toCSV,
7678
+ toClickHouse,
7679
+ toFile,
6865
7680
  toKafka,
7681
+ toLoki,
6866
7682
  toMessages$,
7683
+ toMongo,
7684
+ toNATS,
6867
7685
  toObservable,
7686
+ toPostgres,
7687
+ toPulsar,
7688
+ toRabbitMQ,
6868
7689
  toRedisStream,
7690
+ toS3,
6869
7691
  toSSE,
7692
+ toTempo,
6870
7693
  toWebSocket,
6871
7694
  tokenBucket,
6872
7695
  tokenTracker,