@graphrefly/graphrefly 0.4.0 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-VPS7L64N.js → chunk-V3UACY6A.js} +900 -4
- package/dist/chunk-V3UACY6A.js.map +1 -0
- package/dist/compat/nestjs/index.js +1 -1
- package/dist/extra/index.cjs +899 -3
- package/dist/extra/index.cjs.map +1 -1
- package/dist/extra/index.d.cts +1 -1
- package/dist/extra/index.d.ts +1 -1
- package/dist/extra/index.js +37 -1
- package/dist/{index-BHUvlQ3v.d.ts → index-B2jmzVxL.d.ts} +708 -4
- package/dist/{index-B6SsZs2h.d.cts → index-Bk_idZm1.d.cts} +708 -4
- package/dist/index.cjs +917 -3
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +2 -2
- package/dist/index.d.ts +2 -2
- package/dist/index.js +45 -9
- package/dist/index.js.map +1 -1
- package/package.json +4 -2
- package/dist/chunk-VPS7L64N.js.map +0 -1
package/dist/index.cjs
CHANGED
|
@@ -98,6 +98,8 @@ __export(index_exports, {
|
|
|
98
98
|
cached: () => cached,
|
|
99
99
|
catchError: () => catchError,
|
|
100
100
|
checkpointNodeValue: () => checkpointNodeValue,
|
|
101
|
+
checkpointToRedis: () => checkpointToRedis,
|
|
102
|
+
checkpointToS3: () => checkpointToS3,
|
|
101
103
|
circuitBreaker: () => circuitBreaker,
|
|
102
104
|
combine: () => combine,
|
|
103
105
|
combineLatest: () => combineLatest,
|
|
@@ -152,11 +154,15 @@ __export(index_exports, {
|
|
|
152
154
|
fromIter: () => fromIter,
|
|
153
155
|
fromKafka: () => fromKafka,
|
|
154
156
|
fromMCP: () => fromMCP,
|
|
157
|
+
fromNATS: () => fromNATS,
|
|
155
158
|
fromNDJSON: () => fromNDJSON,
|
|
156
159
|
fromOTel: () => fromOTel,
|
|
157
160
|
fromPrometheus: () => fromPrometheus,
|
|
158
161
|
fromPromise: () => fromPromise,
|
|
162
|
+
fromPulsar: () => fromPulsar,
|
|
163
|
+
fromRabbitMQ: () => fromRabbitMQ,
|
|
159
164
|
fromRedisStream: () => fromRedisStream,
|
|
165
|
+
fromSqlite: () => fromSqlite,
|
|
160
166
|
fromStatsD: () => fromStatsD,
|
|
161
167
|
fromSyslog: () => fromSyslog,
|
|
162
168
|
fromTimer: () => fromTimer,
|
|
@@ -253,11 +259,23 @@ __export(index_exports, {
|
|
|
253
259
|
throwError: () => throwError,
|
|
254
260
|
timeout: () => timeout,
|
|
255
261
|
toArray: () => toArray,
|
|
262
|
+
toCSV: () => toCSV,
|
|
263
|
+
toClickHouse: () => toClickHouse,
|
|
264
|
+
toFile: () => toFile,
|
|
256
265
|
toKafka: () => toKafka,
|
|
266
|
+
toLoki: () => toLoki,
|
|
257
267
|
toMessages$: () => toMessages$,
|
|
268
|
+
toMongo: () => toMongo,
|
|
269
|
+
toNATS: () => toNATS,
|
|
258
270
|
toObservable: () => toObservable,
|
|
271
|
+
toPostgres: () => toPostgres,
|
|
272
|
+
toPulsar: () => toPulsar,
|
|
273
|
+
toRabbitMQ: () => toRabbitMQ,
|
|
259
274
|
toRedisStream: () => toRedisStream,
|
|
275
|
+
toS3: () => toS3,
|
|
260
276
|
toSSE: () => toSSE,
|
|
277
|
+
toSqlite: () => toSqlite,
|
|
278
|
+
toTempo: () => toTempo,
|
|
261
279
|
toWebSocket: () => toWebSocket,
|
|
262
280
|
tokenBucket: () => tokenBucket,
|
|
263
281
|
tokenTracker: () => tokenTracker,
|
|
@@ -6541,6 +6559,8 @@ __export(extra_exports, {
|
|
|
6541
6559
|
cached: () => cached,
|
|
6542
6560
|
catchError: () => catchError,
|
|
6543
6561
|
checkpointNodeValue: () => checkpointNodeValue,
|
|
6562
|
+
checkpointToRedis: () => checkpointToRedis,
|
|
6563
|
+
checkpointToS3: () => checkpointToS3,
|
|
6544
6564
|
circuitBreaker: () => circuitBreaker,
|
|
6545
6565
|
combine: () => combine,
|
|
6546
6566
|
combineLatest: () => combineLatest,
|
|
@@ -6582,11 +6602,15 @@ __export(extra_exports, {
|
|
|
6582
6602
|
fromIter: () => fromIter,
|
|
6583
6603
|
fromKafka: () => fromKafka,
|
|
6584
6604
|
fromMCP: () => fromMCP,
|
|
6605
|
+
fromNATS: () => fromNATS,
|
|
6585
6606
|
fromNDJSON: () => fromNDJSON,
|
|
6586
6607
|
fromOTel: () => fromOTel,
|
|
6587
6608
|
fromPrometheus: () => fromPrometheus,
|
|
6588
6609
|
fromPromise: () => fromPromise,
|
|
6610
|
+
fromPulsar: () => fromPulsar,
|
|
6611
|
+
fromRabbitMQ: () => fromRabbitMQ,
|
|
6589
6612
|
fromRedisStream: () => fromRedisStream,
|
|
6613
|
+
fromSqlite: () => fromSqlite,
|
|
6590
6614
|
fromStatsD: () => fromStatsD,
|
|
6591
6615
|
fromSyslog: () => fromSyslog,
|
|
6592
6616
|
fromTimer: () => fromTimer,
|
|
@@ -6649,11 +6673,23 @@ __export(extra_exports, {
|
|
|
6649
6673
|
throwError: () => throwError,
|
|
6650
6674
|
timeout: () => timeout,
|
|
6651
6675
|
toArray: () => toArray,
|
|
6676
|
+
toCSV: () => toCSV,
|
|
6677
|
+
toClickHouse: () => toClickHouse,
|
|
6678
|
+
toFile: () => toFile,
|
|
6652
6679
|
toKafka: () => toKafka,
|
|
6680
|
+
toLoki: () => toLoki,
|
|
6653
6681
|
toMessages$: () => toMessages$,
|
|
6682
|
+
toMongo: () => toMongo,
|
|
6683
|
+
toNATS: () => toNATS,
|
|
6654
6684
|
toObservable: () => toObservable,
|
|
6685
|
+
toPostgres: () => toPostgres,
|
|
6686
|
+
toPulsar: () => toPulsar,
|
|
6687
|
+
toRabbitMQ: () => toRabbitMQ,
|
|
6655
6688
|
toRedisStream: () => toRedisStream,
|
|
6689
|
+
toS3: () => toS3,
|
|
6656
6690
|
toSSE: () => toSSE,
|
|
6691
|
+
toSqlite: () => toSqlite,
|
|
6692
|
+
toTempo: () => toTempo,
|
|
6657
6693
|
toWebSocket: () => toWebSocket,
|
|
6658
6694
|
tokenBucket: () => tokenBucket,
|
|
6659
6695
|
tokenTracker: () => tokenTracker,
|
|
@@ -7455,7 +7491,7 @@ function toWebSocket(source, socket, opts) {
|
|
|
7455
7491
|
};
|
|
7456
7492
|
const inner = node([source], () => void 0, {
|
|
7457
7493
|
describeKind: "effect",
|
|
7458
|
-
onMessage(msg) {
|
|
7494
|
+
onMessage(msg, _depIndex, _actions) {
|
|
7459
7495
|
if (msg[0] === DATA) {
|
|
7460
7496
|
let serialized;
|
|
7461
7497
|
try {
|
|
@@ -7906,7 +7942,7 @@ function toKafka(source, kafkaProducer, topic2, opts) {
|
|
|
7906
7942
|
const inner = node([source], () => void 0, {
|
|
7907
7943
|
describeKind: "effect",
|
|
7908
7944
|
...rest,
|
|
7909
|
-
onMessage(msg) {
|
|
7945
|
+
onMessage(msg, _depIndex, _actions) {
|
|
7910
7946
|
if (msg[0] === DATA) {
|
|
7911
7947
|
const value = msg[1];
|
|
7912
7948
|
const key = keyExtractor?.(value) ?? null;
|
|
@@ -8005,7 +8041,7 @@ function toRedisStream(source, client, key, opts) {
|
|
|
8005
8041
|
const inner = node([source], () => void 0, {
|
|
8006
8042
|
describeKind: "effect",
|
|
8007
8043
|
...rest,
|
|
8008
|
-
onMessage(msg) {
|
|
8044
|
+
onMessage(msg, _depIndex, _actions) {
|
|
8009
8045
|
if (msg[0] === DATA) {
|
|
8010
8046
|
const value = msg[1];
|
|
8011
8047
|
let fields;
|
|
@@ -8210,6 +8246,866 @@ function fromClickHouseWatch(client, query, opts) {
|
|
|
8210
8246
|
};
|
|
8211
8247
|
}, sourceOpts2(rest));
|
|
8212
8248
|
}
|
|
8249
|
+
function fromPulsar(consumer, opts) {
|
|
8250
|
+
const {
|
|
8251
|
+
autoAck = true,
|
|
8252
|
+
deserialize = (buf) => {
|
|
8253
|
+
try {
|
|
8254
|
+
return JSON.parse(buf.toString());
|
|
8255
|
+
} catch {
|
|
8256
|
+
return buf.toString();
|
|
8257
|
+
}
|
|
8258
|
+
},
|
|
8259
|
+
...rest
|
|
8260
|
+
} = opts ?? {};
|
|
8261
|
+
return producer((_d, a) => {
|
|
8262
|
+
let active = true;
|
|
8263
|
+
const loop2 = async () => {
|
|
8264
|
+
while (active) {
|
|
8265
|
+
try {
|
|
8266
|
+
const msg = await consumer.receive();
|
|
8267
|
+
if (!active) return;
|
|
8268
|
+
a.emit({
|
|
8269
|
+
topic: msg.getTopicName(),
|
|
8270
|
+
messageId: msg.getMessageId().toString(),
|
|
8271
|
+
key: msg.getPartitionKey(),
|
|
8272
|
+
value: deserialize(msg.getData()),
|
|
8273
|
+
properties: msg.getProperties(),
|
|
8274
|
+
publishTime: msg.getPublishTimestamp(),
|
|
8275
|
+
eventTime: msg.getEventTimestamp(),
|
|
8276
|
+
timestampNs: wallClockNs()
|
|
8277
|
+
});
|
|
8278
|
+
if (autoAck) await consumer.acknowledge(msg);
|
|
8279
|
+
} catch (err) {
|
|
8280
|
+
if (active) a.down([[ERROR, err]]);
|
|
8281
|
+
return;
|
|
8282
|
+
}
|
|
8283
|
+
}
|
|
8284
|
+
};
|
|
8285
|
+
void loop2();
|
|
8286
|
+
return () => {
|
|
8287
|
+
active = false;
|
|
8288
|
+
};
|
|
8289
|
+
}, sourceOpts2(rest));
|
|
8290
|
+
}
|
|
8291
|
+
function toPulsar(source, pulsarProducer, opts) {
|
|
8292
|
+
const {
|
|
8293
|
+
serialize = (v) => Buffer.from(JSON.stringify(v)),
|
|
8294
|
+
keyExtractor,
|
|
8295
|
+
propertiesExtractor,
|
|
8296
|
+
onTransportError,
|
|
8297
|
+
...rest
|
|
8298
|
+
} = opts ?? {};
|
|
8299
|
+
const inner = node([source], () => void 0, {
|
|
8300
|
+
describeKind: "effect",
|
|
8301
|
+
...rest,
|
|
8302
|
+
onMessage(msg, _depIndex, _actions) {
|
|
8303
|
+
if (msg[0] === DATA) {
|
|
8304
|
+
const value = msg[1];
|
|
8305
|
+
let data;
|
|
8306
|
+
try {
|
|
8307
|
+
data = serialize(value);
|
|
8308
|
+
} catch (err) {
|
|
8309
|
+
onTransportError?.({
|
|
8310
|
+
stage: "serialize",
|
|
8311
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8312
|
+
value
|
|
8313
|
+
});
|
|
8314
|
+
return true;
|
|
8315
|
+
}
|
|
8316
|
+
void pulsarProducer.send({
|
|
8317
|
+
data,
|
|
8318
|
+
partitionKey: keyExtractor?.(value),
|
|
8319
|
+
properties: propertiesExtractor?.(value)
|
|
8320
|
+
}).catch((err) => {
|
|
8321
|
+
onTransportError?.({
|
|
8322
|
+
stage: "send",
|
|
8323
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8324
|
+
value
|
|
8325
|
+
});
|
|
8326
|
+
});
|
|
8327
|
+
return true;
|
|
8328
|
+
}
|
|
8329
|
+
return false;
|
|
8330
|
+
}
|
|
8331
|
+
});
|
|
8332
|
+
return inner.subscribe(() => {
|
|
8333
|
+
});
|
|
8334
|
+
}
|
|
8335
|
+
function fromNATS(client, subject, opts) {
|
|
8336
|
+
const decoder = new TextDecoder();
|
|
8337
|
+
const {
|
|
8338
|
+
queue,
|
|
8339
|
+
deserialize = (data) => {
|
|
8340
|
+
const text = decoder.decode(data);
|
|
8341
|
+
try {
|
|
8342
|
+
return JSON.parse(text);
|
|
8343
|
+
} catch {
|
|
8344
|
+
return text;
|
|
8345
|
+
}
|
|
8346
|
+
},
|
|
8347
|
+
...rest
|
|
8348
|
+
} = opts ?? {};
|
|
8349
|
+
return producer((_d, a) => {
|
|
8350
|
+
let active = true;
|
|
8351
|
+
const sub = client.subscribe(subject, queue ? { queue } : void 0);
|
|
8352
|
+
const loop2 = async () => {
|
|
8353
|
+
try {
|
|
8354
|
+
for await (const msg of sub) {
|
|
8355
|
+
if (!active) return;
|
|
8356
|
+
const headers = {};
|
|
8357
|
+
if (msg.headers) {
|
|
8358
|
+
for (const k of msg.headers.keys()) {
|
|
8359
|
+
headers[k] = msg.headers.get(k);
|
|
8360
|
+
}
|
|
8361
|
+
}
|
|
8362
|
+
a.emit({
|
|
8363
|
+
subject: msg.subject,
|
|
8364
|
+
data: deserialize(msg.data),
|
|
8365
|
+
headers,
|
|
8366
|
+
reply: msg.reply,
|
|
8367
|
+
sid: msg.sid,
|
|
8368
|
+
timestampNs: wallClockNs()
|
|
8369
|
+
});
|
|
8370
|
+
}
|
|
8371
|
+
if (active) a.down([[COMPLETE]]);
|
|
8372
|
+
} catch (err) {
|
|
8373
|
+
if (active) a.down([[ERROR, err]]);
|
|
8374
|
+
}
|
|
8375
|
+
};
|
|
8376
|
+
void loop2();
|
|
8377
|
+
return () => {
|
|
8378
|
+
active = false;
|
|
8379
|
+
};
|
|
8380
|
+
}, sourceOpts2(rest));
|
|
8381
|
+
}
|
|
8382
|
+
function toNATS(source, client, subject, opts) {
|
|
8383
|
+
const encoder = new TextEncoder();
|
|
8384
|
+
const {
|
|
8385
|
+
serialize = (v) => encoder.encode(JSON.stringify(v)),
|
|
8386
|
+
onTransportError,
|
|
8387
|
+
...rest
|
|
8388
|
+
} = opts ?? {};
|
|
8389
|
+
const inner = node([source], () => void 0, {
|
|
8390
|
+
describeKind: "effect",
|
|
8391
|
+
...rest,
|
|
8392
|
+
onMessage(msg, _depIndex, _actions) {
|
|
8393
|
+
if (msg[0] === DATA) {
|
|
8394
|
+
const value = msg[1];
|
|
8395
|
+
let data;
|
|
8396
|
+
try {
|
|
8397
|
+
data = serialize(value);
|
|
8398
|
+
} catch (err) {
|
|
8399
|
+
onTransportError?.({
|
|
8400
|
+
stage: "serialize",
|
|
8401
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8402
|
+
value
|
|
8403
|
+
});
|
|
8404
|
+
return true;
|
|
8405
|
+
}
|
|
8406
|
+
try {
|
|
8407
|
+
client.publish(subject, data);
|
|
8408
|
+
} catch (err) {
|
|
8409
|
+
onTransportError?.({
|
|
8410
|
+
stage: "send",
|
|
8411
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8412
|
+
value
|
|
8413
|
+
});
|
|
8414
|
+
}
|
|
8415
|
+
return true;
|
|
8416
|
+
}
|
|
8417
|
+
return false;
|
|
8418
|
+
}
|
|
8419
|
+
});
|
|
8420
|
+
return inner.subscribe(() => {
|
|
8421
|
+
});
|
|
8422
|
+
}
|
|
8423
|
+
function fromRabbitMQ(channel, queue, opts) {
|
|
8424
|
+
const {
|
|
8425
|
+
autoAck = true,
|
|
8426
|
+
deserialize = (buf) => {
|
|
8427
|
+
try {
|
|
8428
|
+
return JSON.parse(buf.toString());
|
|
8429
|
+
} catch {
|
|
8430
|
+
return buf.toString();
|
|
8431
|
+
}
|
|
8432
|
+
},
|
|
8433
|
+
...rest
|
|
8434
|
+
} = opts ?? {};
|
|
8435
|
+
return producer((_d, a) => {
|
|
8436
|
+
let active = true;
|
|
8437
|
+
let consumerTag;
|
|
8438
|
+
const start = async () => {
|
|
8439
|
+
try {
|
|
8440
|
+
const result = await channel.consume(
|
|
8441
|
+
queue,
|
|
8442
|
+
(msg) => {
|
|
8443
|
+
if (!active) return;
|
|
8444
|
+
if (msg === null) {
|
|
8445
|
+
if (active) a.down([[ERROR, new Error("Consumer cancelled by broker")]]);
|
|
8446
|
+
return;
|
|
8447
|
+
}
|
|
8448
|
+
a.emit({
|
|
8449
|
+
queue,
|
|
8450
|
+
routingKey: msg.fields.routingKey,
|
|
8451
|
+
exchange: msg.fields.exchange,
|
|
8452
|
+
content: deserialize(msg.content),
|
|
8453
|
+
properties: msg.properties,
|
|
8454
|
+
deliveryTag: msg.fields.deliveryTag,
|
|
8455
|
+
redelivered: msg.fields.redelivered,
|
|
8456
|
+
timestampNs: wallClockNs()
|
|
8457
|
+
});
|
|
8458
|
+
if (autoAck) channel.ack(msg);
|
|
8459
|
+
},
|
|
8460
|
+
{ noAck: false }
|
|
8461
|
+
);
|
|
8462
|
+
consumerTag = result.consumerTag;
|
|
8463
|
+
} catch (err) {
|
|
8464
|
+
if (active) a.down([[ERROR, err]]);
|
|
8465
|
+
}
|
|
8466
|
+
};
|
|
8467
|
+
void start();
|
|
8468
|
+
return () => {
|
|
8469
|
+
active = false;
|
|
8470
|
+
if (consumerTag !== void 0) {
|
|
8471
|
+
void channel.cancel(consumerTag);
|
|
8472
|
+
}
|
|
8473
|
+
};
|
|
8474
|
+
}, sourceOpts2(rest));
|
|
8475
|
+
}
|
|
8476
|
+
function toRabbitMQ(source, channel, exchange, opts) {
|
|
8477
|
+
const {
|
|
8478
|
+
serialize = (v) => Buffer.from(JSON.stringify(v)),
|
|
8479
|
+
routingKeyExtractor = () => "",
|
|
8480
|
+
onTransportError,
|
|
8481
|
+
...rest
|
|
8482
|
+
} = opts ?? {};
|
|
8483
|
+
const inner = node([source], () => void 0, {
|
|
8484
|
+
describeKind: "effect",
|
|
8485
|
+
...rest,
|
|
8486
|
+
onMessage(msg, _depIndex, _actions) {
|
|
8487
|
+
if (msg[0] === DATA) {
|
|
8488
|
+
const value = msg[1];
|
|
8489
|
+
let routingKey;
|
|
8490
|
+
try {
|
|
8491
|
+
routingKey = routingKeyExtractor(value);
|
|
8492
|
+
} catch (err) {
|
|
8493
|
+
onTransportError?.({
|
|
8494
|
+
stage: "routing_key",
|
|
8495
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8496
|
+
value
|
|
8497
|
+
});
|
|
8498
|
+
return true;
|
|
8499
|
+
}
|
|
8500
|
+
let content;
|
|
8501
|
+
try {
|
|
8502
|
+
content = serialize(value);
|
|
8503
|
+
} catch (err) {
|
|
8504
|
+
onTransportError?.({
|
|
8505
|
+
stage: "serialize",
|
|
8506
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8507
|
+
value
|
|
8508
|
+
});
|
|
8509
|
+
return true;
|
|
8510
|
+
}
|
|
8511
|
+
try {
|
|
8512
|
+
channel.publish(exchange, routingKey, content);
|
|
8513
|
+
} catch (err) {
|
|
8514
|
+
onTransportError?.({
|
|
8515
|
+
stage: "send",
|
|
8516
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8517
|
+
value
|
|
8518
|
+
});
|
|
8519
|
+
}
|
|
8520
|
+
return true;
|
|
8521
|
+
}
|
|
8522
|
+
return false;
|
|
8523
|
+
}
|
|
8524
|
+
});
|
|
8525
|
+
return inner.subscribe(() => {
|
|
8526
|
+
});
|
|
8527
|
+
}
|
|
8528
|
+
function toFile(source, writer, opts) {
|
|
8529
|
+
const {
|
|
8530
|
+
serialize = (v) => `${JSON.stringify(v)}
|
|
8531
|
+
`,
|
|
8532
|
+
flushIntervalMs = 0,
|
|
8533
|
+
batchSize = Number.POSITIVE_INFINITY,
|
|
8534
|
+
onTransportError,
|
|
8535
|
+
mode: _mode,
|
|
8536
|
+
...rest
|
|
8537
|
+
} = opts ?? {};
|
|
8538
|
+
let buffer2 = [];
|
|
8539
|
+
let timer;
|
|
8540
|
+
const doFlush = () => {
|
|
8541
|
+
if (buffer2.length === 0) return;
|
|
8542
|
+
const chunk = buffer2.join("");
|
|
8543
|
+
buffer2 = [];
|
|
8544
|
+
try {
|
|
8545
|
+
writer.write(chunk);
|
|
8546
|
+
} catch (err) {
|
|
8547
|
+
onTransportError?.({
|
|
8548
|
+
stage: "send",
|
|
8549
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8550
|
+
value: chunk
|
|
8551
|
+
});
|
|
8552
|
+
}
|
|
8553
|
+
};
|
|
8554
|
+
const scheduleFlush = () => {
|
|
8555
|
+
if (flushIntervalMs > 0 && timer === void 0) {
|
|
8556
|
+
timer = setTimeout(() => {
|
|
8557
|
+
timer = void 0;
|
|
8558
|
+
doFlush();
|
|
8559
|
+
}, flushIntervalMs);
|
|
8560
|
+
}
|
|
8561
|
+
};
|
|
8562
|
+
const buffered = flushIntervalMs > 0 || batchSize < Number.POSITIVE_INFINITY;
|
|
8563
|
+
const inner = node([source], () => void 0, {
|
|
8564
|
+
describeKind: "effect",
|
|
8565
|
+
...rest,
|
|
8566
|
+
onMessage(msg, _depIndex, _actions) {
|
|
8567
|
+
if (msg[0] === DATA) {
|
|
8568
|
+
const value = msg[1];
|
|
8569
|
+
let line;
|
|
8570
|
+
try {
|
|
8571
|
+
line = serialize(value);
|
|
8572
|
+
} catch (err) {
|
|
8573
|
+
onTransportError?.({
|
|
8574
|
+
stage: "serialize",
|
|
8575
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8576
|
+
value
|
|
8577
|
+
});
|
|
8578
|
+
return true;
|
|
8579
|
+
}
|
|
8580
|
+
if (buffered) {
|
|
8581
|
+
buffer2.push(line);
|
|
8582
|
+
if (buffer2.length >= batchSize) doFlush();
|
|
8583
|
+
else scheduleFlush();
|
|
8584
|
+
} else {
|
|
8585
|
+
try {
|
|
8586
|
+
writer.write(line);
|
|
8587
|
+
} catch (err) {
|
|
8588
|
+
onTransportError?.({
|
|
8589
|
+
stage: "send",
|
|
8590
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8591
|
+
value
|
|
8592
|
+
});
|
|
8593
|
+
}
|
|
8594
|
+
}
|
|
8595
|
+
return true;
|
|
8596
|
+
}
|
|
8597
|
+
if (msg[0] === COMPLETE || msg[0] === TEARDOWN) {
|
|
8598
|
+
doFlush();
|
|
8599
|
+
}
|
|
8600
|
+
return false;
|
|
8601
|
+
}
|
|
8602
|
+
});
|
|
8603
|
+
const unsub = inner.subscribe(() => {
|
|
8604
|
+
});
|
|
8605
|
+
const dispose = () => {
|
|
8606
|
+
if (timer !== void 0) {
|
|
8607
|
+
clearTimeout(timer);
|
|
8608
|
+
timer = void 0;
|
|
8609
|
+
}
|
|
8610
|
+
doFlush();
|
|
8611
|
+
writer.end();
|
|
8612
|
+
unsub();
|
|
8613
|
+
};
|
|
8614
|
+
return {
|
|
8615
|
+
dispose,
|
|
8616
|
+
flush: async () => {
|
|
8617
|
+
doFlush();
|
|
8618
|
+
}
|
|
8619
|
+
};
|
|
8620
|
+
}
|
|
8621
|
+
function escapeCSVField(value, delimiter) {
|
|
8622
|
+
if (value.includes(delimiter) || value.includes('"') || value.includes("\n")) {
|
|
8623
|
+
return `"${value.replace(/"/g, '""')}"`;
|
|
8624
|
+
}
|
|
8625
|
+
return value;
|
|
8626
|
+
}
|
|
8627
|
+
function toCSV(source, writer, opts) {
|
|
8628
|
+
const {
|
|
8629
|
+
columns,
|
|
8630
|
+
delimiter = ",",
|
|
8631
|
+
writeHeader = true,
|
|
8632
|
+
cellExtractor = (row, col) => String(row[col] ?? ""),
|
|
8633
|
+
flushIntervalMs = 0,
|
|
8634
|
+
batchSize = Number.POSITIVE_INFINITY,
|
|
8635
|
+
onTransportError,
|
|
8636
|
+
...rest
|
|
8637
|
+
} = opts;
|
|
8638
|
+
let headerWritten = false;
|
|
8639
|
+
const serializeRow = (row) => {
|
|
8640
|
+
if (!headerWritten && writeHeader) {
|
|
8641
|
+
headerWritten = true;
|
|
8642
|
+
const header = columns.map((c) => escapeCSVField(c, delimiter)).join(delimiter);
|
|
8643
|
+
const data = columns.map((c) => escapeCSVField(cellExtractor(row, c), delimiter)).join(delimiter);
|
|
8644
|
+
return `${header}
|
|
8645
|
+
${data}
|
|
8646
|
+
`;
|
|
8647
|
+
}
|
|
8648
|
+
return `${columns.map((c) => escapeCSVField(cellExtractor(row, c), delimiter)).join(delimiter)}
|
|
8649
|
+
`;
|
|
8650
|
+
};
|
|
8651
|
+
return toFile(source, writer, {
|
|
8652
|
+
serialize: serializeRow,
|
|
8653
|
+
flushIntervalMs,
|
|
8654
|
+
batchSize,
|
|
8655
|
+
onTransportError,
|
|
8656
|
+
...rest
|
|
8657
|
+
});
|
|
8658
|
+
}
|
|
8659
|
+
function toClickHouse(source, client, table, opts) {
|
|
8660
|
+
const {
|
|
8661
|
+
batchSize = 1e3,
|
|
8662
|
+
flushIntervalMs = 5e3,
|
|
8663
|
+
format = "JSONEachRow",
|
|
8664
|
+
transform = (v) => v,
|
|
8665
|
+
onTransportError,
|
|
8666
|
+
...rest
|
|
8667
|
+
} = opts ?? {};
|
|
8668
|
+
let buffer2 = [];
|
|
8669
|
+
let timer;
|
|
8670
|
+
let lastFlush = Promise.resolve();
|
|
8671
|
+
const doFlush = () => {
|
|
8672
|
+
if (buffer2.length === 0) return Promise.resolve();
|
|
8673
|
+
const batch2 = buffer2;
|
|
8674
|
+
buffer2 = [];
|
|
8675
|
+
try {
|
|
8676
|
+
const p = client.insert({ table, values: batch2, format }).catch((err) => {
|
|
8677
|
+
onTransportError?.({
|
|
8678
|
+
stage: "send",
|
|
8679
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8680
|
+
value: batch2
|
|
8681
|
+
});
|
|
8682
|
+
});
|
|
8683
|
+
lastFlush = p;
|
|
8684
|
+
return p;
|
|
8685
|
+
} catch (err) {
|
|
8686
|
+
onTransportError?.({
|
|
8687
|
+
stage: "send",
|
|
8688
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8689
|
+
value: batch2
|
|
8690
|
+
});
|
|
8691
|
+
return Promise.resolve();
|
|
8692
|
+
}
|
|
8693
|
+
};
|
|
8694
|
+
const scheduleFlush = () => {
|
|
8695
|
+
if (timer === void 0) {
|
|
8696
|
+
timer = setTimeout(() => {
|
|
8697
|
+
timer = void 0;
|
|
8698
|
+
doFlush();
|
|
8699
|
+
}, flushIntervalMs);
|
|
8700
|
+
}
|
|
8701
|
+
};
|
|
8702
|
+
const inner = node([source], () => void 0, {
|
|
8703
|
+
describeKind: "effect",
|
|
8704
|
+
...rest,
|
|
8705
|
+
onMessage(msg, _depIndex, _actions) {
|
|
8706
|
+
if (msg[0] === DATA) {
|
|
8707
|
+
const value = msg[1];
|
|
8708
|
+
try {
|
|
8709
|
+
buffer2.push(transform(value));
|
|
8710
|
+
} catch (err) {
|
|
8711
|
+
onTransportError?.({
|
|
8712
|
+
stage: "serialize",
|
|
8713
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8714
|
+
value
|
|
8715
|
+
});
|
|
8716
|
+
return true;
|
|
8717
|
+
}
|
|
8718
|
+
if (buffer2.length >= batchSize) doFlush();
|
|
8719
|
+
else scheduleFlush();
|
|
8720
|
+
return true;
|
|
8721
|
+
}
|
|
8722
|
+
if (msg[0] === COMPLETE || msg[0] === TEARDOWN) {
|
|
8723
|
+
doFlush();
|
|
8724
|
+
}
|
|
8725
|
+
return false;
|
|
8726
|
+
}
|
|
8727
|
+
});
|
|
8728
|
+
const unsub = inner.subscribe(() => {
|
|
8729
|
+
});
|
|
8730
|
+
const dispose = () => {
|
|
8731
|
+
if (timer !== void 0) {
|
|
8732
|
+
clearTimeout(timer);
|
|
8733
|
+
timer = void 0;
|
|
8734
|
+
}
|
|
8735
|
+
doFlush();
|
|
8736
|
+
unsub();
|
|
8737
|
+
};
|
|
8738
|
+
return {
|
|
8739
|
+
dispose,
|
|
8740
|
+
flush: () => doFlush().then(() => lastFlush)
|
|
8741
|
+
};
|
|
8742
|
+
}
|
|
8743
|
+
function toS3(source, client, bucket, opts) {
|
|
8744
|
+
const {
|
|
8745
|
+
format = "ndjson",
|
|
8746
|
+
keyGenerator = (seq2, timestampNs) => {
|
|
8747
|
+
const ms = Math.floor(timestampNs / 1e6);
|
|
8748
|
+
const ts = new Date(ms).toISOString().replace(/[:.]/g, "-");
|
|
8749
|
+
return `data/${ts}-${seq2}.${format === "ndjson" ? "ndjson" : "json"}`;
|
|
8750
|
+
},
|
|
8751
|
+
batchSize = 1e3,
|
|
8752
|
+
flushIntervalMs = 1e4,
|
|
8753
|
+
transform = (v) => v,
|
|
8754
|
+
onTransportError,
|
|
8755
|
+
...rest
|
|
8756
|
+
} = opts ?? {};
|
|
8757
|
+
let buffer2 = [];
|
|
8758
|
+
let timer;
|
|
8759
|
+
let seq = 0;
|
|
8760
|
+
let lastFlush = Promise.resolve();
|
|
8761
|
+
const doFlush = () => {
|
|
8762
|
+
if (buffer2.length === 0) return Promise.resolve();
|
|
8763
|
+
const batch2 = buffer2;
|
|
8764
|
+
buffer2 = [];
|
|
8765
|
+
seq += 1;
|
|
8766
|
+
const body = format === "ndjson" ? `${batch2.map((v) => JSON.stringify(v)).join("\n")}
|
|
8767
|
+
` : JSON.stringify(batch2);
|
|
8768
|
+
const contentType = format === "ndjson" ? "application/x-ndjson" : "application/json";
|
|
8769
|
+
const key = keyGenerator(seq, wallClockNs());
|
|
8770
|
+
try {
|
|
8771
|
+
const p = client.putObject({ Bucket: bucket, Key: key, Body: body, ContentType: contentType }).then(() => {
|
|
8772
|
+
}).catch((err) => {
|
|
8773
|
+
onTransportError?.({
|
|
8774
|
+
stage: "send",
|
|
8775
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8776
|
+
value: batch2
|
|
8777
|
+
});
|
|
8778
|
+
});
|
|
8779
|
+
lastFlush = p;
|
|
8780
|
+
return p;
|
|
8781
|
+
} catch (err) {
|
|
8782
|
+
onTransportError?.({
|
|
8783
|
+
stage: "send",
|
|
8784
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8785
|
+
value: batch2
|
|
8786
|
+
});
|
|
8787
|
+
return Promise.resolve();
|
|
8788
|
+
}
|
|
8789
|
+
};
|
|
8790
|
+
const scheduleFlush = () => {
|
|
8791
|
+
if (timer === void 0) {
|
|
8792
|
+
timer = setTimeout(() => {
|
|
8793
|
+
timer = void 0;
|
|
8794
|
+
doFlush();
|
|
8795
|
+
}, flushIntervalMs);
|
|
8796
|
+
}
|
|
8797
|
+
};
|
|
8798
|
+
const inner = node([source], () => void 0, {
|
|
8799
|
+
describeKind: "effect",
|
|
8800
|
+
...rest,
|
|
8801
|
+
onMessage(msg, _depIndex, _actions) {
|
|
8802
|
+
if (msg[0] === DATA) {
|
|
8803
|
+
const value = msg[1];
|
|
8804
|
+
try {
|
|
8805
|
+
buffer2.push(transform(value));
|
|
8806
|
+
} catch (err) {
|
|
8807
|
+
onTransportError?.({
|
|
8808
|
+
stage: "serialize",
|
|
8809
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8810
|
+
value
|
|
8811
|
+
});
|
|
8812
|
+
return true;
|
|
8813
|
+
}
|
|
8814
|
+
if (buffer2.length >= batchSize) doFlush();
|
|
8815
|
+
else scheduleFlush();
|
|
8816
|
+
return true;
|
|
8817
|
+
}
|
|
8818
|
+
if (msg[0] === COMPLETE || msg[0] === TEARDOWN) {
|
|
8819
|
+
doFlush();
|
|
8820
|
+
}
|
|
8821
|
+
return false;
|
|
8822
|
+
}
|
|
8823
|
+
});
|
|
8824
|
+
const unsub = inner.subscribe(() => {
|
|
8825
|
+
});
|
|
8826
|
+
const dispose = () => {
|
|
8827
|
+
if (timer !== void 0) {
|
|
8828
|
+
clearTimeout(timer);
|
|
8829
|
+
timer = void 0;
|
|
8830
|
+
}
|
|
8831
|
+
doFlush();
|
|
8832
|
+
unsub();
|
|
8833
|
+
};
|
|
8834
|
+
return {
|
|
8835
|
+
dispose,
|
|
8836
|
+
flush: () => doFlush().then(() => lastFlush)
|
|
8837
|
+
};
|
|
8838
|
+
}
|
|
8839
|
+
function toPostgres(source, client, table, opts) {
|
|
8840
|
+
const {
|
|
8841
|
+
toSQL = (v, t) => ({
|
|
8842
|
+
sql: `INSERT INTO "${t.replace(/"/g, '""')}" (data) VALUES ($1)`,
|
|
8843
|
+
params: [JSON.stringify(v)]
|
|
8844
|
+
}),
|
|
8845
|
+
onTransportError,
|
|
8846
|
+
...rest
|
|
8847
|
+
} = opts ?? {};
|
|
8848
|
+
const inner = node([source], () => void 0, {
|
|
8849
|
+
describeKind: "effect",
|
|
8850
|
+
...rest,
|
|
8851
|
+
onMessage(msg, _depIndex, _actions) {
|
|
8852
|
+
if (msg[0] === DATA) {
|
|
8853
|
+
const value = msg[1];
|
|
8854
|
+
let query;
|
|
8855
|
+
try {
|
|
8856
|
+
query = toSQL(value, table);
|
|
8857
|
+
} catch (err) {
|
|
8858
|
+
onTransportError?.({
|
|
8859
|
+
stage: "serialize",
|
|
8860
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8861
|
+
value
|
|
8862
|
+
});
|
|
8863
|
+
return true;
|
|
8864
|
+
}
|
|
8865
|
+
void client.query(query.sql, query.params).catch((err) => {
|
|
8866
|
+
onTransportError?.({
|
|
8867
|
+
stage: "send",
|
|
8868
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8869
|
+
value
|
|
8870
|
+
});
|
|
8871
|
+
});
|
|
8872
|
+
return true;
|
|
8873
|
+
}
|
|
8874
|
+
return false;
|
|
8875
|
+
}
|
|
8876
|
+
});
|
|
8877
|
+
return inner.subscribe(() => {
|
|
8878
|
+
});
|
|
8879
|
+
}
|
|
8880
|
+
function toMongo(source, collection2, opts) {
|
|
8881
|
+
const { toDocument = (v) => v, onTransportError, ...rest } = opts ?? {};
|
|
8882
|
+
const inner = node([source], () => void 0, {
|
|
8883
|
+
describeKind: "effect",
|
|
8884
|
+
...rest,
|
|
8885
|
+
onMessage(msg, _depIndex, _actions) {
|
|
8886
|
+
if (msg[0] === DATA) {
|
|
8887
|
+
const value = msg[1];
|
|
8888
|
+
let doc;
|
|
8889
|
+
try {
|
|
8890
|
+
doc = toDocument(value);
|
|
8891
|
+
} catch (err) {
|
|
8892
|
+
onTransportError?.({
|
|
8893
|
+
stage: "serialize",
|
|
8894
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8895
|
+
value
|
|
8896
|
+
});
|
|
8897
|
+
return true;
|
|
8898
|
+
}
|
|
8899
|
+
void collection2.insertOne(doc).catch((err) => {
|
|
8900
|
+
onTransportError?.({
|
|
8901
|
+
stage: "send",
|
|
8902
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8903
|
+
value
|
|
8904
|
+
});
|
|
8905
|
+
});
|
|
8906
|
+
return true;
|
|
8907
|
+
}
|
|
8908
|
+
return false;
|
|
8909
|
+
}
|
|
8910
|
+
});
|
|
8911
|
+
return inner.subscribe(() => {
|
|
8912
|
+
});
|
|
8913
|
+
}
|
|
8914
|
+
function toLoki(source, client, opts) {
|
|
8915
|
+
const {
|
|
8916
|
+
labels = {},
|
|
8917
|
+
toLine = (v) => JSON.stringify(v),
|
|
8918
|
+
toLabels,
|
|
8919
|
+
onTransportError,
|
|
8920
|
+
...rest
|
|
8921
|
+
} = opts ?? {};
|
|
8922
|
+
const inner = node([source], () => void 0, {
|
|
8923
|
+
describeKind: "effect",
|
|
8924
|
+
...rest,
|
|
8925
|
+
onMessage(msg, _depIndex, _actions) {
|
|
8926
|
+
if (msg[0] === DATA) {
|
|
8927
|
+
const value = msg[1];
|
|
8928
|
+
let line;
|
|
8929
|
+
try {
|
|
8930
|
+
line = toLine(value);
|
|
8931
|
+
} catch (err) {
|
|
8932
|
+
onTransportError?.({
|
|
8933
|
+
stage: "serialize",
|
|
8934
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8935
|
+
value
|
|
8936
|
+
});
|
|
8937
|
+
return true;
|
|
8938
|
+
}
|
|
8939
|
+
let streamLabels;
|
|
8940
|
+
try {
|
|
8941
|
+
streamLabels = toLabels ? { ...labels, ...toLabels(value) } : labels;
|
|
8942
|
+
} catch (err) {
|
|
8943
|
+
onTransportError?.({
|
|
8944
|
+
stage: "serialize",
|
|
8945
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8946
|
+
value
|
|
8947
|
+
});
|
|
8948
|
+
return true;
|
|
8949
|
+
}
|
|
8950
|
+
const ts = `${wallClockNs()}`;
|
|
8951
|
+
void client.push({ streams: [{ stream: streamLabels, values: [[ts, line]] }] }).catch((err) => {
|
|
8952
|
+
onTransportError?.({
|
|
8953
|
+
stage: "send",
|
|
8954
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8955
|
+
value
|
|
8956
|
+
});
|
|
8957
|
+
});
|
|
8958
|
+
return true;
|
|
8959
|
+
}
|
|
8960
|
+
return false;
|
|
8961
|
+
}
|
|
8962
|
+
});
|
|
8963
|
+
return inner.subscribe(() => {
|
|
8964
|
+
});
|
|
8965
|
+
}
|
|
8966
|
+
function toTempo(source, client, opts) {
|
|
8967
|
+
const { toResourceSpans = (v) => [v], onTransportError, ...rest } = opts ?? {};
|
|
8968
|
+
const inner = node([source], () => void 0, {
|
|
8969
|
+
describeKind: "effect",
|
|
8970
|
+
...rest,
|
|
8971
|
+
onMessage(msg, _depIndex, _actions) {
|
|
8972
|
+
if (msg[0] === DATA) {
|
|
8973
|
+
const value = msg[1];
|
|
8974
|
+
let spans;
|
|
8975
|
+
try {
|
|
8976
|
+
spans = toResourceSpans(value);
|
|
8977
|
+
} catch (err) {
|
|
8978
|
+
onTransportError?.({
|
|
8979
|
+
stage: "serialize",
|
|
8980
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8981
|
+
value
|
|
8982
|
+
});
|
|
8983
|
+
return true;
|
|
8984
|
+
}
|
|
8985
|
+
void client.push({ resourceSpans: spans }).catch((err) => {
|
|
8986
|
+
onTransportError?.({
|
|
8987
|
+
stage: "send",
|
|
8988
|
+
error: err instanceof Error ? err : new Error(String(err)),
|
|
8989
|
+
value
|
|
8990
|
+
});
|
|
8991
|
+
});
|
|
8992
|
+
return true;
|
|
8993
|
+
}
|
|
8994
|
+
return false;
|
|
8995
|
+
}
|
|
8996
|
+
});
|
|
8997
|
+
return inner.subscribe(() => {
|
|
8998
|
+
});
|
|
8999
|
+
}
|
|
9000
|
+
/**
 * Persists graph checkpoints to S3 as timestamped JSON objects.
 *
 * Each save writes `${prefix}${graph.name}/checkpoint-<epochMs>.json`
 * with content type `application/json`. Serialization or upload failures
 * are reported through `onError` and never throw.
 *
 * @param graph  - graph exposing `name` and `autoCheckpoint(adapter, opts)`.
 * @param client - S3-compatible client with `putObject({ Bucket, Key, Body, ContentType })`.
 * @param bucket - destination bucket name.
 * @param opts   - `{ prefix, debounceMs, compactEvery, onError }`.
 * @returns whatever `graph.autoCheckpoint` returns (typically a disposer).
 */
function checkpointToS3(graph, client, bucket, opts) {
  const { prefix = "checkpoints/", debounceMs, compactEvery, onError } = opts ?? {};
  const adapter = {
    save(data) {
      let serialized;
      try {
        serialized = JSON.stringify(data);
      } catch (err) {
        // Unserializable checkpoint (e.g. circular refs): report and skip.
        onError?.(err);
        return;
      }
      // wallClockNs() is nanoseconds; convert to whole milliseconds for the key.
      const timestampMs = Math.floor(wallClockNs() / 1e6);
      const objectKey = `${prefix}${graph.name}/checkpoint-${timestampMs}.json`;
      // Fire-and-forget upload; rejections are funneled into onError.
      void client.putObject({
        Bucket: bucket,
        Key: objectKey,
        Body: serialized,
        ContentType: "application/json"
      }).catch((err) => onError?.(err));
    }
  };
  return graph.autoCheckpoint(adapter, { debounceMs, compactEvery, onError });
}
|
|
9023
|
+
/**
 * Persists graph checkpoints to Redis under a single key.
 *
 * The key is `${prefix}${graph.name}` (computed once) and each save
 * overwrites it with the JSON-serialized checkpoint via `client.set`.
 * Serialization or transport failures are reported through `onError`
 * and never throw.
 *
 * @param graph  - graph exposing `name` and `autoCheckpoint(adapter, opts)`.
 * @param client - Redis-like client with a Promise-returning `set(key, value)`.
 * @param opts   - `{ prefix, debounceMs, compactEvery, onError }`.
 * @returns whatever `graph.autoCheckpoint` returns (typically a disposer).
 */
function checkpointToRedis(graph, client, opts) {
  const { prefix = "graphrefly:checkpoint:", debounceMs, compactEvery, onError } = opts ?? {};
  // Key is fixed for the lifetime of this subscription.
  const redisKey = `${prefix}${graph.name}`;
  const adapter = {
    save(data) {
      let serialized;
      try {
        serialized = JSON.stringify(data);
      } catch (err) {
        // Unserializable checkpoint: report and skip this save.
        onError?.(err);
        return;
      }
      // Fire-and-forget write; rejections are funneled into onError.
      void client.set(redisKey, serialized).catch((err) => onError?.(err));
    }
  };
  return graph.autoCheckpoint(adapter, { debounceMs, compactEvery, onError });
}
|
|
9040
|
+
/**
 * Producer node backed by a synchronous SQLite-style query.
 *
 * On start it runs `db.query(query, params)`, maps each row through
 * `mapRow`, emits every mapped row as a DATA message inside a single
 * batch, then emits COMPLETE. Any throw from the query or the mapper
 * is emitted downstream as a single ERROR message instead.
 *
 * @param db    - handle with a synchronous `query(sql, params) => rows` method.
 * @param query - SQL text to execute.
 * @param opts  - `{ mapRow, params, ...producerOptions }`.
 * @returns a producer node emitting the mapped rows.
 */
function fromSqlite(db, query, opts) {
  const { mapRow = (r) => r, params, ...producerOpts } = opts ?? {};
  return producer(
    (_deps, actions) => {
      let emitted;
      try {
        // Query and mapping are both guarded: a throw from either path
        // becomes a single downstream ERROR.
        emitted = db.query(query, params).map(mapRow);
      } catch (err) {
        const wrapped = err instanceof Error ? err : new Error(String(err));
        actions.down([[ERROR, wrapped]]);
        return void 0;
      }
      // Emit all rows plus COMPLETE atomically.
      batch(() => {
        for (const row of emitted) {
          actions.down([[DATA, row]]);
        }
        actions.down([[COMPLETE]]);
      });
      return void 0;
    },
    { describeKind: "producer", completeWhenDepsComplete: false, ...producerOpts }
  );
}
|
|
9063
|
+
/**
 * Effect sink: writes every DATA payload from `source` into a SQLite-style
 * table using a parameterized statement.
 *
 * The default `toSQL` stores the JSON-serialized value in a `data` column,
 * double-quoting the table identifier (with `"` doubled) to keep the
 * statement injection-safe. Mapping failures are reported with stage
 * "serialize", execution failures with stage "send"; neither propagates
 * into the graph.
 *
 * @param source - upstream graph node.
 * @param db     - handle with a synchronous `query(sql, params)` method.
 * @param table  - destination table name; must be non-empty and NUL-free.
 * @param opts   - `{ toSQL, onTransportError, ...nodeOptions }`.
 * @throws Error immediately if `table` is empty or contains a NUL byte.
 * @returns the unsubscribe function of the internal effect node.
 */
function toSqlite(source, db, table, opts) {
  // Reject names the quoting below cannot make safe.
  if (table.length === 0 || table.includes("\0")) {
    throw new Error(`toSqlite: invalid table name: ${JSON.stringify(table)}`);
  }
  const {
    toSQL = (v, t) => ({
      sql: `INSERT INTO "${t.replace(/"/g, '""')}" (data) VALUES (?)`,
      params: [JSON.stringify(v)]
    }),
    onTransportError,
    ...nodeOpts
  } = opts ?? {};
  // Normalize any thrown value into an Error and report it to the caller.
  const report = (stage, err, value) => {
    onTransportError?.({
      stage,
      error: err instanceof Error ? err : new Error(String(err)),
      value
    });
  };
  const sink = node([source], () => void 0, {
    describeKind: "effect",
    ...nodeOpts,
    onMessage(msg, _depIndex, _actions) {
      // Only DATA messages are written; everything else flows through.
      if (msg[0] !== DATA) return false;
      const value = msg[1];
      let stmt;
      try {
        stmt = toSQL(value, table);
      } catch (err) {
        report("serialize", err, value);
        return true;
      }
      try {
        db.query(stmt.sql, stmt.params);
      } catch (err) {
        report("send", err, value);
      }
      return true;
    }
  });
  return sink.subscribe(() => {
  });
}
|
|
8213
9109
|
|
|
8214
9110
|
// src/extra/checkpoint.ts
|
|
8215
9111
|
var import_node_crypto2 = require("crypto");
|
|
@@ -14661,6 +15557,8 @@ var version = "0.0.0";
|
|
|
14661
15557
|
cached,
|
|
14662
15558
|
catchError,
|
|
14663
15559
|
checkpointNodeValue,
|
|
15560
|
+
checkpointToRedis,
|
|
15561
|
+
checkpointToS3,
|
|
14664
15562
|
circuitBreaker,
|
|
14665
15563
|
combine,
|
|
14666
15564
|
combineLatest,
|
|
@@ -14715,11 +15613,15 @@ var version = "0.0.0";
|
|
|
14715
15613
|
fromIter,
|
|
14716
15614
|
fromKafka,
|
|
14717
15615
|
fromMCP,
|
|
15616
|
+
fromNATS,
|
|
14718
15617
|
fromNDJSON,
|
|
14719
15618
|
fromOTel,
|
|
14720
15619
|
fromPrometheus,
|
|
14721
15620
|
fromPromise,
|
|
15621
|
+
fromPulsar,
|
|
15622
|
+
fromRabbitMQ,
|
|
14722
15623
|
fromRedisStream,
|
|
15624
|
+
fromSqlite,
|
|
14723
15625
|
fromStatsD,
|
|
14724
15626
|
fromSyslog,
|
|
14725
15627
|
fromTimer,
|
|
@@ -14816,11 +15718,23 @@ var version = "0.0.0";
|
|
|
14816
15718
|
throwError,
|
|
14817
15719
|
timeout,
|
|
14818
15720
|
toArray,
|
|
15721
|
+
toCSV,
|
|
15722
|
+
toClickHouse,
|
|
15723
|
+
toFile,
|
|
14819
15724
|
toKafka,
|
|
15725
|
+
toLoki,
|
|
14820
15726
|
toMessages$,
|
|
15727
|
+
toMongo,
|
|
15728
|
+
toNATS,
|
|
14821
15729
|
toObservable,
|
|
15730
|
+
toPostgres,
|
|
15731
|
+
toPulsar,
|
|
15732
|
+
toRabbitMQ,
|
|
14822
15733
|
toRedisStream,
|
|
15734
|
+
toS3,
|
|
14823
15735
|
toSSE,
|
|
15736
|
+
toSqlite,
|
|
15737
|
+
toTempo,
|
|
14824
15738
|
toWebSocket,
|
|
14825
15739
|
tokenBucket,
|
|
14826
15740
|
tokenTracker,
|