@graphrefly/graphrefly 0.4.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -75,6 +75,8 @@ __export(extra_exports, {
  cached: () => cached,
  catchError: () => catchError,
  checkpointNodeValue: () => checkpointNodeValue,
+ checkpointToRedis: () => checkpointToRedis,
+ checkpointToS3: () => checkpointToS3,
  circuitBreaker: () => circuitBreaker,
  combine: () => combine,
  combineLatest: () => combineLatest,
@@ -116,10 +118,13 @@ __export(extra_exports, {
  fromIter: () => fromIter,
  fromKafka: () => fromKafka,
  fromMCP: () => fromMCP,
+ fromNATS: () => fromNATS,
  fromNDJSON: () => fromNDJSON,
  fromOTel: () => fromOTel,
  fromPrometheus: () => fromPrometheus,
  fromPromise: () => fromPromise,
+ fromPulsar: () => fromPulsar,
+ fromRabbitMQ: () => fromRabbitMQ,
  fromRedisStream: () => fromRedisStream,
  fromStatsD: () => fromStatsD,
  fromSyslog: () => fromSyslog,
@@ -183,11 +188,22 @@ __export(extra_exports, {
  throwError: () => throwError,
  timeout: () => timeout,
  toArray: () => toArray,
+ toCSV: () => toCSV,
+ toClickHouse: () => toClickHouse,
+ toFile: () => toFile,
  toKafka: () => toKafka,
+ toLoki: () => toLoki,
  toMessages$: () => toMessages$,
+ toMongo: () => toMongo,
+ toNATS: () => toNATS,
  toObservable: () => toObservable,
+ toPostgres: () => toPostgres,
+ toPulsar: () => toPulsar,
+ toRabbitMQ: () => toRabbitMQ,
  toRedisStream: () => toRedisStream,
+ toS3: () => toS3,
  toSSE: () => toSSE,
+ toTempo: () => toTempo,
  toWebSocket: () => toWebSocket,
  tokenBucket: () => tokenBucket,
  tokenTracker: () => tokenTracker,
@@ -1744,6 +1760,797 @@ function fromClickHouseWatch(client, query, opts) {
  };
  }, sourceOpts(rest));
  }
+ function fromPulsar(consumer, opts) {
+ const {
+ autoAck = true,
+ deserialize = (buf) => {
+ try {
+ return JSON.parse(buf.toString());
+ } catch {
+ return buf.toString();
+ }
+ },
+ ...rest
+ } = opts ?? {};
+ return producer((_d, a) => {
+ let active = true;
+ const loop = async () => {
+ while (active) {
+ try {
+ const msg = await consumer.receive();
+ if (!active) return;
+ a.emit({
+ topic: msg.getTopicName(),
+ messageId: msg.getMessageId().toString(),
+ key: msg.getPartitionKey(),
+ value: deserialize(msg.getData()),
+ properties: msg.getProperties(),
+ publishTime: msg.getPublishTimestamp(),
+ eventTime: msg.getEventTimestamp(),
+ timestampNs: wallClockNs()
+ });
+ if (autoAck) await consumer.acknowledge(msg);
+ } catch (err) {
+ if (active) a.down([[ERROR, err]]);
+ return;
+ }
+ }
+ };
+ void loop();
+ return () => {
+ active = false;
+ };
+ }, sourceOpts(rest));
+ }
+ function toPulsar(source, pulsarProducer, opts) {
+ const {
+ serialize = (v) => Buffer.from(JSON.stringify(v)),
+ keyExtractor,
+ propertiesExtractor,
+ onTransportError,
+ ...rest
+ } = opts ?? {};
+ const inner = node([source], () => void 0, {
+ describeKind: "effect",
+ ...rest,
+ onMessage(msg) {
+ if (msg[0] === DATA) {
+ const value = msg[1];
+ let data;
+ try {
+ data = serialize(value);
+ } catch (err) {
+ onTransportError?.({
+ stage: "serialize",
+ error: err instanceof Error ? err : new Error(String(err)),
+ value
+ });
+ return true;
+ }
+ void pulsarProducer.send({
+ data,
+ partitionKey: keyExtractor?.(value),
+ properties: propertiesExtractor?.(value)
+ }).catch((err) => {
+ onTransportError?.({
+ stage: "send",
+ error: err instanceof Error ? err : new Error(String(err)),
+ value
+ });
+ });
+ return true;
+ }
+ return false;
+ }
+ });
+ return inner.subscribe(() => {
+ });
+ }
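// Illustrative usage sketch (not part of the diff): wiring the new Pulsar adapters to the
// pulsar-client package. Topic, subscription and key names here are assumptions for the example.
import Pulsar from "pulsar-client";
const pulsarClient = new Pulsar.Client({ serviceUrl: "pulsar://localhost:6650" });
const consumer = await pulsarClient.subscribe({ topic: "events", subscription: "graphrefly", subscriptionType: "Shared" });
const producerHandle = await pulsarClient.createProducer({ topic: "events-out" });
// fromPulsar polls consumer.receive() in a loop, JSON-decodes the payload and (with autoAck, the
// default) acknowledges each message after it has been emitted downstream.
const events$ = fromPulsar(consumer, { autoAck: true });
// toPulsar serializes each value back to a Buffer and sends it; failures surface via onTransportError.
toPulsar(events$, producerHandle, {
  keyExtractor: (e) => e.key,
  onTransportError: (e) => console.error(e.stage, e.error)
});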
+ function fromNATS(client, subject, opts) {
+ const decoder = new TextDecoder();
+ const {
+ queue,
+ deserialize = (data) => {
+ const text = decoder.decode(data);
+ try {
+ return JSON.parse(text);
+ } catch {
+ return text;
+ }
+ },
+ ...rest
+ } = opts ?? {};
+ return producer((_d, a) => {
+ let active = true;
+ const sub = client.subscribe(subject, queue ? { queue } : void 0);
+ const loop = async () => {
+ try {
+ for await (const msg of sub) {
+ if (!active) return;
+ const headers = {};
+ if (msg.headers) {
+ for (const k of msg.headers.keys()) {
+ headers[k] = msg.headers.get(k);
+ }
+ }
+ a.emit({
+ subject: msg.subject,
+ data: deserialize(msg.data),
+ headers,
+ reply: msg.reply,
+ sid: msg.sid,
+ timestampNs: wallClockNs()
+ });
+ }
+ if (active) a.down([[COMPLETE]]);
+ } catch (err) {
+ if (active) a.down([[ERROR, err]]);
+ }
+ };
+ void loop();
+ return () => {
+ active = false;
+ };
+ }, sourceOpts(rest));
+ }
+ function toNATS(source, client, subject, opts) {
+ const encoder = new TextEncoder();
+ const {
+ serialize = (v) => encoder.encode(JSON.stringify(v)),
+ onTransportError,
+ ...rest
+ } = opts ?? {};
+ const inner = node([source], () => void 0, {
+ describeKind: "effect",
+ ...rest,
+ onMessage(msg) {
+ if (msg[0] === DATA) {
+ const value = msg[1];
+ let data;
+ try {
+ data = serialize(value);
+ } catch (err) {
+ onTransportError?.({
+ stage: "serialize",
+ error: err instanceof Error ? err : new Error(String(err)),
+ value
+ });
+ return true;
+ }
+ try {
+ client.publish(subject, data);
+ } catch (err) {
+ onTransportError?.({
+ stage: "send",
+ error: err instanceof Error ? err : new Error(String(err)),
+ value
+ });
+ }
+ return true;
+ }
+ return false;
+ }
+ });
+ return inner.subscribe(() => {
+ });
+ }
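// Illustrative usage sketch (not part of the diff): the NATS adapters expect a client with
// subscribe()/publish(), as provided by the "nats" package. Subject and queue names are assumptions.
import { connect } from "nats";
const nc = await connect({ servers: "localhost:4222" });
// fromNATS iterates the subscription and emits { subject, data, headers, reply, sid, timestampNs }.
const metrics$ = fromNATS(nc, "metrics.raw", { queue: "workers" });
// toNATS publishes each value, JSON-encoded by default; publish failures go to onTransportError.
toNATS(metrics$, nc, "metrics.normalized", { onTransportError: (e) => console.error(e.stage, e.error) });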
1937
+ function fromRabbitMQ(channel, queue, opts) {
1938
+ const {
1939
+ autoAck = true,
1940
+ deserialize = (buf) => {
1941
+ try {
1942
+ return JSON.parse(buf.toString());
1943
+ } catch {
1944
+ return buf.toString();
1945
+ }
1946
+ },
1947
+ ...rest
1948
+ } = opts ?? {};
1949
+ return producer((_d, a) => {
1950
+ let active = true;
1951
+ let consumerTag;
1952
+ const start = async () => {
1953
+ try {
1954
+ const result = await channel.consume(
1955
+ queue,
1956
+ (msg) => {
1957
+ if (!active) return;
1958
+ if (msg === null) {
1959
+ if (active) a.down([[ERROR, new Error("Consumer cancelled by broker")]]);
1960
+ return;
1961
+ }
1962
+ a.emit({
1963
+ queue,
1964
+ routingKey: msg.fields.routingKey,
1965
+ exchange: msg.fields.exchange,
1966
+ content: deserialize(msg.content),
1967
+ properties: msg.properties,
1968
+ deliveryTag: msg.fields.deliveryTag,
1969
+ redelivered: msg.fields.redelivered,
1970
+ timestampNs: wallClockNs()
1971
+ });
1972
+ if (autoAck) channel.ack(msg);
1973
+ },
1974
+ { noAck: false }
1975
+ );
1976
+ consumerTag = result.consumerTag;
1977
+ } catch (err) {
1978
+ if (active) a.down([[ERROR, err]]);
1979
+ }
1980
+ };
1981
+ void start();
1982
+ return () => {
1983
+ active = false;
1984
+ if (consumerTag !== void 0) {
1985
+ void channel.cancel(consumerTag);
1986
+ }
1987
+ };
1988
+ }, sourceOpts(rest));
1989
+ }
+ function toRabbitMQ(source, channel, exchange, opts) {
+ const {
+ serialize = (v) => Buffer.from(JSON.stringify(v)),
+ routingKeyExtractor = () => "",
+ onTransportError,
+ ...rest
+ } = opts ?? {};
+ const inner = node([source], () => void 0, {
+ describeKind: "effect",
+ ...rest,
+ onMessage(msg) {
+ if (msg[0] === DATA) {
+ const value = msg[1];
+ let routingKey;
+ try {
+ routingKey = routingKeyExtractor(value);
+ } catch (err) {
+ onTransportError?.({
+ stage: "routing_key",
+ error: err instanceof Error ? err : new Error(String(err)),
+ value
+ });
+ return true;
+ }
+ let content;
+ try {
+ content = serialize(value);
+ } catch (err) {
+ onTransportError?.({
+ stage: "serialize",
+ error: err instanceof Error ? err : new Error(String(err)),
+ value
+ });
+ return true;
+ }
+ try {
+ channel.publish(exchange, routingKey, content);
+ } catch (err) {
+ onTransportError?.({
+ stage: "send",
+ error: err instanceof Error ? err : new Error(String(err)),
+ value
+ });
+ }
+ return true;
+ }
+ return false;
+ }
+ });
+ return inner.subscribe(() => {
+ });
+ }
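// Illustrative usage sketch (not part of the diff): fromRabbitMQ/toRabbitMQ match the amqplib
// channel API (consume/ack/cancel/publish). Queue and exchange names are assumptions.
import amqp from "amqplib";
const conn = await amqp.connect("amqp://localhost");
const channel = await conn.createChannel();
// Registers a consumer with noAck: false; with autoAck (the default) each delivery is acked after
// emit, and the consumer tag is cancelled on teardown.
const orders$ = fromRabbitMQ(channel, "orders", { autoAck: true });
// Publishes each value to an exchange, deriving the routing key per value.
toRabbitMQ(orders$, channel, "orders-processed", { routingKeyExtractor: (o) => o.region ?? "" });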
2042
+ function toFile(source, writer, opts) {
2043
+ const {
2044
+ serialize = (v) => `${JSON.stringify(v)}
2045
+ `,
2046
+ flushIntervalMs = 0,
2047
+ batchSize = Number.POSITIVE_INFINITY,
2048
+ onTransportError,
2049
+ mode: _mode,
2050
+ ...rest
2051
+ } = opts ?? {};
2052
+ let buffer2 = [];
2053
+ let timer;
2054
+ const doFlush = () => {
2055
+ if (buffer2.length === 0) return;
2056
+ const chunk = buffer2.join("");
2057
+ buffer2 = [];
2058
+ try {
2059
+ writer.write(chunk);
2060
+ } catch (err) {
2061
+ onTransportError?.({
2062
+ stage: "send",
2063
+ error: err instanceof Error ? err : new Error(String(err)),
2064
+ value: chunk
2065
+ });
2066
+ }
2067
+ };
2068
+ const scheduleFlush = () => {
2069
+ if (flushIntervalMs > 0 && timer === void 0) {
2070
+ timer = setTimeout(() => {
2071
+ timer = void 0;
2072
+ doFlush();
2073
+ }, flushIntervalMs);
2074
+ }
2075
+ };
2076
+ const buffered = flushIntervalMs > 0 || batchSize < Number.POSITIVE_INFINITY;
2077
+ const inner = node([source], () => void 0, {
2078
+ describeKind: "effect",
2079
+ ...rest,
2080
+ onMessage(msg) {
2081
+ if (msg[0] === DATA) {
2082
+ const value = msg[1];
2083
+ let line;
2084
+ try {
2085
+ line = serialize(value);
2086
+ } catch (err) {
2087
+ onTransportError?.({
2088
+ stage: "serialize",
2089
+ error: err instanceof Error ? err : new Error(String(err)),
2090
+ value
2091
+ });
2092
+ return true;
2093
+ }
2094
+ if (buffered) {
2095
+ buffer2.push(line);
2096
+ if (buffer2.length >= batchSize) doFlush();
2097
+ else scheduleFlush();
2098
+ } else {
2099
+ try {
2100
+ writer.write(line);
2101
+ } catch (err) {
2102
+ onTransportError?.({
2103
+ stage: "send",
2104
+ error: err instanceof Error ? err : new Error(String(err)),
2105
+ value
2106
+ });
2107
+ }
2108
+ }
2109
+ return true;
2110
+ }
2111
+ if (msg[0] === COMPLETE || msg[0] === TEARDOWN) {
2112
+ doFlush();
2113
+ }
2114
+ return false;
2115
+ }
2116
+ });
2117
+ const unsub = inner.subscribe(() => {
2118
+ });
2119
+ const dispose = () => {
2120
+ if (timer !== void 0) {
2121
+ clearTimeout(timer);
2122
+ timer = void 0;
2123
+ }
2124
+ doFlush();
2125
+ writer.end();
2126
+ unsub();
2127
+ };
2128
+ return {
2129
+ dispose,
2130
+ flush: async () => {
2131
+ doFlush();
2132
+ }
2133
+ };
2134
+ }
2135
+ function escapeCSVField(value, delimiter) {
2136
+ if (value.includes(delimiter) || value.includes('"') || value.includes("\n")) {
2137
+ return `"${value.replace(/"/g, '""')}"`;
2138
+ }
2139
+ return value;
2140
+ }
2141
+ function toCSV(source, writer, opts) {
2142
+ const {
2143
+ columns,
2144
+ delimiter = ",",
2145
+ writeHeader = true,
2146
+ cellExtractor = (row, col) => String(row[col] ?? ""),
2147
+ flushIntervalMs = 0,
2148
+ batchSize = Number.POSITIVE_INFINITY,
2149
+ onTransportError,
2150
+ ...rest
2151
+ } = opts;
2152
+ let headerWritten = false;
2153
+ const serializeRow = (row) => {
2154
+ if (!headerWritten && writeHeader) {
2155
+ headerWritten = true;
2156
+ const header = columns.map((c) => escapeCSVField(c, delimiter)).join(delimiter);
2157
+ const data = columns.map((c) => escapeCSVField(cellExtractor(row, c), delimiter)).join(delimiter);
2158
+ return `${header}
2159
+ ${data}
2160
+ `;
2161
+ }
2162
+ return `${columns.map((c) => escapeCSVField(cellExtractor(row, c), delimiter)).join(delimiter)}
2163
+ `;
2164
+ };
2165
+ return toFile(source, writer, {
2166
+ serialize: serializeRow,
2167
+ flushIntervalMs,
2168
+ batchSize,
2169
+ onTransportError,
2170
+ ...rest
2171
+ });
2172
+ }
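// Illustrative usage sketch (not part of the diff): toFile/toCSV only need a writer exposing
// write() and end(), such as a Node fs write stream; events$ stands for any graphrefly source,
// and the paths and column names are assumptions.
import { createWriteStream } from "node:fs";
// NDJSON output, flushed after 100 buffered records or 500 ms, whichever comes first.
const ndjsonSink = toFile(events$, createWriteStream("events.ndjson"), { batchSize: 100, flushIntervalMs: 500 });
// CSV output with a header row; cells are stringified and escaped via escapeCSVField.
const csvSink = toCSV(events$, createWriteStream("events.csv"), { columns: ["ts", "user", "action"] });
// Both sinks return { dispose, flush }; dispose() flushes remaining rows, ends the writer and unsubscribes.
csvSink.dispose();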
2173
+ function toClickHouse(source, client, table, opts) {
2174
+ const {
2175
+ batchSize = 1e3,
2176
+ flushIntervalMs = 5e3,
2177
+ format = "JSONEachRow",
2178
+ transform = (v) => v,
2179
+ onTransportError,
2180
+ ...rest
2181
+ } = opts ?? {};
2182
+ let buffer2 = [];
2183
+ let timer;
2184
+ let lastFlush = Promise.resolve();
2185
+ const doFlush = () => {
2186
+ if (buffer2.length === 0) return Promise.resolve();
2187
+ const batch2 = buffer2;
2188
+ buffer2 = [];
2189
+ try {
2190
+ const p = client.insert({ table, values: batch2, format }).catch((err) => {
2191
+ onTransportError?.({
2192
+ stage: "send",
2193
+ error: err instanceof Error ? err : new Error(String(err)),
2194
+ value: batch2
2195
+ });
2196
+ });
2197
+ lastFlush = p;
2198
+ return p;
2199
+ } catch (err) {
2200
+ onTransportError?.({
2201
+ stage: "send",
2202
+ error: err instanceof Error ? err : new Error(String(err)),
2203
+ value: batch2
2204
+ });
2205
+ return Promise.resolve();
2206
+ }
2207
+ };
2208
+ const scheduleFlush = () => {
2209
+ if (timer === void 0) {
2210
+ timer = setTimeout(() => {
2211
+ timer = void 0;
2212
+ doFlush();
2213
+ }, flushIntervalMs);
2214
+ }
2215
+ };
2216
+ const inner = node([source], () => void 0, {
2217
+ describeKind: "effect",
2218
+ ...rest,
2219
+ onMessage(msg) {
2220
+ if (msg[0] === DATA) {
2221
+ const value = msg[1];
2222
+ try {
2223
+ buffer2.push(transform(value));
2224
+ } catch (err) {
2225
+ onTransportError?.({
2226
+ stage: "serialize",
2227
+ error: err instanceof Error ? err : new Error(String(err)),
2228
+ value
2229
+ });
2230
+ return true;
2231
+ }
2232
+ if (buffer2.length >= batchSize) doFlush();
2233
+ else scheduleFlush();
2234
+ return true;
2235
+ }
2236
+ if (msg[0] === COMPLETE || msg[0] === TEARDOWN) {
2237
+ doFlush();
2238
+ }
2239
+ return false;
2240
+ }
2241
+ });
2242
+ const unsub = inner.subscribe(() => {
2243
+ });
2244
+ const dispose = () => {
2245
+ if (timer !== void 0) {
2246
+ clearTimeout(timer);
2247
+ timer = void 0;
2248
+ }
2249
+ doFlush();
2250
+ unsub();
2251
+ };
2252
+ return {
2253
+ dispose,
2254
+ flush: () => doFlush().then(() => lastFlush)
2255
+ };
2256
+ }
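// Illustrative usage sketch (not part of the diff): toClickHouse buffers rows and calls
// client.insert({ table, values, format }), matching @clickhouse/client. The table name,
// URL and column mapping are assumptions.
import { createClient } from "@clickhouse/client";
const clickhouse = createClient({ url: "http://localhost:8123" });
// Rows are batched (1000 by default), flushed on a timer, on COMPLETE/TEARDOWN, or via flush().
const chSink = toClickHouse(events$, clickhouse, "events", {
  batchSize: 500,
  transform: (e) => ({ ts: e.ts, user: e.user, action: e.action })
});
await chSink.flush();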
2257
+ function toS3(source, client, bucket, opts) {
2258
+ const {
2259
+ format = "ndjson",
2260
+ keyGenerator = (seq2, timestampNs) => {
2261
+ const ms = Math.floor(timestampNs / 1e6);
2262
+ const ts = new Date(ms).toISOString().replace(/[:.]/g, "-");
2263
+ return `data/${ts}-${seq2}.${format === "ndjson" ? "ndjson" : "json"}`;
2264
+ },
2265
+ batchSize = 1e3,
2266
+ flushIntervalMs = 1e4,
2267
+ transform = (v) => v,
2268
+ onTransportError,
2269
+ ...rest
2270
+ } = opts ?? {};
2271
+ let buffer2 = [];
2272
+ let timer;
2273
+ let seq = 0;
2274
+ let lastFlush = Promise.resolve();
2275
+ const doFlush = () => {
2276
+ if (buffer2.length === 0) return Promise.resolve();
2277
+ const batch2 = buffer2;
2278
+ buffer2 = [];
2279
+ seq += 1;
2280
+ const body = format === "ndjson" ? `${batch2.map((v) => JSON.stringify(v)).join("\n")}
2281
+ ` : JSON.stringify(batch2);
2282
+ const contentType = format === "ndjson" ? "application/x-ndjson" : "application/json";
2283
+ const key = keyGenerator(seq, wallClockNs());
2284
+ try {
2285
+ const p = client.putObject({ Bucket: bucket, Key: key, Body: body, ContentType: contentType }).then(() => {
2286
+ }).catch((err) => {
2287
+ onTransportError?.({
2288
+ stage: "send",
2289
+ error: err instanceof Error ? err : new Error(String(err)),
2290
+ value: batch2
2291
+ });
2292
+ });
2293
+ lastFlush = p;
2294
+ return p;
2295
+ } catch (err) {
2296
+ onTransportError?.({
2297
+ stage: "send",
2298
+ error: err instanceof Error ? err : new Error(String(err)),
2299
+ value: batch2
2300
+ });
2301
+ return Promise.resolve();
2302
+ }
2303
+ };
2304
+ const scheduleFlush = () => {
2305
+ if (timer === void 0) {
2306
+ timer = setTimeout(() => {
2307
+ timer = void 0;
2308
+ doFlush();
2309
+ }, flushIntervalMs);
2310
+ }
2311
+ };
2312
+ const inner = node([source], () => void 0, {
2313
+ describeKind: "effect",
2314
+ ...rest,
2315
+ onMessage(msg) {
2316
+ if (msg[0] === DATA) {
2317
+ const value = msg[1];
2318
+ try {
2319
+ buffer2.push(transform(value));
2320
+ } catch (err) {
2321
+ onTransportError?.({
2322
+ stage: "serialize",
2323
+ error: err instanceof Error ? err : new Error(String(err)),
2324
+ value
2325
+ });
2326
+ return true;
2327
+ }
2328
+ if (buffer2.length >= batchSize) doFlush();
2329
+ else scheduleFlush();
2330
+ return true;
2331
+ }
2332
+ if (msg[0] === COMPLETE || msg[0] === TEARDOWN) {
2333
+ doFlush();
2334
+ }
2335
+ return false;
2336
+ }
2337
+ });
2338
+ const unsub = inner.subscribe(() => {
2339
+ });
2340
+ const dispose = () => {
2341
+ if (timer !== void 0) {
2342
+ clearTimeout(timer);
2343
+ timer = void 0;
2344
+ }
2345
+ doFlush();
2346
+ unsub();
2347
+ };
2348
+ return {
2349
+ dispose,
2350
+ flush: () => doFlush().then(() => lastFlush)
2351
+ };
2352
+ }
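// Illustrative usage sketch (not part of the diff): toS3 expects a client whose putObject()
// returns a promise, so an AWS SDK v3 S3Client can be adapted with a one-line wrapper.
// The bucket name and region are assumptions.
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";
const s3 = new S3Client({ region: "us-east-1" });
const s3Adapter = { putObject: (params) => s3.send(new PutObjectCommand(params)) };
// Batches are uploaded as NDJSON under keys like data/<timestamp>-<seq>.ndjson by default.
const s3Sink = toS3(events$, s3Adapter, "my-event-archive", { batchSize: 1000, flushIntervalMs: 10000 });
await s3Sink.flush();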
2353
+ function toPostgres(source, client, table, opts) {
2354
+ const {
2355
+ toSQL = (v, t) => ({
2356
+ sql: `INSERT INTO "${t.replace(/"/g, '""')}" (data) VALUES ($1)`,
2357
+ params: [JSON.stringify(v)]
2358
+ }),
2359
+ onTransportError,
2360
+ ...rest
2361
+ } = opts ?? {};
2362
+ const inner = node([source], () => void 0, {
2363
+ describeKind: "effect",
2364
+ ...rest,
2365
+ onMessage(msg) {
2366
+ if (msg[0] === DATA) {
2367
+ const value = msg[1];
2368
+ let query;
2369
+ try {
2370
+ query = toSQL(value, table);
2371
+ } catch (err) {
2372
+ onTransportError?.({
2373
+ stage: "serialize",
2374
+ error: err instanceof Error ? err : new Error(String(err)),
2375
+ value
2376
+ });
2377
+ return true;
2378
+ }
2379
+ void client.query(query.sql, query.params).catch((err) => {
2380
+ onTransportError?.({
2381
+ stage: "send",
2382
+ error: err instanceof Error ? err : new Error(String(err)),
2383
+ value
2384
+ });
2385
+ });
2386
+ return true;
2387
+ }
2388
+ return false;
2389
+ }
2390
+ });
2391
+ return inner.subscribe(() => {
2392
+ });
2393
+ }
+ function toMongo(source, collection, opts) {
+ const { toDocument = (v) => v, onTransportError, ...rest } = opts ?? {};
+ const inner = node([source], () => void 0, {
+ describeKind: "effect",
+ ...rest,
+ onMessage(msg) {
+ if (msg[0] === DATA) {
+ const value = msg[1];
+ let doc;
+ try {
+ doc = toDocument(value);
+ } catch (err) {
+ onTransportError?.({
+ stage: "serialize",
+ error: err instanceof Error ? err : new Error(String(err)),
+ value
+ });
+ return true;
+ }
+ void collection.insertOne(doc).catch((err) => {
+ onTransportError?.({
+ stage: "send",
+ error: err instanceof Error ? err : new Error(String(err)),
+ value
+ });
+ });
+ return true;
+ }
+ return false;
+ }
+ });
+ return inner.subscribe(() => {
+ });
+ }
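// Illustrative usage sketch (not part of the diff): toPostgres issues one parameterized INSERT per
// value via client.query(sql, params) (pg-compatible), and toMongo calls collection.insertOne().
// Connection strings, table and collection names are assumptions.
import pg from "pg";
import { MongoClient } from "mongodb";
const pgClient = new pg.Client({ connectionString: "postgres://localhost/app" });
await pgClient.connect();
// The default toSQL stores each value JSON-serialized in a single "data" column; override it for real schemas.
toPostgres(events$, pgClient, "events", { onTransportError: (e) => console.error(e.stage, e.error) });
const mongo = await new MongoClient("mongodb://localhost:27017").connect();
toMongo(events$, mongo.db("app").collection("events"), { toDocument: (e) => ({ ...e, insertedAt: new Date() }) });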
2428
+ function toLoki(source, client, opts) {
2429
+ const {
2430
+ labels = {},
2431
+ toLine = (v) => JSON.stringify(v),
2432
+ toLabels,
2433
+ onTransportError,
2434
+ ...rest
2435
+ } = opts ?? {};
2436
+ const inner = node([source], () => void 0, {
2437
+ describeKind: "effect",
2438
+ ...rest,
2439
+ onMessage(msg) {
2440
+ if (msg[0] === DATA) {
2441
+ const value = msg[1];
2442
+ let line;
2443
+ try {
2444
+ line = toLine(value);
2445
+ } catch (err) {
2446
+ onTransportError?.({
2447
+ stage: "serialize",
2448
+ error: err instanceof Error ? err : new Error(String(err)),
2449
+ value
2450
+ });
2451
+ return true;
2452
+ }
2453
+ let streamLabels;
2454
+ try {
2455
+ streamLabels = toLabels ? { ...labels, ...toLabels(value) } : labels;
2456
+ } catch (err) {
2457
+ onTransportError?.({
2458
+ stage: "serialize",
2459
+ error: err instanceof Error ? err : new Error(String(err)),
2460
+ value
2461
+ });
2462
+ return true;
2463
+ }
2464
+ const ts = `${wallClockNs()}`;
2465
+ void client.push({ streams: [{ stream: streamLabels, values: [[ts, line]] }] }).catch((err) => {
2466
+ onTransportError?.({
2467
+ stage: "send",
2468
+ error: err instanceof Error ? err : new Error(String(err)),
2469
+ value
2470
+ });
2471
+ });
2472
+ return true;
2473
+ }
2474
+ return false;
2475
+ }
2476
+ });
2477
+ return inner.subscribe(() => {
2478
+ });
2479
+ }
2480
+ function toTempo(source, client, opts) {
2481
+ const { toResourceSpans = (v) => [v], onTransportError, ...rest } = opts ?? {};
2482
+ const inner = node([source], () => void 0, {
2483
+ describeKind: "effect",
2484
+ ...rest,
2485
+ onMessage(msg) {
2486
+ if (msg[0] === DATA) {
2487
+ const value = msg[1];
2488
+ let spans;
2489
+ try {
2490
+ spans = toResourceSpans(value);
2491
+ } catch (err) {
2492
+ onTransportError?.({
2493
+ stage: "serialize",
2494
+ error: err instanceof Error ? err : new Error(String(err)),
2495
+ value
2496
+ });
2497
+ return true;
2498
+ }
2499
+ void client.push({ resourceSpans: spans }).catch((err) => {
2500
+ onTransportError?.({
2501
+ stage: "send",
2502
+ error: err instanceof Error ? err : new Error(String(err)),
2503
+ value
2504
+ });
2505
+ });
2506
+ return true;
2507
+ }
2508
+ return false;
2509
+ }
2510
+ });
2511
+ return inner.subscribe(() => {
2512
+ });
2513
+ }
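// Illustrative usage sketch (not part of the diff): toLoki/toTempo only require a client with a
// promise-returning push() method; neither function performs HTTP itself. A minimal fetch-based
// Loki client posting the payload to the standard /loki/api/v1/push endpoint could look like this
// (the base URL is an assumption, and logs$ stands for any graphrefly source). A Tempo client can
// follow the same shape, posting { resourceSpans } as OTLP/JSON to a collector's /v1/traces endpoint.
const lokiClient = {
  push: (payload) =>
    fetch("http://localhost:3100/loki/api/v1/push", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(payload)
    }).then((res) => {
      if (!res.ok) throw new Error(`Loki push failed: ${res.status}`);
    })
};
// Static labels merged with per-value labels; timestamps are nanosecond strings from wallClockNs().
toLoki(logs$, lokiClient, { labels: { app: "graphrefly" }, toLabels: (l) => ({ level: l.level }) });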
2514
+ function checkpointToS3(graph, client, bucket, opts) {
2515
+ const { prefix = "checkpoints/", debounceMs, compactEvery, onError } = opts ?? {};
2516
+ const adapter = {
2517
+ save(data) {
2518
+ const ms = Math.floor(wallClockNs() / 1e6);
2519
+ const key = `${prefix}${graph.name}/checkpoint-${ms}.json`;
2520
+ let body;
2521
+ try {
2522
+ body = JSON.stringify(data);
2523
+ } catch (err) {
2524
+ onError?.(err);
2525
+ return;
2526
+ }
2527
+ void client.putObject({
2528
+ Bucket: bucket,
2529
+ Key: key,
2530
+ Body: body,
2531
+ ContentType: "application/json"
2532
+ }).catch((err) => onError?.(err));
2533
+ }
2534
+ };
2535
+ return graph.autoCheckpoint(adapter, { debounceMs, compactEvery, onError });
2536
+ }
2537
+ function checkpointToRedis(graph, client, opts) {
2538
+ const { prefix = "graphrefly:checkpoint:", debounceMs, compactEvery, onError } = opts ?? {};
2539
+ const key = `${prefix}${graph.name}`;
2540
+ const adapter = {
2541
+ save(data) {
2542
+ let body;
2543
+ try {
2544
+ body = JSON.stringify(data);
2545
+ } catch (err) {
2546
+ onError?.(err);
2547
+ return;
2548
+ }
2549
+ void client.set(key, body).catch((err) => onError?.(err));
2550
+ }
2551
+ };
2552
+ return graph.autoCheckpoint(adapter, { debounceMs, compactEvery, onError });
2553
+ }
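// Illustrative usage sketch (not part of the diff): both checkpoint helpers wrap a single client
// call in a save() adapter and delegate to graph.autoCheckpoint(). A node-redis v4 style client
// with a promise-returning set() is assumed, and "graph" stands for an existing graphrefly graph.
import { createClient } from "redis";
const redis = createClient({ url: "redis://localhost:6379" });
await redis.connect();
// Persists each checkpoint as JSON under graphrefly:checkpoint:<graph.name>, debounced to once per second.
checkpointToRedis(graph, redis, { debounceMs: 1000, onError: (err) => console.error(err) });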

  // src/extra/checkpoint.ts
  import { randomBytes } from "crypto";
@@ -4708,6 +5515,22 @@ export {
  fromCSV,
  fromNDJSON,
  fromClickHouseWatch,
+ fromPulsar,
+ toPulsar,
+ fromNATS,
+ toNATS,
+ fromRabbitMQ,
+ toRabbitMQ,
+ toFile,
+ toCSV,
+ toClickHouse,
+ toS3,
+ toPostgres,
+ toMongo,
+ toLoki,
+ toTempo,
+ checkpointToS3,
+ checkpointToRedis,
  MemoryCheckpointAdapter,
  DictCheckpointAdapter,
  FileCheckpointAdapter,
@@ -4782,4 +5605,4 @@ export {
  workerSelf,
  extra_exports
  };
- //# sourceMappingURL=chunk-VPS7L64N.js.map
+ //# sourceMappingURL=chunk-ZERWUCGK.js.map