persistent-request-response 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +36 -0
- package/dist/index.d.mts +41 -0
- package/dist/index.d.ts +41 -0
- package/dist/index.js +417 -0
- package/dist/index.mjs +377 -0
- package/package.json +67 -0
package/README.md
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
This is a [Next.js](https://nextjs.org) project bootstrapped with [`create-next-app`](https://nextjs.org/docs/app/api-reference/cli/create-next-app).
|
|
2
|
+
|
|
3
|
+
## Getting Started
|
|
4
|
+
|
|
5
|
+
First, run the development server:
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm run dev
|
|
9
|
+
# or
|
|
10
|
+
yarn dev
|
|
11
|
+
# or
|
|
12
|
+
pnpm dev
|
|
13
|
+
# or
|
|
14
|
+
bun dev
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
|
|
18
|
+
|
|
19
|
+
You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.
|
|
20
|
+
|
|
21
|
+
This project uses [`next/font`](https://nextjs.org/docs/app/building-your-application/optimizing/fonts) to automatically optimize and load [Geist](https://vercel.com/font), a new font family for Vercel.
|
|
22
|
+
|
|
23
|
+
## Learn More
|
|
24
|
+
|
|
25
|
+
To learn more about Next.js, take a look at the following resources:
|
|
26
|
+
|
|
27
|
+
- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
|
|
28
|
+
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.
|
|
29
|
+
|
|
30
|
+
You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js) - your feedback and contributions are welcome!
|
|
31
|
+
|
|
32
|
+
## Deploy on Vercel
|
|
33
|
+
|
|
34
|
+
The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.
|
|
35
|
+
|
|
36
|
+
Check out our [Next.js deployment documentation](https://nextjs.org/docs/app/building-your-application/deploying) for more details.
|
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import { Redis } from '@upstash/redis';
|
|
2
|
+
|
|
3
|
+
interface KVAdapter {
|
|
4
|
+
rpush(key: string, ...values: string[]): Promise<void>;
|
|
5
|
+
lrange(key: string, start: number, stop: number): Promise<string[]>;
|
|
6
|
+
set(key: string, value: string, ttl: number): Promise<void>;
|
|
7
|
+
get(key: string): Promise<string | null>;
|
|
8
|
+
expire(key: string, ttl: number): Promise<void>;
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
interface Options {
|
|
12
|
+
kv: KVAdapter;
|
|
13
|
+
ttl?: number;
|
|
14
|
+
}
|
|
15
|
+
declare function createResilientSSE(options: Options): {
|
|
16
|
+
createStream: (streamId?: string) => PersistentStream;
|
|
17
|
+
resume: (streamId: string, lastEventId?: number) => Response;
|
|
18
|
+
};
|
|
19
|
+
|
|
20
|
+
interface PersistentFetchOptions {
|
|
21
|
+
disconnectSignal?: AbortSignal;
|
|
22
|
+
projectId?: string;
|
|
23
|
+
}
|
|
24
|
+
declare function persistentFetch(url: string, init?: RequestInit, options?: PersistentFetchOptions): Promise<Response>;
|
|
25
|
+
declare namespace persistentFetch {
|
|
26
|
+
var resume: (url: string, projectId: string) => Promise<Response | null>;
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
interface PersistentStream {
|
|
30
|
+
stream: ReadableStream;
|
|
31
|
+
/** accepts a pre-formatted SSE string (e.g. from formatSSE()) — id: N is prepended automatically */
|
|
32
|
+
enqueue: (sseContent: string) => Promise<void>;
|
|
33
|
+
close: () => Promise<void>;
|
|
34
|
+
streamId: string;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
declare function upstashAdapter(redis: Redis): KVAdapter;
|
|
38
|
+
|
|
39
|
+
declare const vercelKvAdapter: KVAdapter;
|
|
40
|
+
|
|
41
|
+
export { type KVAdapter, type PersistentStream, createResilientSSE, persistentFetch, upstashAdapter, vercelKvAdapter };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import { Redis } from '@upstash/redis';
|
|
2
|
+
|
|
3
|
+
interface KVAdapter {
|
|
4
|
+
rpush(key: string, ...values: string[]): Promise<void>;
|
|
5
|
+
lrange(key: string, start: number, stop: number): Promise<string[]>;
|
|
6
|
+
set(key: string, value: string, ttl: number): Promise<void>;
|
|
7
|
+
get(key: string): Promise<string | null>;
|
|
8
|
+
expire(key: string, ttl: number): Promise<void>;
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
interface Options {
|
|
12
|
+
kv: KVAdapter;
|
|
13
|
+
ttl?: number;
|
|
14
|
+
}
|
|
15
|
+
declare function createResilientSSE(options: Options): {
|
|
16
|
+
createStream: (streamId?: string) => PersistentStream;
|
|
17
|
+
resume: (streamId: string, lastEventId?: number) => Response;
|
|
18
|
+
};
|
|
19
|
+
|
|
20
|
+
interface PersistentFetchOptions {
|
|
21
|
+
disconnectSignal?: AbortSignal;
|
|
22
|
+
projectId?: string;
|
|
23
|
+
}
|
|
24
|
+
declare function persistentFetch(url: string, init?: RequestInit, options?: PersistentFetchOptions): Promise<Response>;
|
|
25
|
+
declare namespace persistentFetch {
|
|
26
|
+
var resume: (url: string, projectId: string) => Promise<Response | null>;
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
interface PersistentStream {
|
|
30
|
+
stream: ReadableStream;
|
|
31
|
+
/** accepts a pre-formatted SSE string (e.g. from formatSSE()) — id: N is prepended automatically */
|
|
32
|
+
enqueue: (sseContent: string) => Promise<void>;
|
|
33
|
+
close: () => Promise<void>;
|
|
34
|
+
streamId: string;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
declare function upstashAdapter(redis: Redis): KVAdapter;
|
|
38
|
+
|
|
39
|
+
declare const vercelKvAdapter: KVAdapter;
|
|
40
|
+
|
|
41
|
+
export { type KVAdapter, type PersistentStream, createResilientSSE, persistentFetch, upstashAdapter, vercelKvAdapter };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,417 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __export = (target, all) => {
|
|
9
|
+
for (var name in all)
|
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
+
};
|
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
+
for (let key of __getOwnPropNames(from))
|
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
+
}
|
|
18
|
+
return to;
|
|
19
|
+
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// lib/resilient-sse/index.ts
|
|
31
|
+
var index_exports = {};
|
|
32
|
+
__export(index_exports, {
|
|
33
|
+
createResilientSSE: () => createResilientSSE,
|
|
34
|
+
persistentFetch: () => persistentFetch,
|
|
35
|
+
upstashAdapter: () => upstashAdapter,
|
|
36
|
+
vercelKvAdapter: () => vercelKvAdapter
|
|
37
|
+
});
|
|
38
|
+
module.exports = __toCommonJS(index_exports);
|
|
39
|
+
|
|
40
|
+
// lib/resilient-sse/server/createPersistentStream.ts
|
|
41
|
+
var import_uuid = require("uuid");
|
|
42
|
+
|
|
43
|
+
// lib/resilient-sse/server/fileLogger.ts
|
|
44
|
+
var import_fs = __toESM(require("fs"));
|
|
45
|
+
var import_path = __toESM(require("path"));
|
|
46
|
+
var LOG_FILE = import_path.default.join(process.cwd(), "sse-debug.log");
|
|
47
|
+
var MAX_PREVIEW = 80;
|
|
48
|
+
function ts() {
|
|
49
|
+
const d = /* @__PURE__ */ new Date();
|
|
50
|
+
return d.getHours().toString().padStart(2, "0") + ":" + d.getMinutes().toString().padStart(2, "0") + ":" + d.getSeconds().toString().padStart(2, "0") + "." + d.getMilliseconds().toString().padStart(3, "0");
|
|
51
|
+
}
|
|
52
|
+
function short(id) {
|
|
53
|
+
return id.slice(0, 8);
|
|
54
|
+
}
|
|
55
|
+
function preview(s) {
|
|
56
|
+
const oneline = s.replace(/\n/g, "\u21B5");
|
|
57
|
+
return oneline.length > MAX_PREVIEW ? oneline.slice(0, MAX_PREVIEW) + "\u2026" : oneline;
|
|
58
|
+
}
|
|
59
|
+
function write(line) {
|
|
60
|
+
if (process.env.NODE_ENV !== "development") return;
|
|
61
|
+
import_fs.default.appendFile(LOG_FILE, line + "\n", () => {
|
|
62
|
+
});
|
|
63
|
+
}
|
|
64
|
+
function logCreate(streamId) {
|
|
65
|
+
write(`[${ts()}] [CREATE ] sid=${short(streamId)}`);
|
|
66
|
+
}
|
|
67
|
+
function logEnqueue(streamId, eventId, sseContent) {
|
|
68
|
+
write(`[${ts()}] [ENQUEUE ] sid=${short(streamId)} id=${String(eventId).padStart(4)} ${preview(sseContent)}`);
|
|
69
|
+
}
|
|
70
|
+
function logClose(streamId, totalChunks, reason) {
|
|
71
|
+
write(`[${ts()}] [CLOSE ] sid=${short(streamId)} total=${totalChunks} reason=${reason}`);
|
|
72
|
+
}
|
|
73
|
+
function logResumeStart(streamId, fromEventId) {
|
|
74
|
+
write(`[${ts()}] [RESUME ] sid=${short(streamId)} fromId=${fromEventId}`);
|
|
75
|
+
}
|
|
76
|
+
function logResumeBuffered(streamId, count) {
|
|
77
|
+
write(`[${ts()}] [BUFFERED ] sid=${short(streamId)} replaying=${count} buffered chunks`);
|
|
78
|
+
}
|
|
79
|
+
function logResumePoll(streamId, nextIndex, newCount) {
|
|
80
|
+
write(`[${ts()}] [POLL ] sid=${short(streamId)} nextIdx=${nextIndex} newChunks=${newCount}`);
|
|
81
|
+
}
|
|
82
|
+
function logResumeDone(streamId, totalSent) {
|
|
83
|
+
write(`[${ts()}] [RES_DONE ] sid=${short(streamId)} totalSent=${totalSent}`);
|
|
84
|
+
}
|
|
85
|
+
function logResumeError(streamId, err) {
|
|
86
|
+
write(`[${ts()}] [RES_ERR ] sid=${short(streamId)} ${String(err)}`);
|
|
87
|
+
}
|
|
88
|
+
function logSeparator() {
|
|
89
|
+
write("\u2500".repeat(100));
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
// lib/resilient-sse/server/createPersistentStream.ts
|
|
93
|
+
function createPersistentStream(options) {
|
|
94
|
+
const { kv: kv2, ttl, streamId = (0, import_uuid.v4)() } = options;
|
|
95
|
+
const chunksKey = `stream:${streamId}:chunks`;
|
|
96
|
+
const statusKey = `stream:${streamId}:status`;
|
|
97
|
+
let controller;
|
|
98
|
+
const encoder = new TextEncoder();
|
|
99
|
+
let currentIndex = 0;
|
|
100
|
+
const BATCH_SIZE = 10;
|
|
101
|
+
const pendingBatch = [];
|
|
102
|
+
let writeQueue = Promise.resolve();
|
|
103
|
+
const flushBatch = () => {
|
|
104
|
+
if (pendingBatch.length === 0) return;
|
|
105
|
+
const batch = pendingBatch.splice(0, pendingBatch.length);
|
|
106
|
+
writeQueue = writeQueue.then(() => kv2.rpush(chunksKey, ...batch)).catch(console.error);
|
|
107
|
+
};
|
|
108
|
+
const stream = new ReadableStream({
|
|
109
|
+
start(c) {
|
|
110
|
+
controller = c;
|
|
111
|
+
}
|
|
112
|
+
});
|
|
113
|
+
kv2.set(statusKey, "streaming", ttl).catch(console.error);
|
|
114
|
+
logSeparator();
|
|
115
|
+
logCreate(streamId);
|
|
116
|
+
const enqueue = async (sseContent) => {
|
|
117
|
+
const eventId = currentIndex++;
|
|
118
|
+
const withId = `id: ${eventId}
|
|
119
|
+
${sseContent}`;
|
|
120
|
+
pendingBatch.push(withId);
|
|
121
|
+
if (pendingBatch.length >= BATCH_SIZE) flushBatch();
|
|
122
|
+
if (eventId % 500 === 0 && eventId > 0) {
|
|
123
|
+
writeQueue = writeQueue.then(() => kv2.expire(chunksKey, ttl)).catch(console.error);
|
|
124
|
+
}
|
|
125
|
+
logEnqueue(streamId, eventId, sseContent);
|
|
126
|
+
try {
|
|
127
|
+
controller.enqueue(encoder.encode(withId));
|
|
128
|
+
} catch (e) {
|
|
129
|
+
}
|
|
130
|
+
};
|
|
131
|
+
const close = async () => {
|
|
132
|
+
flushBatch();
|
|
133
|
+
await writeQueue;
|
|
134
|
+
await kv2.expire(chunksKey, ttl);
|
|
135
|
+
await kv2.set(statusKey, "done", ttl);
|
|
136
|
+
logClose(streamId, currentIndex, "done");
|
|
137
|
+
try {
|
|
138
|
+
controller.close();
|
|
139
|
+
} catch (e) {
|
|
140
|
+
}
|
|
141
|
+
};
|
|
142
|
+
return { stream, enqueue, close, streamId };
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
// lib/resilient-sse/server/resume.ts
|
|
146
|
+
function sleep(ms) {
|
|
147
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
148
|
+
}
|
|
149
|
+
function resume(kv2, streamId, lastEventId = -1, pollInterval = 100, ttl = 600) {
|
|
150
|
+
const chunksKey = `stream:${streamId}:chunks`;
|
|
151
|
+
const statusKey = `stream:${streamId}:status`;
|
|
152
|
+
const encoder = new TextEncoder();
|
|
153
|
+
let cancelled = false;
|
|
154
|
+
const stream = new ReadableStream({
|
|
155
|
+
cancel() {
|
|
156
|
+
cancelled = true;
|
|
157
|
+
},
|
|
158
|
+
async start(controller) {
|
|
159
|
+
const MAX_POLL_MS = 2 * 60 * 1e3;
|
|
160
|
+
try {
|
|
161
|
+
const startIndex = lastEventId + 1;
|
|
162
|
+
logResumeStart(streamId, lastEventId);
|
|
163
|
+
const buffered = await kv2.lrange(chunksKey, startIndex, -1);
|
|
164
|
+
logResumeBuffered(streamId, buffered.length);
|
|
165
|
+
for (const chunk of buffered) {
|
|
166
|
+
if (cancelled) return;
|
|
167
|
+
controller.enqueue(encoder.encode(chunk));
|
|
168
|
+
}
|
|
169
|
+
let status = await kv2.get(statusKey);
|
|
170
|
+
if (status === "done") {
|
|
171
|
+
logResumeDone(streamId, buffered.length);
|
|
172
|
+
controller.close();
|
|
173
|
+
return;
|
|
174
|
+
}
|
|
175
|
+
let nextIndex = startIndex + buffered.length;
|
|
176
|
+
let totalSent = buffered.length;
|
|
177
|
+
const deadline = Date.now() + MAX_POLL_MS;
|
|
178
|
+
while (true) {
|
|
179
|
+
await sleep(pollInterval);
|
|
180
|
+
if (cancelled) return;
|
|
181
|
+
if (Date.now() > deadline) {
|
|
182
|
+
logResumeError(streamId, "poll timeout \u2014 upstream stream never completed");
|
|
183
|
+
controller.close();
|
|
184
|
+
return;
|
|
185
|
+
}
|
|
186
|
+
status = await kv2.get(statusKey);
|
|
187
|
+
const newChunks = await kv2.lrange(chunksKey, nextIndex, -1);
|
|
188
|
+
logResumePoll(streamId, nextIndex, newChunks.length);
|
|
189
|
+
for (const chunk of newChunks) {
|
|
190
|
+
if (cancelled) return;
|
|
191
|
+
controller.enqueue(encoder.encode(chunk));
|
|
192
|
+
}
|
|
193
|
+
nextIndex += newChunks.length;
|
|
194
|
+
totalSent += newChunks.length;
|
|
195
|
+
if (status === "done" || status === null) {
|
|
196
|
+
logResumeDone(streamId, totalSent);
|
|
197
|
+
controller.close();
|
|
198
|
+
return;
|
|
199
|
+
}
|
|
200
|
+
}
|
|
201
|
+
} catch (err) {
|
|
202
|
+
console.error("[resume] error", err);
|
|
203
|
+
logResumeError(streamId, err);
|
|
204
|
+
try {
|
|
205
|
+
controller.close();
|
|
206
|
+
} catch (e) {
|
|
207
|
+
}
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
});
|
|
211
|
+
return new Response(stream, {
|
|
212
|
+
headers: {
|
|
213
|
+
"Content-Type": "text/event-stream",
|
|
214
|
+
"Cache-Control": "no-cache, no-transform",
|
|
215
|
+
"Connection": "keep-alive",
|
|
216
|
+
"X-Accel-Buffering": "no"
|
|
217
|
+
}
|
|
218
|
+
});
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
// lib/resilient-sse/createResilientSSE.ts
|
|
222
|
+
function createResilientSSE(options) {
|
|
223
|
+
const { kv: kv2, ttl = 600 } = options;
|
|
224
|
+
return {
|
|
225
|
+
createStream: (streamId) => createPersistentStream({ kv: kv2, ttl, streamId }),
|
|
226
|
+
resume: (streamId, lastEventId = -1) => resume(kv2, streamId, lastEventId, 100, ttl)
|
|
227
|
+
};
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
// lib/resilient-sse/client/debugLog.ts
|
|
231
|
+
var MAX_ENTRIES = 100;
|
|
232
|
+
var nextId = 0;
|
|
233
|
+
var entries = [];
|
|
234
|
+
var listeners = /* @__PURE__ */ new Set();
|
|
235
|
+
function logSSE(type, data = {}) {
|
|
236
|
+
entries.push({ id: nextId++, timestamp: Date.now(), type, data });
|
|
237
|
+
if (entries.length > MAX_ENTRIES) entries.shift();
|
|
238
|
+
listeners.forEach((fn) => fn());
|
|
239
|
+
}
|
|
240
|
+
|
|
241
|
+
// lib/resilient-sse/client/persistentFetch.ts
|
|
242
|
+
var streamKey = (projectId) => `pf-stream-id-${projectId}`;
|
|
243
|
+
var eventKey = (projectId) => `pf-last-event-id-${projectId}`;
|
|
244
|
+
async function persistentFetch(url, init, options) {
|
|
245
|
+
const response = await fetch(url, init);
|
|
246
|
+
if (!response.ok || !response.body) {
|
|
247
|
+
return response;
|
|
248
|
+
}
|
|
249
|
+
const streamId = response.headers.get("X-Stream-Id");
|
|
250
|
+
const projectId = options == null ? void 0 : options.projectId;
|
|
251
|
+
if (streamId && projectId) {
|
|
252
|
+
sessionStorage.setItem(streamKey(projectId), streamId);
|
|
253
|
+
sessionStorage.setItem(eventKey(projectId), "-1");
|
|
254
|
+
logSSE("stream_start", { streamId });
|
|
255
|
+
}
|
|
256
|
+
const resilientBody = buildResilientStream(
|
|
257
|
+
response.body,
|
|
258
|
+
url,
|
|
259
|
+
init,
|
|
260
|
+
streamId,
|
|
261
|
+
projectId,
|
|
262
|
+
options == null ? void 0 : options.disconnectSignal
|
|
263
|
+
);
|
|
264
|
+
return new Response(resilientBody, {
|
|
265
|
+
status: response.status,
|
|
266
|
+
headers: response.headers
|
|
267
|
+
});
|
|
268
|
+
}
|
|
269
|
+
persistentFetch.resume = async (url, projectId) => {
|
|
270
|
+
const streamId = sessionStorage.getItem(streamKey(projectId));
|
|
271
|
+
if (!streamId) return null;
|
|
272
|
+
const response = await fetch(url, {
|
|
273
|
+
method: "POST",
|
|
274
|
+
headers: { "Content-Type": "application/json" },
|
|
275
|
+
body: JSON.stringify({ streamId, lastEventId: -1 })
|
|
276
|
+
});
|
|
277
|
+
if (!response.ok || !response.body) {
|
|
278
|
+
clearSession(projectId);
|
|
279
|
+
return null;
|
|
280
|
+
}
|
|
281
|
+
const resilientBody = buildResilientStream(response.body, url, void 0, streamId, projectId);
|
|
282
|
+
return new Response(resilientBody, {
|
|
283
|
+
status: response.status,
|
|
284
|
+
headers: response.headers
|
|
285
|
+
});
|
|
286
|
+
};
|
|
287
|
+
function buildResilientStream(initialBody, url, init, streamId, projectId, disconnectSignal) {
|
|
288
|
+
let cancelled = false;
|
|
289
|
+
return new ReadableStream({
|
|
290
|
+
cancel() {
|
|
291
|
+
cancelled = true;
|
|
292
|
+
if (projectId) clearSession(projectId);
|
|
293
|
+
},
|
|
294
|
+
async start(controller) {
|
|
295
|
+
var _a;
|
|
296
|
+
const initialDone = await readStream(initialBody, controller, () => cancelled, disconnectSignal, projectId);
|
|
297
|
+
if (initialDone) {
|
|
298
|
+
try {
|
|
299
|
+
controller.close();
|
|
300
|
+
} catch (e) {
|
|
301
|
+
}
|
|
302
|
+
return;
|
|
303
|
+
}
|
|
304
|
+
while (!cancelled && streamId) {
|
|
305
|
+
const lastEventId = parseInt(
|
|
306
|
+
(_a = projectId ? sessionStorage.getItem(eventKey(projectId)) : null) != null ? _a : "-1"
|
|
307
|
+
);
|
|
308
|
+
logSSE("disconnected", { streamId, lastEventId });
|
|
309
|
+
await sleep2(1e3);
|
|
310
|
+
if (cancelled) break;
|
|
311
|
+
logSSE("reconnecting", { streamId, lastEventId });
|
|
312
|
+
try {
|
|
313
|
+
const response = await fetch(url, {
|
|
314
|
+
method: "POST",
|
|
315
|
+
headers: { "Content-Type": "application/json" },
|
|
316
|
+
body: JSON.stringify({ streamId, lastEventId })
|
|
317
|
+
});
|
|
318
|
+
if (!response.ok || !response.body) break;
|
|
319
|
+
logSSE("resumed", { streamId, fromEventId: lastEventId });
|
|
320
|
+
const done = await readStream(response.body, controller, () => cancelled, void 0, projectId);
|
|
321
|
+
if (done) break;
|
|
322
|
+
} catch (e) {
|
|
323
|
+
}
|
|
324
|
+
}
|
|
325
|
+
try {
|
|
326
|
+
controller.close();
|
|
327
|
+
} catch (e) {
|
|
328
|
+
}
|
|
329
|
+
}
|
|
330
|
+
});
|
|
331
|
+
}
|
|
332
|
+
async function readStream(body, controller, isCancelled, disconnectSignal, currentProjectId) {
|
|
333
|
+
var _a;
|
|
334
|
+
const reader = body.getReader();
|
|
335
|
+
const onDisconnect = () => reader.cancel();
|
|
336
|
+
disconnectSignal == null ? void 0 : disconnectSignal.addEventListener("abort", onDisconnect);
|
|
337
|
+
const decoder = new TextDecoder();
|
|
338
|
+
let buffer = "";
|
|
339
|
+
try {
|
|
340
|
+
while (true) {
|
|
341
|
+
const { done, value } = await reader.read();
|
|
342
|
+
if (done || isCancelled()) break;
|
|
343
|
+
const text = decoder.decode(value, { stream: true });
|
|
344
|
+
buffer += text;
|
|
345
|
+
const events = buffer.split("\n\n");
|
|
346
|
+
buffer = (_a = events.pop()) != null ? _a : "";
|
|
347
|
+
for (const event of events) {
|
|
348
|
+
let id = null;
|
|
349
|
+
let isDone = false;
|
|
350
|
+
for (const line of event.split("\n")) {
|
|
351
|
+
if (line.startsWith("id: ")) id = parseInt(line.slice(4).trim());
|
|
352
|
+
if (line.startsWith("event: ") && line.slice(7).trim() === "done") isDone = true;
|
|
353
|
+
if (line.startsWith("data: ") && line.includes('"type":"done"')) isDone = true;
|
|
354
|
+
}
|
|
355
|
+
if (id !== null) {
|
|
356
|
+
if (currentProjectId) sessionStorage.setItem(eventKey(currentProjectId), String(id));
|
|
357
|
+
logSSE("chunk", { eventId: id });
|
|
358
|
+
}
|
|
359
|
+
controller.enqueue(new TextEncoder().encode(event + "\n\n"));
|
|
360
|
+
if (isDone) {
|
|
361
|
+
logSSE("done", { lastEventId: id });
|
|
362
|
+
if (currentProjectId) clearSession(currentProjectId);
|
|
363
|
+
return true;
|
|
364
|
+
}
|
|
365
|
+
}
|
|
366
|
+
}
|
|
367
|
+
} catch (e) {
|
|
368
|
+
} finally {
|
|
369
|
+
disconnectSignal == null ? void 0 : disconnectSignal.removeEventListener("abort", onDisconnect);
|
|
370
|
+
try {
|
|
371
|
+
reader.releaseLock();
|
|
372
|
+
} catch (e) {
|
|
373
|
+
}
|
|
374
|
+
}
|
|
375
|
+
return false;
|
|
376
|
+
}
|
|
377
|
+
function clearSession(projectId) {
|
|
378
|
+
sessionStorage.removeItem(streamKey(projectId));
|
|
379
|
+
sessionStorage.removeItem(eventKey(projectId));
|
|
380
|
+
}
|
|
381
|
+
function sleep2(ms) {
|
|
382
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
383
|
+
}
|
|
384
|
+
|
|
385
|
+
// lib/resilient-sse/kv/upstash.ts
|
|
386
|
+
function upstashAdapter(redis) {
|
|
387
|
+
return {
|
|
388
|
+
rpush: (key, ...values) => redis.rpush(key, ...values).then(() => {
|
|
389
|
+
}),
|
|
390
|
+
lrange: (key, start, stop) => redis.lrange(key, start, stop),
|
|
391
|
+
set: (key, value, ttl) => redis.set(key, value, { ex: ttl }).then(() => {
|
|
392
|
+
}),
|
|
393
|
+
get: (key) => redis.get(key),
|
|
394
|
+
expire: (key, ttl) => redis.expire(key, ttl).then(() => {
|
|
395
|
+
})
|
|
396
|
+
};
|
|
397
|
+
}
|
|
398
|
+
|
|
399
|
+
// lib/resilient-sse/kv/vercelKv.ts
|
|
400
|
+
var import_kv = require("@vercel/kv");
|
|
401
|
+
var vercelKvAdapter = {
|
|
402
|
+
rpush: (key, ...values) => import_kv.kv.rpush(key, ...values).then(() => {
|
|
403
|
+
}),
|
|
404
|
+
lrange: (key, start, stop) => import_kv.kv.lrange(key, start, stop),
|
|
405
|
+
set: (key, value, ttl) => import_kv.kv.set(key, value, { ex: ttl }).then(() => {
|
|
406
|
+
}),
|
|
407
|
+
get: (key) => import_kv.kv.get(key),
|
|
408
|
+
expire: (key, ttl) => import_kv.kv.expire(key, ttl).then(() => {
|
|
409
|
+
})
|
|
410
|
+
};
|
|
411
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
412
|
+
0 && (module.exports = {
|
|
413
|
+
createResilientSSE,
|
|
414
|
+
persistentFetch,
|
|
415
|
+
upstashAdapter,
|
|
416
|
+
vercelKvAdapter
|
|
417
|
+
});
|
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,377 @@
|
|
|
1
|
+
// lib/resilient-sse/server/createPersistentStream.ts
|
|
2
|
+
import { v4 as uuidv4 } from "uuid";
|
|
3
|
+
|
|
4
|
+
// lib/resilient-sse/server/fileLogger.ts
|
|
5
|
+
import fs from "fs";
|
|
6
|
+
import path from "path";
|
|
7
|
+
var LOG_FILE = path.join(process.cwd(), "sse-debug.log");
|
|
8
|
+
var MAX_PREVIEW = 80;
|
|
9
|
+
function ts() {
|
|
10
|
+
const d = /* @__PURE__ */ new Date();
|
|
11
|
+
return d.getHours().toString().padStart(2, "0") + ":" + d.getMinutes().toString().padStart(2, "0") + ":" + d.getSeconds().toString().padStart(2, "0") + "." + d.getMilliseconds().toString().padStart(3, "0");
|
|
12
|
+
}
|
|
13
|
+
function short(id) {
|
|
14
|
+
return id.slice(0, 8);
|
|
15
|
+
}
|
|
16
|
+
function preview(s) {
|
|
17
|
+
const oneline = s.replace(/\n/g, "\u21B5");
|
|
18
|
+
return oneline.length > MAX_PREVIEW ? oneline.slice(0, MAX_PREVIEW) + "\u2026" : oneline;
|
|
19
|
+
}
|
|
20
|
+
function write(line) {
|
|
21
|
+
if (process.env.NODE_ENV !== "development") return;
|
|
22
|
+
fs.appendFile(LOG_FILE, line + "\n", () => {
|
|
23
|
+
});
|
|
24
|
+
}
|
|
25
|
+
function logCreate(streamId) {
|
|
26
|
+
write(`[${ts()}] [CREATE ] sid=${short(streamId)}`);
|
|
27
|
+
}
|
|
28
|
+
function logEnqueue(streamId, eventId, sseContent) {
|
|
29
|
+
write(`[${ts()}] [ENQUEUE ] sid=${short(streamId)} id=${String(eventId).padStart(4)} ${preview(sseContent)}`);
|
|
30
|
+
}
|
|
31
|
+
function logClose(streamId, totalChunks, reason) {
|
|
32
|
+
write(`[${ts()}] [CLOSE ] sid=${short(streamId)} total=${totalChunks} reason=${reason}`);
|
|
33
|
+
}
|
|
34
|
+
function logResumeStart(streamId, fromEventId) {
|
|
35
|
+
write(`[${ts()}] [RESUME ] sid=${short(streamId)} fromId=${fromEventId}`);
|
|
36
|
+
}
|
|
37
|
+
function logResumeBuffered(streamId, count) {
|
|
38
|
+
write(`[${ts()}] [BUFFERED ] sid=${short(streamId)} replaying=${count} buffered chunks`);
|
|
39
|
+
}
|
|
40
|
+
function logResumePoll(streamId, nextIndex, newCount) {
|
|
41
|
+
write(`[${ts()}] [POLL ] sid=${short(streamId)} nextIdx=${nextIndex} newChunks=${newCount}`);
|
|
42
|
+
}
|
|
43
|
+
function logResumeDone(streamId, totalSent) {
|
|
44
|
+
write(`[${ts()}] [RES_DONE ] sid=${short(streamId)} totalSent=${totalSent}`);
|
|
45
|
+
}
|
|
46
|
+
function logResumeError(streamId, err) {
|
|
47
|
+
write(`[${ts()}] [RES_ERR ] sid=${short(streamId)} ${String(err)}`);
|
|
48
|
+
}
|
|
49
|
+
function logSeparator() {
|
|
50
|
+
write("\u2500".repeat(100));
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
// lib/resilient-sse/server/createPersistentStream.ts
|
|
54
|
+
function createPersistentStream(options) {
|
|
55
|
+
const { kv: kv2, ttl, streamId = uuidv4() } = options;
|
|
56
|
+
const chunksKey = `stream:${streamId}:chunks`;
|
|
57
|
+
const statusKey = `stream:${streamId}:status`;
|
|
58
|
+
let controller;
|
|
59
|
+
const encoder = new TextEncoder();
|
|
60
|
+
let currentIndex = 0;
|
|
61
|
+
const BATCH_SIZE = 10;
|
|
62
|
+
const pendingBatch = [];
|
|
63
|
+
let writeQueue = Promise.resolve();
|
|
64
|
+
const flushBatch = () => {
|
|
65
|
+
if (pendingBatch.length === 0) return;
|
|
66
|
+
const batch = pendingBatch.splice(0, pendingBatch.length);
|
|
67
|
+
writeQueue = writeQueue.then(() => kv2.rpush(chunksKey, ...batch)).catch(console.error);
|
|
68
|
+
};
|
|
69
|
+
const stream = new ReadableStream({
|
|
70
|
+
start(c) {
|
|
71
|
+
controller = c;
|
|
72
|
+
}
|
|
73
|
+
});
|
|
74
|
+
kv2.set(statusKey, "streaming", ttl).catch(console.error);
|
|
75
|
+
logSeparator();
|
|
76
|
+
logCreate(streamId);
|
|
77
|
+
const enqueue = async (sseContent) => {
|
|
78
|
+
const eventId = currentIndex++;
|
|
79
|
+
const withId = `id: ${eventId}
|
|
80
|
+
${sseContent}`;
|
|
81
|
+
pendingBatch.push(withId);
|
|
82
|
+
if (pendingBatch.length >= BATCH_SIZE) flushBatch();
|
|
83
|
+
if (eventId % 500 === 0 && eventId > 0) {
|
|
84
|
+
writeQueue = writeQueue.then(() => kv2.expire(chunksKey, ttl)).catch(console.error);
|
|
85
|
+
}
|
|
86
|
+
logEnqueue(streamId, eventId, sseContent);
|
|
87
|
+
try {
|
|
88
|
+
controller.enqueue(encoder.encode(withId));
|
|
89
|
+
} catch (e) {
|
|
90
|
+
}
|
|
91
|
+
};
|
|
92
|
+
const close = async () => {
|
|
93
|
+
flushBatch();
|
|
94
|
+
await writeQueue;
|
|
95
|
+
await kv2.expire(chunksKey, ttl);
|
|
96
|
+
await kv2.set(statusKey, "done", ttl);
|
|
97
|
+
logClose(streamId, currentIndex, "done");
|
|
98
|
+
try {
|
|
99
|
+
controller.close();
|
|
100
|
+
} catch (e) {
|
|
101
|
+
}
|
|
102
|
+
};
|
|
103
|
+
return { stream, enqueue, close, streamId };
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
// lib/resilient-sse/server/resume.ts
|
|
107
|
+
function sleep(ms) {
|
|
108
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
109
|
+
}
|
|
110
|
+
function resume(kv2, streamId, lastEventId = -1, pollInterval = 100, ttl = 600) {
|
|
111
|
+
const chunksKey = `stream:${streamId}:chunks`;
|
|
112
|
+
const statusKey = `stream:${streamId}:status`;
|
|
113
|
+
const encoder = new TextEncoder();
|
|
114
|
+
let cancelled = false;
|
|
115
|
+
const stream = new ReadableStream({
|
|
116
|
+
cancel() {
|
|
117
|
+
cancelled = true;
|
|
118
|
+
},
|
|
119
|
+
async start(controller) {
|
|
120
|
+
const MAX_POLL_MS = 2 * 60 * 1e3;
|
|
121
|
+
try {
|
|
122
|
+
const startIndex = lastEventId + 1;
|
|
123
|
+
logResumeStart(streamId, lastEventId);
|
|
124
|
+
const buffered = await kv2.lrange(chunksKey, startIndex, -1);
|
|
125
|
+
logResumeBuffered(streamId, buffered.length);
|
|
126
|
+
for (const chunk of buffered) {
|
|
127
|
+
if (cancelled) return;
|
|
128
|
+
controller.enqueue(encoder.encode(chunk));
|
|
129
|
+
}
|
|
130
|
+
let status = await kv2.get(statusKey);
|
|
131
|
+
if (status === "done") {
|
|
132
|
+
logResumeDone(streamId, buffered.length);
|
|
133
|
+
controller.close();
|
|
134
|
+
return;
|
|
135
|
+
}
|
|
136
|
+
let nextIndex = startIndex + buffered.length;
|
|
137
|
+
let totalSent = buffered.length;
|
|
138
|
+
const deadline = Date.now() + MAX_POLL_MS;
|
|
139
|
+
while (true) {
|
|
140
|
+
await sleep(pollInterval);
|
|
141
|
+
if (cancelled) return;
|
|
142
|
+
if (Date.now() > deadline) {
|
|
143
|
+
logResumeError(streamId, "poll timeout \u2014 upstream stream never completed");
|
|
144
|
+
controller.close();
|
|
145
|
+
return;
|
|
146
|
+
}
|
|
147
|
+
status = await kv2.get(statusKey);
|
|
148
|
+
const newChunks = await kv2.lrange(chunksKey, nextIndex, -1);
|
|
149
|
+
logResumePoll(streamId, nextIndex, newChunks.length);
|
|
150
|
+
for (const chunk of newChunks) {
|
|
151
|
+
if (cancelled) return;
|
|
152
|
+
controller.enqueue(encoder.encode(chunk));
|
|
153
|
+
}
|
|
154
|
+
nextIndex += newChunks.length;
|
|
155
|
+
totalSent += newChunks.length;
|
|
156
|
+
if (status === "done" || status === null) {
|
|
157
|
+
logResumeDone(streamId, totalSent);
|
|
158
|
+
controller.close();
|
|
159
|
+
return;
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
} catch (err) {
|
|
163
|
+
console.error("[resume] error", err);
|
|
164
|
+
logResumeError(streamId, err);
|
|
165
|
+
try {
|
|
166
|
+
controller.close();
|
|
167
|
+
} catch (e) {
|
|
168
|
+
}
|
|
169
|
+
}
|
|
170
|
+
}
|
|
171
|
+
});
|
|
172
|
+
return new Response(stream, {
|
|
173
|
+
headers: {
|
|
174
|
+
"Content-Type": "text/event-stream",
|
|
175
|
+
"Cache-Control": "no-cache, no-transform",
|
|
176
|
+
"Connection": "keep-alive",
|
|
177
|
+
"X-Accel-Buffering": "no"
|
|
178
|
+
}
|
|
179
|
+
});
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
// lib/resilient-sse/createResilientSSE.ts
|
|
183
|
+
function createResilientSSE(options) {
|
|
184
|
+
const { kv: kv2, ttl = 600 } = options;
|
|
185
|
+
return {
|
|
186
|
+
createStream: (streamId) => createPersistentStream({ kv: kv2, ttl, streamId }),
|
|
187
|
+
resume: (streamId, lastEventId = -1) => resume(kv2, streamId, lastEventId, 100, ttl)
|
|
188
|
+
};
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
// lib/resilient-sse/client/debugLog.ts
// In-memory ring buffer of client-side SSE debug events plus change listeners.
var MAX_ENTRIES = 100;
var nextId = 0;
var entries = [];
var listeners = /* @__PURE__ */ new Set();
// Record one debug event and notify all subscribers.
// Oldest entry is dropped once the buffer exceeds MAX_ENTRIES.
function logSSE(type, data = {}) {
  const entry = { id: nextId++, timestamp: Date.now(), type, data };
  entries.push(entry);
  if (entries.length > MAX_ENTRIES) {
    entries.shift();
  }
  for (const fn of listeners) {
    fn();
  }
}
|
|
201
|
+
|
|
202
|
+
// lib/resilient-sse/client/persistentFetch.ts
// sessionStorage keys used to persist resume state per project.
var streamKey = (projectId) => "pf-stream-id-" + projectId;
var eventKey = (projectId) => "pf-last-event-id-" + projectId;
|
|
205
|
+
// Drop-in fetch wrapper: performs the request, then wraps the response body
// in a stream that transparently reconnects and resumes on disconnect.
// Non-OK or bodyless responses are returned untouched.
async function persistentFetch(url, init, options) {
  const response = await fetch(url, init);
  if (!response.ok || !response.body) {
    return response;
  }
  const streamId = response.headers.get("X-Stream-Id");
  const projectId = options == null ? void 0 : options.projectId;
  if (streamId && projectId) {
    // Persist ids so persistentFetch.resume() can pick this stream back up
    // after a page reload; last-event-id starts at -1 (nothing received yet).
    sessionStorage.setItem(streamKey(projectId), streamId);
    sessionStorage.setItem(eventKey(projectId), "-1");
    logSSE("stream_start", { streamId });
  }
  const disconnectSignal = options == null ? void 0 : options.disconnectSignal;
  const body = buildResilientStream(response.body, url, init, streamId, projectId, disconnectSignal);
  return new Response(body, {
    status: response.status,
    headers: response.headers
  });
}
|
|
230
|
+
// Resume a previously persisted stream for `projectId`, or return null when
// no stream id is stored or the server rejects the resume request.
persistentFetch.resume = async (url, projectId) => {
  const storedStreamId = sessionStorage.getItem(streamKey(projectId));
  if (!storedStreamId) return null;
  // NOTE(review): requests a full replay (lastEventId: -1) instead of the
  // stored last-event-id — confirm this is intentional for cold resumes.
  const response = await fetch(url, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ streamId: storedStreamId, lastEventId: -1 })
  });
  if (!response.ok || !response.body) {
    // Server no longer knows this stream; forget it so we don't retry.
    clearSession(projectId);
    return null;
  }
  const body = buildResilientStream(response.body, url, void 0, storedStreamId, projectId);
  return new Response(body, {
    status: response.status,
    headers: response.headers
  });
};
|
|
248
|
+
// Wrap `initialBody` in a ReadableStream that, when the upstream drops before
// the "done" event, repeatedly re-POSTs to `url` with the stored last-event-id
// to resume where it left off. `init` is only used by the caller's first
// request; reconnects send a fixed JSON POST.
// NOTE(review): `init` is accepted but unused here — presumably kept for
// signature symmetry with persistentFetch; confirm.
function buildResilientStream(initialBody, url, init, streamId, projectId, disconnectSignal) {
  let cancelled = false;
  return new ReadableStream({
    cancel() {
      // Consumer walked away: stop reconnecting and forget resume state.
      cancelled = true;
      if (projectId) clearSession(projectId);
    },
    async start(controller) {
      var _a;
      // First pass: drain the body we already have. readStream returns true
      // only when the terminal "done" event was seen.
      const initialDone = await readStream(initialBody, controller, () => cancelled, disconnectSignal, projectId);
      if (initialDone) {
        try {
          controller.close();
        } catch (e) {
        }
        return;
      }
      // Reconnect loop: only possible when the server gave us a stream id.
      while (!cancelled && streamId) {
        // Resume point comes from sessionStorage (updated by readStream);
        // falls back to -1 (replay everything) when nothing is stored.
        const lastEventId = parseInt(
          (_a = projectId ? sessionStorage.getItem(eventKey(projectId)) : null) != null ? _a : "-1"
        );
        logSSE("disconnected", { streamId, lastEventId });
        // Fixed 1s backoff before each reconnect attempt.
        await sleep2(1e3);
        if (cancelled) break;
        logSSE("reconnecting", { streamId, lastEventId });
        try {
          const response = await fetch(url, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ streamId, lastEventId })
          });
          // A rejected resume means the server no longer has the stream.
          if (!response.ok || !response.body) break;
          logSSE("resumed", { streamId, fromEventId: lastEventId });
          const done = await readStream(response.body, controller, () => cancelled, void 0, projectId);
          if (done) break;
        } catch (e) {
          // Network error: fall through and retry on the next loop iteration.
        }
      }
      try {
        controller.close();
      } catch (e) {
      }
    }
  });
}
|
|
293
|
+
// Pump one SSE response body into `controller`, splitting it into events,
// recording each event id into sessionStorage (when a project id is given)
// and watching for the terminal "done" marker.
// Returns true when the stream completed cleanly ("done" event seen),
// false when it ended early and the caller should attempt a resume.
async function readStream(body, controller, isCancelled, disconnectSignal, currentProjectId) {
  var _a;
  const reader = body.getReader();
  // Aborting the signal cancels the reader, unblocking the pending read().
  const onDisconnect = () => reader.cancel();
  disconnectSignal == null ? void 0 : disconnectSignal.addEventListener("abort", onDisconnect);
  const decoder = new TextDecoder();
  // Hoisted: previously a new TextEncoder was allocated for every event.
  const encoder = new TextEncoder();
  let buffer = "";
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done || isCancelled()) break;
      const text = decoder.decode(value, { stream: true });
      buffer += text;
      // SSE events are separated by a blank line; keep any trailing
      // partial event in the buffer for the next chunk.
      const events = buffer.split("\n\n");
      buffer = (_a = events.pop()) != null ? _a : "";
      for (const event of events) {
        let id = null;
        let isDone = false;
        for (const line of event.split("\n")) {
          if (line.startsWith("id: ")) id = parseInt(line.slice(4).trim());
          if (line.startsWith("event: ") && line.slice(7).trim() === "done") isDone = true;
          if (line.startsWith("data: ") && line.includes('"type":"done"')) isDone = true;
        }
        if (id !== null) {
          // Persist progress so a reconnect resumes from this event.
          if (currentProjectId) sessionStorage.setItem(eventKey(currentProjectId), String(id));
          logSSE("chunk", { eventId: id });
        }
        controller.enqueue(encoder.encode(event + "\n\n"));
        if (isDone) {
          logSSE("done", { lastEventId: id });
          // Stream is finished: resume state is no longer needed.
          if (currentProjectId) clearSession(currentProjectId);
          return true;
        }
      }
    }
  } catch (e) {
    // Read errors are treated the same as a dropped connection.
  } finally {
    disconnectSignal == null ? void 0 : disconnectSignal.removeEventListener("abort", onDisconnect);
    try {
      reader.releaseLock();
    } catch (e) {
    }
  }
  return false;
}
|
|
338
|
+
// Forget the persisted stream/event ids for a project.
function clearSession(projectId) {
  for (const key of [streamKey(projectId), eventKey(projectId)]) {
    sessionStorage.removeItem(key);
  }
}
|
|
342
|
+
// Promise that resolves (with undefined) after `ms` milliseconds.
function sleep2(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
|
345
|
+
|
|
346
|
+
// lib/resilient-sse/kv/upstash.ts
// Wrap an @upstash/redis client in the minimal KV interface this library
// uses. Write operations resolve to void so callers cannot depend on
// redis-specific return values; reads pass through unchanged.
function upstashAdapter(redis) {
  const toVoid = () => {
  };
  return {
    rpush: (key, ...values) => redis.rpush(key, ...values).then(toVoid),
    lrange: (key, start, stop) => redis.lrange(key, start, stop),
    // `ex` sets the key's TTL in seconds.
    set: (key, value, ttl) => redis.set(key, value, { ex: ttl }).then(toVoid),
    get: (key) => redis.get(key),
    expire: (key, ttl) => redis.expire(key, ttl).then(toVoid)
  };
}
|
|
359
|
+
|
|
360
|
+
// lib/resilient-sse/kv/vercelKv.ts
|
|
361
|
+
import { kv } from "@vercel/kv";
|
|
362
|
+
// Adapter over @vercel/kv exposing the same minimal KV interface as
// upstashAdapter; write operations resolve to void, reads pass through.
var vercelKvAdapter = {
  async rpush(key, ...values) {
    await kv.rpush(key, ...values);
  },
  lrange(key, start, stop) {
    return kv.lrange(key, start, stop);
  },
  // `ex` sets the key's TTL in seconds.
  async set(key, value, ttl) {
    await kv.set(key, value, { ex: ttl });
  },
  get(key) {
    return kv.get(key);
  },
  async expire(key, ttl) {
    await kv.expire(key, ttl);
  }
};
|
|
372
|
+
export {
|
|
373
|
+
createResilientSSE,
|
|
374
|
+
persistentFetch,
|
|
375
|
+
upstashAdapter,
|
|
376
|
+
vercelKvAdapter
|
|
377
|
+
};
|
package/package.json
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "persistent-request-response",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Resilient SSE streaming with automatic reconnection and Redis-backed replay",
|
|
5
|
+
"private": false,
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"module": "./dist/index.mjs",
|
|
8
|
+
"types": "./dist/index.d.ts",
|
|
9
|
+
"exports": {
|
|
10
|
+
".": {
|
|
11
|
+
"types": "./dist/index.d.ts",
|
|
12
|
+
"import": "./dist/index.mjs",
|
|
13
|
+
"require": "./dist/index.js"
|
|
14
|
+
}
|
|
15
|
+
},
|
|
16
|
+
"files": [
|
|
17
|
+
"dist"
|
|
18
|
+
],
|
|
19
|
+
"scripts": {
|
|
20
|
+
"dev": "next dev",
|
|
21
|
+
"build:lib": "tsup",
|
|
22
|
+
"build": "next build",
|
|
23
|
+
"start": "next start",
|
|
24
|
+
"lint": "eslint"
|
|
25
|
+
},
|
|
26
|
+
"peerDependencies": {
|
|
27
|
+
"@upstash/redis": ">=1.0.0",
|
|
28
|
+
"@vercel/kv": ">=1.0.0"
|
|
29
|
+
},
|
|
30
|
+
"peerDependenciesMeta": {
|
|
31
|
+
"@upstash/redis": {
|
|
32
|
+
"optional": true
|
|
33
|
+
},
|
|
34
|
+
"@vercel/kv": {
|
|
35
|
+
"optional": true
|
|
36
|
+
}
|
|
37
|
+
},
|
|
38
|
+
"dependencies": {
|
|
39
|
+
"next": "16.2.2",
|
|
40
|
+
"react": "19.2.4",
|
|
41
|
+
"react-dom": "19.2.4",
|
|
42
|
+
"uuid": "^13.0.0"
|
|
43
|
+
},
|
|
44
|
+
"devDependencies": {
|
|
45
|
+
"@tailwindcss/postcss": "^4",
|
|
46
|
+
"@types/node": "^20",
|
|
47
|
+
"@types/react": "^19",
|
|
48
|
+
"@types/react-dom": "^19",
|
|
49
|
+
"@types/uuid": "^10.0.0",
|
|
50
|
+
"@upstash/redis": "^1.37.0",
|
|
51
|
+
"@vercel/kv": "^3.0.0",
|
|
52
|
+
"eslint": "^9",
|
|
53
|
+
"eslint-config-next": "16.2.2",
|
|
54
|
+
"tailwindcss": "^4",
|
|
55
|
+
"tsup": "^8.5.1",
|
|
56
|
+
"typescript": "^5"
|
|
57
|
+
},
|
|
58
|
+
"keywords": [
|
|
59
|
+
"sse",
|
|
60
|
+
"server-sent-events",
|
|
61
|
+
"resilient",
|
|
62
|
+
"streaming",
|
|
63
|
+
"redis",
|
|
64
|
+
"reconnect"
|
|
65
|
+
],
|
|
66
|
+
"license": "MIT"
|
|
67
|
+
}
|