@upstash/qstash 2.6.2 → 2.6.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +49 -4
- package/chunk-DZD3BOMO.js +999 -0
- package/chunk-PTZPACVC.mjs +999 -0
- package/index.d.mts +59 -11
- package/index.d.ts +59 -11
- package/index.js +32 -963
- package/index.mjs +16 -947
- package/nextjs.js +1 -1
- package/nextjs.mjs +1 -1
- package/nuxt.d.mts +11 -0
- package/nuxt.d.ts +11 -0
- package/nuxt.js +48 -0
- package/nuxt.mjs +48 -0
- package/package.json +1 -1
- package/solidjs.d.mts +10 -0
- package/solidjs.d.ts +10 -0
- package/solidjs.js +43 -0
- package/solidjs.mjs +43 -0
- package/svelte.d.mts +10 -0
- package/svelte.d.ts +10 -0
- package/svelte.js +44 -0
- package/svelte.mjs +44 -0
|
@@ -0,0 +1,999 @@
|
|
|
1
|
+
// src/client/dlq.ts
var DLQ = class {
  http;
  constructor(http) {
    this.http = http;
  }
  /**
   * List messages in the dlq.
   *
   * Translates the SDK-side `urlGroup` filter into the server-side
   * `topicName` query parameter, and maps `topicName` back to
   * `urlGroup` on each returned message.
   */
  async listMessages(options) {
    const filter = {
      ...options?.filter,
      topicName: options?.filter?.urlGroup
    };
    const payload = await this.http.request({
      method: "GET",
      path: ["v2", "dlq"],
      query: {
        cursor: options?.cursor,
        count: options?.count,
        ...filter
      }
    });
    const mapped = payload.messages.map((message) => ({
      ...message,
      urlGroup: message.topicName
    }));
    return { messages: mapped, cursor: payload.cursor };
  }
  /**
   * Remove a message from the dlq using its `dlqId`
   */
  async delete(dlqMessageId) {
    return await this.http.request({
      method: "DELETE",
      path: ["v2", "dlq", dlqMessageId],
      // the endpoint returns no body
      parseResponseAsJson: false
    });
  }
  /**
   * Remove multiple messages from the dlq using their `dlqId`s
   */
  async deleteMany(request) {
    return await this.http.request({
      method: "DELETE",
      path: ["v2", "dlq"],
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ dlqIds: request.dlqIds })
    });
  }
};
|
|
57
|
+
|
|
58
|
+
// src/client/error.ts

/** Base error type for all errors raised by the QStash client. */
var QstashError = class extends Error {
  constructor(message) {
    super(message);
    this.name = "QstashError";
  }
};

/** Thrown on HTTP 429 when the burst rate limit is exceeded. */
var QstashRatelimitError = class extends QstashError {
  limit;
  remaining;
  reset;
  constructor(args) {
    super(`Exceeded burst rate limit. ${JSON.stringify(args)} `);
    const { limit, remaining, reset } = args;
    this.limit = limit;
    this.remaining = remaining;
    this.reset = reset;
  }
};

/**
 * Thrown on HTTP 429 from the chat API when request/token rate
 * limits are exceeded. Header-style kebab-case keys on `args` are
 * exposed as camelCase fields.
 */
var QstashChatRatelimitError = class extends QstashError {
  limitRequests;
  limitTokens;
  remainingRequests;
  remainingTokens;
  resetRequests;
  resetTokens;
  constructor(args) {
    super(`Exceeded chat rate limit. ${JSON.stringify(args)} `);
    this.limitRequests = args["limit-requests"];
    this.limitTokens = args["limit-tokens"];
    this.remainingRequests = args["remaining-requests"];
    this.remainingTokens = args["remaining-tokens"];
    this.resetRequests = args["reset-requests"];
    this.resetTokens = args["reset-tokens"];
  }
};

/** Thrown on HTTP 429 when the daily rate limit is exceeded. */
var QstashDailyRatelimitError = class extends QstashError {
  limit;
  remaining;
  reset;
  constructor(args) {
    super(`Exceeded daily rate limit. ${JSON.stringify(args)} `);
    const { limit, remaining, reset } = args;
    this.limit = limit;
    this.remaining = remaining;
    this.reset = reset;
  }
};
|
|
104
|
+
|
|
105
|
+
// src/client/http.ts

/**
 * Thin fetch-based HTTP client shared by all QStash API wrappers.
 *
 * Responsibilities visible here:
 *  - builds URLs from `baseUrl` + path segments + query params
 *  - injects the `Authorization` header unless a caller already set one
 *  - retries failed fetches with a configurable backoff
 *  - converts 429 responses into typed rate-limit errors
 */
var HttpClient = class {
  baseUrl;
  authorization;
  options;
  retry;
  constructor(config) {
    // Normalize away a single trailing slash so path joining is predictable.
    this.baseUrl = config.baseUrl.replace(/\/$/, "");
    this.authorization = config.authorization;
    // retry === false disables retries (1 attempt, zero backoff).
    // Otherwise: attempts = retries + 1 (default 5 attempts),
    // backoff defaults to exponential: e^retryCount * 50 ms.
    this.retry = // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
    typeof config.retry === "boolean" && !config.retry ? {
      attempts: 1,
      backoff: () => 0
    } : {
      attempts: config.retry?.retries ? config.retry.retries + 1 : 5,
      backoff: config.retry?.backoff ?? ((retryCount) => Math.exp(retryCount) * 50)
    };
  }
  /**
   * Perform a request and parse the body as JSON, unless the caller
   * passed `parseResponseAsJson: false` (endpoints with empty bodies),
   * in which case `undefined` is returned.
   */
  async request(request) {
    const { response } = await this.requestWithBackoff(request);
    if (request.parseResponseAsJson === false) {
      return void 0;
    }
    return await response.json();
  }
  /**
   * Perform a request against a server-sent-events style endpoint and
   * yield each `data: ...` payload parsed as JSON. Stops on the
   * `[DONE]` sentinel.
   *
   * NOTE(review): lines are split on "\n" per decoded chunk — assumes an
   * SSE event never straddles a chunk boundary; confirm against server
   * framing.
   */
  async *requestStream(request) {
    const { response } = await this.requestWithBackoff(request);
    if (!response.body) {
      throw new Error("No response body");
    }
    const body = response.body;
    const reader = body.getReader();
    const decoder = new TextDecoder();
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) {
          break;
        }
        // stream: true keeps multi-byte characters split across chunks intact
        const chunkText = decoder.decode(value, { stream: true });
        const chunks = chunkText.split("\n").filter(Boolean);
        for (const chunk of chunks) {
          if (chunk.startsWith("data: ")) {
            const data = chunk.slice(6);
            if (data === "[DONE]") {
              break;
            }
            yield JSON.parse(data);
          }
        }
      }
    } finally {
      // Release the stream even if the consumer stops iterating early.
      await reader.cancel();
    }
  }
  /**
   * Fetch with retry. Network-level failures (fetch rejections) are
   * retried up to `retry.attempts` times, sleeping `retry.backoff(i)` ms
   * between attempts; HTTP error statuses are NOT retried — they are
   * surfaced by checkResponse below.
   */
  requestWithBackoff = async (request) => {
    const [url, requestOptions] = this.processRequest(request);
    let response = void 0;
    let error = void 0;
    for (let index = 0; index < this.retry.attempts; index++) {
      try {
        response = await fetch(url.toString(), requestOptions);
        break;
      } catch (error_) {
        error = error_;
        await new Promise((r) => setTimeout(r, this.retry.backoff(index)));
      }
    }
    if (!response) {
      // Every attempt rejected; rethrow the last fetch error.
      throw error ?? new Error("Exhausted all retries");
    }
    await this.checkResponse(response);
    return {
      response,
      error
    };
  };
  /**
   * Build the final URL and fetch options for a request:
   * default Authorization header, joined path segments, and query
   * params (undefined values skipped).
   */
  processRequest = (request) => {
    const headers = new Headers(request.headers);
    if (!headers.has("Authorization")) {
      headers.set("Authorization", this.authorization);
    }
    const requestOptions = {
      method: request.method,
      headers,
      body: request.body,
      keepalive: request.keepalive
    };
    // Per-request baseUrl (e.g. third-party LLM providers) overrides the default.
    const url = new URL([request.baseUrl ?? this.baseUrl, ...request.path].join("/"));
    if (request.query) {
      for (const [key, value] of Object.entries(request.query)) {
        if (value !== void 0) {
          url.searchParams.set(key, value.toString());
        }
      }
    }
    return [url.toString(), requestOptions];
  };
  /**
   * Map non-2xx responses to typed errors. 429 is inspected first:
   * chat-specific x-ratelimit-* headers, then daily RateLimit-*,
   * otherwise the burst limit. All three branches throw, so control
   * never falls through from the 429 block.
   */
  async checkResponse(response) {
    if (response.status === 429) {
      if (response.headers.get("x-ratelimit-limit-requests")) {
        throw new QstashChatRatelimitError({
          "limit-requests": response.headers.get("x-ratelimit-limit-requests"),
          "limit-tokens": response.headers.get("x-ratelimit-limit-tokens"),
          "remaining-requests": response.headers.get("x-ratelimit-remaining-requests"),
          "remaining-tokens": response.headers.get("x-ratelimit-remaining-tokens"),
          "reset-requests": response.headers.get("x-ratelimit-reset-requests"),
          "reset-tokens": response.headers.get("x-ratelimit-reset-tokens")
        });
      } else if (response.headers.get("RateLimit-Limit")) {
        throw new QstashDailyRatelimitError({
          limit: response.headers.get("RateLimit-Limit"),
          remaining: response.headers.get("RateLimit-Remaining"),
          reset: response.headers.get("RateLimit-Reset")
        });
      }
      throw new QstashRatelimitError({
        limit: response.headers.get("Burst-RateLimit-Limit"),
        remaining: response.headers.get("Burst-RateLimit-Remaining"),
        reset: response.headers.get("Burst-RateLimit-Reset")
      });
    }
    if (response.status < 200 || response.status >= 300) {
      const body = await response.text();
      throw new QstashError(body.length > 0 ? body : `Error: status=${response.status}`);
    }
  }
};
|
|
233
|
+
|
|
234
|
+
// src/client/llm/providers.ts

/**
 * Returns the gateway baseURL and required headers for the given
 * analytics provider. Currently only "helicone" is supported; an
 * unknown name yields undefined.
 */
var analyticsBaseUrlMap = (analyticsName, analyticsToken, providerApiKey, providerBaseUrl) => {
  const gateways = {
    helicone: {
      baseURL: "https://gateway.helicone.ai/v1/chat/completions",
      headers: {
        "Helicone-Auth": `Bearer ${analyticsToken}`,
        "Helicone-Target-Url": providerBaseUrl,
        Authorization: `Bearer ${providerApiKey}`
      }
    }
  };
  return gateways[analyticsName];
};
|
|
247
|
+
/** Provider descriptor for Upstash's own hosted LLM (no token needed). */
var upstash = () => ({
  owner: "upstash",
  baseUrl: "https://qstash.upstash.io/llm",
  token: ""
});
|
|
254
|
+
/** Provider descriptor for OpenAI, keyed by the caller's API token. */
var openai = ({ token }) => {
  return {
    owner: "openai",
    baseUrl: "https://api.openai.com",
    token
  };
};
|
|
259
|
+
/**
 * Provider descriptor for an OpenAI-compatible custom endpoint.
 * A trailing "/chat/completions" (optionally "/v1/chat/completions")
 * is stripped so the SDK can append its own path.
 */
var custom = ({ baseUrl, token }) => ({
  token,
  owner: "custom",
  baseUrl: baseUrl.replace(/\/(v1\/)?chat\/completions$/, "")
});
|
|
270
|
+
|
|
271
|
+
// src/client/llm/chat.ts

/**
 * Client for the QStash chat-completions API. Routes requests either to
 * Upstash's hosted LLM or, for other providers, directly (optionally
 * through an analytics gateway such as Helicone).
 */
var Chat = class _Chat {
  http;
  token;
  constructor(http, token) {
    this.http = http;
    this.token = token;
  }
  /**
   * Convert a PromptRequest (system/user strings) into a ChatRequest
   * with an explicit messages array. The original `system`/`user`
   * fields are retained on the result by the spread.
   */
  static toChatRequest(request) {
    const messages = [];
    messages.push(
      { role: "system", content: request.system },
      { role: "user", content: request.user }
    );
    const chatRequest = { ...request, messages };
    return chatRequest;
  }
  /**
   * Calls the Upstash completions api given a ChatRequest.
   *
   * Returns a ChatCompletion or a stream of ChatCompletionChunks
   * if stream is enabled.
   *
   * @param request ChatRequest with messages
   * @returns Chat completion or stream
   */
  create = async (request) => {
    // Strict comparison (was loose `!=`); owner is a string literal.
    if (request.provider.owner !== "upstash")
      return this.createThirdParty(request);
    const body = JSON.stringify(request);
    if ("stream" in request && request.stream) {
      return this.http.requestStream({
        path: ["llm", "v1", "chat", "completions"],
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          Connection: "keep-alive",
          Accept: "text/event-stream",
          "Cache-Control": "no-cache",
          Authorization: `Bearer ${this.token}`
        },
        body
      });
    }
    return this.http.request({
      path: ["llm", "v1", "chat", "completions"],
      method: "POST",
      headers: { "Content-Type": "application/json", Authorization: `Bearer ${this.token}` },
      body
    });
  };
  /**
   * Calls a third-party (non-Upstash) completions endpoint directly,
   * optionally routed through an analytics gateway.
   *
   * NOTE(review): this mutates the caller's request object (deletes
   * `provider`, `system` and `analytics` before serialization) —
   * callers should not reuse the request afterwards; confirm intent.
   *
   * @param request ChatRequest with messages and a third-party provider
   * @returns Chat completion or stream
   */
  // eslint-disable-next-line @typescript-eslint/require-await
  createThirdParty = async (request) => {
    const { baseUrl, token, owner } = request.provider;
    if (owner === "upstash")
      throw new Error("Upstash is not 3rd party provider!");
    // Strip SDK-only fields so the serialized body matches the provider's API.
    delete request.provider;
    delete request.system;
    const analytics = request.analytics;
    delete request.analytics;
    const body = JSON.stringify(request);
    const isAnalyticsEnabled = analytics?.name && analytics.token;
    const analyticsConfig = (
      // This is exact copy of "isAnalyticsEnabled" but required in order to satify ts
      analytics?.name && analytics.token ? analyticsBaseUrlMap(analytics.name, analytics.token, token, baseUrl) : { headers: void 0, baseURL: baseUrl }
    );
    const isStream = "stream" in request && request.stream;
    const headers = {
      "Content-Type": "application/json",
      Authorization: `Bearer ${token}`,
      ...isStream ? {
        Connection: "keep-alive",
        Accept: "text/event-stream",
        "Cache-Control": "no-cache"
      } : {},
      // Gateway headers (e.g. Helicone-Auth) override/extend the defaults.
      ...analyticsConfig.headers
    };
    const response = await this.http[isStream ? "requestStream" : "request"]({
      // When routed through a gateway the baseURL is already the full endpoint.
      path: isAnalyticsEnabled ? [] : ["v1", "chat", "completions"],
      method: "POST",
      headers,
      body,
      baseUrl: analyticsConfig.baseURL
    });
    return response;
  };
  /**
   * Calls the Upstash completions api given a PromptRequest.
   *
   * Returns a ChatCompletion or a stream of ChatCompletionChunks
   * if stream is enabled.
   *
   * @param request PromptRequest with system and user messages.
   * Note that system parameter shouldn't be passed in the case of
   * mistralai/Mistral-7B-Instruct-v0.2 model.
   * @returns Chat completion or stream
   */
  prompt = async (request) => {
    const chatRequest = _Chat.toChatRequest(request);
    return this.create(chatRequest);
  };
};
|
|
382
|
+
|
|
383
|
+
// src/client/llm/utils.ts

/**
 * Rewrites a publish/enqueue request that targets an LLM provider.
 * Upstash-hosted LLM collapses to `api: { name: "llm" }`; third-party
 * providers get `request.url` pointed at the provider (or analytics
 * gateway) and the matching auth headers set on `headers`.
 * Mutates both `request` and `headers` in place.
 */
function appendLLMOptionsIfNeeded(request, headers) {
  if (request.api?.provider?.owner === "upstash") {
    request.api = { name: "llm" };
    return;
  }
  // Only act on requests that carry an explicit provider.
  if (!request.api || !("provider" in request.api)) {
    return;
  }
  const provider = request.api.provider;
  if (!provider?.baseUrl)
    throw new Error("baseUrl cannot be empty or undefined!");
  if (!provider.token)
    throw new Error("token cannot be empty or undefined!");
  const analytics = request.api.analytics;
  if (analytics) {
    // Route through the analytics gateway; it forwards to the provider.
    const { baseURL, headers: gatewayHeaders } = analyticsBaseUrlMap(
      analytics.name,
      analytics.token,
      provider.token,
      provider.baseUrl
    );
    request.url = baseURL;
    headers.set("Helicone-Auth", gatewayHeaders["Helicone-Auth"]);
    headers.set("Helicone-Target-Url", gatewayHeaders["Helicone-Target-Url"]);
    headers.set("Authorization", gatewayHeaders.Authorization);
  } else {
    // Direct call to the provider's OpenAI-compatible endpoint.
    request.url = `${provider.baseUrl}/v1/chat/completions`;
    headers.set("Authorization", `Bearer ${provider.token}`);
  }
}
|
|
414
|
+
/**
 * LLM publishes deliver their result via callback; reject early when a
 * request targets the llm api without one.
 */
function ensureCallbackPresent(request) {
  const usesLLM = request.api?.name === "llm";
  if (usesLLM && !request.callback) {
    throw new TypeError("Callback cannot be undefined when using LLM");
  }
}
|
|
419
|
+
|
|
420
|
+
// src/client/messages.ts
var Messages = class {
  http;
  constructor(http) {
    this.http = http;
  }
  /**
   * Get a message. The server-side `topicName` field is exposed to
   * callers as `urlGroup`.
   */
  async get(messageId) {
    const payload = await this.http.request({
      method: "GET",
      path: ["v2", "messages", messageId]
    });
    return { ...payload, urlGroup: payload.topicName };
  }
  /**
   * Cancel a message
   */
  async delete(messageId) {
    return await this.http.request({
      method: "DELETE",
      path: ["v2", "messages", messageId],
      parseResponseAsJson: false
    });
  }
  /** Cancel several messages by id; returns the cancelled count. */
  async deleteMany(messageIds) {
    const { cancelled } = await this.http.request({
      method: "DELETE",
      path: ["v2", "messages"],
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ messageIds })
    });
    return cancelled;
  }
  /** Cancel every pending message; returns the cancelled count. */
  async deleteAll() {
    const { cancelled } = await this.http.request({
      method: "DELETE",
      path: ["v2", "messages"]
    });
    return cancelled;
  }
};
|
|
467
|
+
|
|
468
|
+
// src/client/utils.ts

/**
 * Headers that must reach QStash itself (content-type and any
 * Upstash-* control header) and therefore must not be forwarded.
 */
var isIgnoredHeader = (header) => {
  const lowered = header.toLowerCase();
  return lowered.startsWith("content-type") || lowered.startsWith("upstash-");
};

/**
 * Renames every non-control header to `Upstash-Forward-<name>` so
 * QStash relays it to the destination. Mutates and returns `headers`.
 */
function prefixHeaders(headers) {
  // Snapshot the keys first; the loop below adds/removes entries.
  for (const key of [...headers.keys()]) {
    if (isIgnoredHeader(key)) {
      continue;
    }
    const value = headers.get(key);
    if (value !== null) {
      headers.set(`Upstash-Forward-${key}`, value);
    }
    headers.delete(key);
  }
  return headers;
}
|
|
484
|
+
/**
 * Builds the full header set for a publish/enqueue request: forwards
 * user headers via prefixHeaders, then maps each optional request
 * field onto its Upstash-* control header.
 */
function processHeaders(request) {
  const headers = prefixHeaders(new Headers(request.headers));
  headers.set("Upstash-Method", request.method ?? "POST");
  if (request.delay !== undefined) headers.set("Upstash-Delay", `${request.delay.toFixed(0)}s`);
  if (request.notBefore !== undefined) headers.set("Upstash-Not-Before", request.notBefore.toFixed(0));
  if (request.deduplicationId !== undefined) headers.set("Upstash-Deduplication-Id", request.deduplicationId);
  // Any non-undefined value (even false) enables content-based dedup.
  if (request.contentBasedDeduplication !== undefined) headers.set("Upstash-Content-Based-Deduplication", "true");
  if (request.retries !== undefined) headers.set("Upstash-Retries", request.retries.toFixed(0));
  if (request.callback !== undefined) headers.set("Upstash-Callback", request.callback);
  if (request.failureCallback !== undefined) headers.set("Upstash-Failure-Callback", request.failureCallback);
  if (request.timeout !== undefined) headers.set("Upstash-Timeout", `${request.timeout}s`);
  return headers;
}
|
|
513
|
+
/**
 * Resolves the publish destination: explicit url, then urlGroup, then
 * the deprecated topic, falling back to the named api endpoint.
 */
function getRequestPath(request) {
  const { url, urlGroup, topic } = request;
  return url ?? urlGroup ?? topic ?? `api/${request.api?.name}`;
}
|
|
516
|
+
|
|
517
|
+
// src/client/queue.ts

/**
 * Wrapper around the QStash queue API. `queueName` is optional at
 * construction: list() works without it, but every queue-specific
 * method guards and throws if it is missing.
 */
var Queue = class {
  http;
  queueName;
  constructor(http, queueName) {
    this.http = http;
    this.queueName = queueName;
  }
  /**
   * Create or update the queue.
   * Defaults: parallelism 1, not paused.
   */
  async upsert(request) {
    if (!this.queueName) {
      throw new Error("Please provide a queue name to the Queue constructor");
    }
    const body = {
      queueName: this.queueName,
      parallelism: request.parallelism ?? 1,
      paused: request.paused ?? false
    };
    await this.http.request({
      method: "POST",
      path: ["v2", "queues"],
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify(body),
      // endpoint returns no body
      parseResponseAsJson: false
    });
  }
  /**
   * Get the queue details
   */
  async get() {
    if (!this.queueName) {
      throw new Error("Please provide a queue name to the Queue constructor");
    }
    return await this.http.request({
      method: "GET",
      path: ["v2", "queues", this.queueName]
    });
  }
  /**
   * List queues
   */
  async list() {
    return await this.http.request({
      method: "GET",
      path: ["v2", "queues"]
    });
  }
  /**
   * Delete the queue
   */
  async delete() {
    if (!this.queueName) {
      throw new Error("Please provide a queue name to the Queue constructor");
    }
    await this.http.request({
      method: "DELETE",
      path: ["v2", "queues", this.queueName],
      parseResponseAsJson: false
    });
  }
  /**
   * Enqueue a message to a queue.
   * The destination (url / urlGroup / topic / api) becomes the last
   * path segment; optional request fields travel as Upstash-* headers.
   */
  async enqueue(request) {
    if (!this.queueName) {
      throw new Error("Please provide a queue name to the Queue constructor");
    }
    const headers = processHeaders(request);
    const destination = getRequestPath(request);
    const response = await this.http.request({
      path: ["v2", "enqueue", this.queueName, destination],
      body: request.body,
      headers,
      method: "POST"
    });
    return response;
  }
  /**
   * Enqueue a message to a queue, serializing the body to JSON.
   * Also validates/rewrites LLM-targeted requests before delegating
   * to enqueue().
   */
  async enqueueJSON(request) {
    const headers = prefixHeaders(new Headers(request.headers));
    headers.set("Content-Type", "application/json");
    // LLM requests must carry a callback; may also rewrite request.url/headers.
    ensureCallbackPresent(request);
    appendLLMOptionsIfNeeded(request, headers);
    const response = await this.enqueue({
      ...request,
      body: JSON.stringify(request.body),
      headers
    });
    return response;
  }
  /**
   * Pauses the queue.
   *
   * A paused queue will not deliver messages until
   * it is resumed.
   */
  async pause() {
    if (!this.queueName) {
      throw new Error("Please provide a queue name to the Queue constructor");
    }
    await this.http.request({
      method: "POST",
      path: ["v2", "queues", this.queueName, "pause"],
      parseResponseAsJson: false
    });
  }
  /**
   * Resumes the queue.
   */
  async resume() {
    if (!this.queueName) {
      throw new Error("Please provide a queue name to the Queue constructor");
    }
    await this.http.request({
      method: "POST",
      path: ["v2", "queues", this.queueName, "resume"],
      parseResponseAsJson: false
    });
  }
};
|
|
643
|
+
|
|
644
|
+
// src/client/schedules.ts

/** Wrapper around the QStash schedules (cron) API. */
var Schedules = class {
  http;
  constructor(http) {
    this.http = http;
  }
  /**
   * Create a schedule.
   * The cron expression and every optional field travel as Upstash-*
   * headers alongside the forwarded user headers; the destination is
   * the final path segment.
   */
  async create(request) {
    const headers = prefixHeaders(new Headers(request.headers));
    if (!headers.has("Content-Type")) {
      headers.set("Content-Type", "application/json");
    }
    headers.set("Upstash-Cron", request.cron);
    if (request.method !== void 0) {
      headers.set("Upstash-Method", request.method);
    }
    if (request.delay !== void 0) {
      headers.set("Upstash-Delay", `${request.delay.toFixed(0)}s`);
    }
    if (request.retries !== void 0) {
      headers.set("Upstash-Retries", request.retries.toFixed(0));
    }
    if (request.callback !== void 0) {
      headers.set("Upstash-Callback", request.callback);
    }
    if (request.failureCallback !== void 0) {
      headers.set("Upstash-Failure-Callback", request.failureCallback);
    }
    if (request.timeout !== void 0) {
      headers.set("Upstash-Timeout", `${request.timeout}s`);
    }
    return await this.http.request({
      method: "POST",
      headers,
      path: ["v2", "schedules", request.destination],
      body: request.body
    });
  }
  /**
   * Get a schedule
   */
  async get(scheduleId) {
    return await this.http.request({
      method: "GET",
      path: ["v2", "schedules", scheduleId]
    });
  }
  /**
   * List your schedules
   */
  async list() {
    return await this.http.request({
      method: "GET",
      path: ["v2", "schedules"]
    });
  }
  /**
   * Delete a schedule
   */
  async delete(scheduleId) {
    return await this.http.request({
      method: "DELETE",
      path: ["v2", "schedules", scheduleId],
      // endpoint returns no body
      parseResponseAsJson: false
    });
  }
  /**
   * Pauses the schedule.
   *
   * A paused schedule will not deliver messages until
   * it is resumed.
   */
  async pause({ schedule }) {
    await this.http.request({
      method: "PATCH",
      path: ["v2", "schedules", schedule, "pause"],
      parseResponseAsJson: false
    });
  }
  /**
   * Resumes the schedule.
   */
  async resume({ schedule }) {
    await this.http.request({
      method: "PATCH",
      path: ["v2", "schedules", schedule, "resume"],
      parseResponseAsJson: false
    });
  }
};
|
|
736
|
+
|
|
737
|
+
// src/client/url-groups.ts

/**
 * Wrapper around the QStash url-group API. The server still calls
 * these resources "topics", hence the "v2/topics" paths.
 */
var UrlGroups = class {
  http;
  constructor(http) {
    this.http = http;
  }
  /**
   * Add endpoints to a url group, creating the group with the given
   * name if it does not exist yet.
   */
  async addEndpoints(request) {
    const { name, endpoints } = request;
    await this.http.request({
      method: "POST",
      path: ["v2", "topics", name, "endpoints"],
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ endpoints }),
      parseResponseAsJson: false
    });
  }
  /**
   * Remove endpoints from a url group.
   */
  async removeEndpoints(request) {
    const { name, endpoints } = request;
    await this.http.request({
      method: "DELETE",
      path: ["v2", "topics", name, "endpoints"],
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ endpoints }),
      parseResponseAsJson: false
    });
  }
  /**
   * Get a list of all url groups.
   */
  async list() {
    return await this.http.request({ method: "GET", path: ["v2", "topics"] });
  }
  /**
   * Get a single url group
   */
  async get(name) {
    return await this.http.request({ method: "GET", path: ["v2", "topics", name] });
  }
  /**
   * Delete a url group
   */
  async delete(name) {
    return await this.http.request({
      method: "DELETE",
      path: ["v2", "topics", name],
      parseResponseAsJson: false
    });
  }
};
|
|
796
|
+
|
|
797
|
+
// src/client/client.ts
|
|
798
|
+
var Client = class {
|
|
799
|
+
http;
|
|
800
|
+
token;
|
|
801
|
+
constructor(config) {
|
|
802
|
+
this.http = new HttpClient({
|
|
803
|
+
retry: config.retry,
|
|
804
|
+
baseUrl: config.baseUrl ? config.baseUrl.replace(/\/$/, "") : "https://qstash.upstash.io",
|
|
805
|
+
authorization: `Bearer ${config.token}`
|
|
806
|
+
});
|
|
807
|
+
this.token = config.token;
|
|
808
|
+
}
|
|
809
|
+
/**
|
|
810
|
+
* Access the urlGroup API.
|
|
811
|
+
*
|
|
812
|
+
* Create, read, update or delete urlGroups.
|
|
813
|
+
*/
|
|
814
|
+
get urlGroups() {
|
|
815
|
+
return new UrlGroups(this.http);
|
|
816
|
+
}
|
|
817
|
+
/**
|
|
818
|
+
* Deprecated. Use urlGroups instead.
|
|
819
|
+
*
|
|
820
|
+
* Access the topic API.
|
|
821
|
+
*
|
|
822
|
+
* Create, read, update or delete topics.
|
|
823
|
+
*/
|
|
824
|
+
get topics() {
|
|
825
|
+
return this.urlGroups;
|
|
826
|
+
}
|
|
827
|
+
/**
|
|
828
|
+
* Access the dlq API.
|
|
829
|
+
*
|
|
830
|
+
* List or remove messages from the DLQ.
|
|
831
|
+
*/
|
|
832
|
+
get dlq() {
|
|
833
|
+
return new DLQ(this.http);
|
|
834
|
+
}
|
|
835
|
+
/**
|
|
836
|
+
* Access the message API.
|
|
837
|
+
*
|
|
838
|
+
* Read or cancel messages.
|
|
839
|
+
*/
|
|
840
|
+
get messages() {
|
|
841
|
+
return new Messages(this.http);
|
|
842
|
+
}
|
|
843
|
+
/**
|
|
844
|
+
* Access the schedule API.
|
|
845
|
+
*
|
|
846
|
+
* Create, read or delete schedules.
|
|
847
|
+
*/
|
|
848
|
+
get schedules() {
|
|
849
|
+
return new Schedules(this.http);
|
|
850
|
+
}
|
|
851
|
+
/**
|
|
852
|
+
* Access the queue API.
|
|
853
|
+
*
|
|
854
|
+
* Create, read, update or delete queues.
|
|
855
|
+
*/
|
|
856
|
+
queue(request) {
|
|
857
|
+
return new Queue(this.http, request?.queueName);
|
|
858
|
+
}
|
|
859
|
+
/**
|
|
860
|
+
* Access the Chat API
|
|
861
|
+
*
|
|
862
|
+
* Call the create or prompt methods
|
|
863
|
+
*/
|
|
864
|
+
chat() {
|
|
865
|
+
return new Chat(this.http, this.token);
|
|
866
|
+
}
|
|
867
|
+
async publish(request) {
|
|
868
|
+
const headers = processHeaders(request);
|
|
869
|
+
const response = await this.http.request({
|
|
870
|
+
path: ["v2", "publish", getRequestPath(request)],
|
|
871
|
+
body: request.body,
|
|
872
|
+
headers,
|
|
873
|
+
method: "POST"
|
|
874
|
+
});
|
|
875
|
+
return response;
|
|
876
|
+
}
|
|
877
|
+
/**
|
|
878
|
+
* publishJSON is a utility wrapper around `publish` that automatically serializes the body
|
|
879
|
+
* and sets the `Content-Type` header to `application/json`.
|
|
880
|
+
*/
|
|
881
|
+
async publishJSON(request) {
|
|
882
|
+
const headers = prefixHeaders(new Headers(request.headers));
|
|
883
|
+
headers.set("Content-Type", "application/json");
|
|
884
|
+
ensureCallbackPresent(request);
|
|
885
|
+
appendLLMOptionsIfNeeded(request, headers);
|
|
886
|
+
const response = await this.publish({
|
|
887
|
+
...request,
|
|
888
|
+
headers,
|
|
889
|
+
body: JSON.stringify(request.body)
|
|
890
|
+
});
|
|
891
|
+
return response;
|
|
892
|
+
}
|
|
893
|
+
/**
|
|
894
|
+
* Batch publish messages to QStash.
|
|
895
|
+
*/
|
|
896
|
+
async batch(request) {
|
|
897
|
+
const messages = [];
|
|
898
|
+
for (const message of request) {
|
|
899
|
+
const headers = processHeaders(message);
|
|
900
|
+
const headerEntries = Object.fromEntries(headers.entries());
|
|
901
|
+
messages.push({
|
|
902
|
+
destination: getRequestPath(message),
|
|
903
|
+
headers: headerEntries,
|
|
904
|
+
body: message.body,
|
|
905
|
+
...message.queueName && { queue: message.queueName }
|
|
906
|
+
});
|
|
907
|
+
}
|
|
908
|
+
const response = await this.http.request({
|
|
909
|
+
path: ["v2", "batch"],
|
|
910
|
+
body: JSON.stringify(messages),
|
|
911
|
+
headers: {
|
|
912
|
+
"Content-Type": "application/json"
|
|
913
|
+
},
|
|
914
|
+
method: "POST"
|
|
915
|
+
});
|
|
916
|
+
return response;
|
|
917
|
+
}
|
|
918
|
+
/**
|
|
919
|
+
* Batch publish messages to QStash, serializing each body to JSON.
|
|
920
|
+
*/
|
|
921
|
+
async batchJSON(request) {
|
|
922
|
+
for (const message of request) {
|
|
923
|
+
if ("body" in message) {
|
|
924
|
+
message.body = JSON.stringify(message.body);
|
|
925
|
+
}
|
|
926
|
+
message.headers = new Headers(message.headers);
|
|
927
|
+
ensureCallbackPresent(message);
|
|
928
|
+
appendLLMOptionsIfNeeded(message, message.headers);
|
|
929
|
+
message.headers.set("Content-Type", "application/json");
|
|
930
|
+
}
|
|
931
|
+
const response = await this.batch(request);
|
|
932
|
+
return response;
|
|
933
|
+
}
|
|
934
|
+
/**
 * Retrieve your events.
 *
 * The events endpoint is paginated and returns only 100 events at a time.
 * If you want to receive more events, you can use the cursor to paginate.
 *
 * The cursor is a unix timestamp with millisecond precision
 *
 * @example
 * ```ts
 * let cursor = Date.now()
 * const events: Event[] = []
 * while (cursor > 0) {
 *   const res = await client.events({ cursor })
 *   events.push(...res.events)
 *   cursor = res.cursor ?? 0
 * }
 * ```
 *
 * @param request - optional; `cursor` for pagination plus a `filter` object.
 * @returns the next cursor and the events, with the API's `topicName`
 *   field mirrored as `urlGroup` on each event.
 */
async events(request) {
  const query = {};
  // Only forward a positive cursor; 0/undefined means "start from now".
  if (request?.cursor && request.cursor > 0) {
    query.cursor = request.cursor.toString();
  }
  for (const [key, value] of Object.entries(request?.filter ?? {})) {
    // Negative numeric filters are treated as "unset" and skipped.
    if (typeof value === "number" && value < 0) {
      continue;
    }
    // The public API names this filter "urlGroup"; the wire format still
    // uses the legacy "topicName" key.
    if (key === "urlGroup") {
      query.topicName = value.toString();
    } else if (typeof value !== "undefined") {
      query[key] = value.toString();
    }
  }
  const responsePayload = await this.http.request({
    path: ["v2", "events"],
    method: "GET",
    query
  });
  return {
    cursor: responsePayload.cursor,
    // Mirror the legacy wire field back under the public name.
    events: responsePayload.events.map((event) => {
      return {
        ...event,
        urlGroup: event.topicName
      };
    })
  };
}
|
|
983
|
+
};
|
|
984
|
+
|
|
985
|
+
export {
  // Error types
  QstashError,
  QstashRatelimitError,
  QstashChatRatelimitError,
  QstashDailyRatelimitError,
  // Chat/LLM provider helpers and analytics base URLs
  analyticsBaseUrlMap,
  upstash,
  openai,
  custom,
  // Resource API clients
  Chat,
  Messages,
  Schedules,
  UrlGroups,
  // Main entry point
  Client
};
|