@upstash/qstash 2.7.21 → 2.7.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/h3.js CHANGED
@@ -445,7 +445,8 @@ var DLQ = class {
       messages: messagesPayload.messages.map((message) => {
         return {
           ...message,
-          urlGroup: message.topicName
+          urlGroup: message.topicName,
+          ratePerSecond: "rate" in message ? message.rate : void 0
         };
       }),
       cursor: messagesPayload.cursor
@@ -562,6 +563,7 @@ var HttpClient = class {
   options;
   retry;
   headers;
+  telemetryHeaders;
   constructor(config) {
     this.baseUrl = config.baseUrl.replace(/\/$/, "");
     this.authorization = config.authorization;
@@ -574,6 +576,7 @@ var HttpClient = class {
       backoff: config.retry?.backoff ?? ((retryCount) => Math.exp(retryCount) * 50)
     };
     this.headers = config.headers;
+    this.telemetryHeaders = config.telemetryHeaders;
   }
   async request(request) {
     const { response } = await this.requestWithBackoff(request);
@@ -875,7 +878,8 @@ var Messages = class {
     });
     const message = {
       ...messagePayload,
-      urlGroup: messagePayload.topicName
+      urlGroup: messagePayload.topicName,
+      ratePerSecond: "rate" in messagePayload ? messagePayload.rate : void 0
     };
     return message;
   }
@@ -1071,7 +1075,7 @@ function prefixHeaders(headers) {
   }
   return headers;
 }
-function wrapWithGlobalHeaders(headers, globalHeaders) {
+function wrapWithGlobalHeaders(headers, globalHeaders, telemetryHeaders) {
   if (!globalHeaders) {
     return headers;
   }
@@ -1079,6 +1083,11 @@ function wrapWithGlobalHeaders(headers, globalHeaders) {
   headers.forEach((value, key) => {
     finalHeaders.set(key, value);
   });
+  telemetryHeaders?.forEach((value, key) => {
+    if (!value)
+      return;
+    finalHeaders.append(key, value);
+  });
   return finalHeaders;
 }
 function processHeaders(request) {
@@ -1116,6 +1125,19 @@ function processHeaders(request) {
       headers.set("Upstash-Timeout", `${request.timeout}s`);
     }
   }
+  if (request.flowControl?.key) {
+    const parallelism = request.flowControl.parallelism?.toString();
+    const rate = request.flowControl.ratePerSecond?.toString();
+    const controlValue = [
+      parallelism ? `parallelism=${parallelism}` : void 0,
+      rate ? `rate=${rate}` : void 0
+    ].filter(Boolean);
+    if (controlValue.length === 0) {
+      throw new QstashError("Provide at least one of parallelism or ratePerSecond for flowControl");
+    }
+    headers.set("Upstash-Flow-Control-Key", request.flowControl.key);
+    headers.set("Upstash-Flow-Control-Value", controlValue.join(", "));
+  }
   return headers;
 }
 function getRequestPath(request) {
@@ -1155,6 +1177,15 @@ function decodeBase64(base64) {
     }
   }
 }
+function getRuntime() {
+  if (typeof process === "object" && typeof process.versions == "object" && process.versions.bun)
+    return `bun@${process.versions.bun}`;
+  if (typeof EdgeRuntime === "string")
+    return "edge-light";
+  else if (typeof process === "object" && typeof process.version === "string")
+    return `node@${process.version}`;
+  return "";
+}
 
 // src/client/queue.ts
 var Queue = class {
@@ -1229,7 +1260,8 @@ var Queue = class {
     }
     const headers = wrapWithGlobalHeaders(
       processHeaders(request),
-      this.http.headers
+      this.http.headers,
+      this.http.telemetryHeaders
     );
     const destination = getRequestPath(request);
     const response = await this.http.request({
@@ -1332,9 +1364,24 @@ var Schedules = class {
     if (request.queueName !== void 0) {
       headers.set("Upstash-Queue-Name", request.queueName);
     }
+    if (request.flowControl?.key) {
+      const parallelism = request.flowControl.parallelism?.toString();
+      const rate = request.flowControl.ratePerSecond?.toString();
+      const controlValue = [
+        parallelism ? `parallelism=${parallelism}` : void 0,
+        rate ? `rate=${rate}` : void 0
+      ].filter(Boolean);
+      if (controlValue.length === 0) {
+        throw new QstashError(
+          "Provide at least one of parallelism or ratePerSecond for flowControl"
+        );
+      }
+      headers.set("Upstash-Flow-Control-Key", request.flowControl.key);
+      headers.set("Upstash-Flow-Control-Value", controlValue.join(", "));
+    }
     return await this.http.request({
       method: "POST",
-      headers: wrapWithGlobalHeaders(headers, this.http.headers),
+      headers: wrapWithGlobalHeaders(headers, this.http.headers, this.http.telemetryHeaders),
       path: ["v2", "schedules", request.destination],
       body: request.body
     });
@@ -1343,19 +1390,27 @@ var Schedules = class {
   * Get a schedule
   */
   async get(scheduleId) {
-    return await this.http.request({
+    const schedule = await this.http.request({
       method: "GET",
       path: ["v2", "schedules", scheduleId]
     });
+    if ("rate" in schedule)
+      schedule.ratePerSecond = schedule.rate;
+    return schedule;
   }
   /**
   * List your schedules
   */
   async list() {
-    return await this.http.request({
+    const schedules = await this.http.request({
       method: "GET",
       path: ["v2", "schedules"]
     });
+    for (const schedule of schedules) {
+      if ("rate" in schedule)
+        schedule.ratePerSecond = schedule.rate;
+    }
+    return schedules;
   }
   /**
   * Delete a schedule
@@ -2838,20 +2893,37 @@ var Workflow = class {
   }
 };
 
+// version.ts
+var VERSION = "v2.7.23";
+
 // src/client/client.ts
 var Client = class {
   http;
   token;
   constructor(config) {
     const environment = typeof process === "undefined" ? {} : process.env;
-    const baseUrl = config?.baseUrl ? config.baseUrl.replace(/\/$/, "") : environment.QSTASH_URL ?? "https://qstash.upstash.io";
+    let baseUrl = (config?.baseUrl ?? environment.QSTASH_URL ?? "https://qstash.upstash.io").replace(/\/$/, "");
+    if (baseUrl === "https://qstash.upstash.io/v2/publish") {
+      baseUrl = "https://qstash.upstash.io";
+    }
     const token = config?.token ?? environment.QSTASH_TOKEN;
+    const enableTelemetry = environment.UPSTASH_DISABLE_TELEMETRY ? false : config?.enableTelemetry ?? true;
+    const isCloudflare = typeof caches !== "undefined" && "default" in caches;
+    const telemetryHeaders = new Headers(
+      enableTelemetry ? {
+        "Upstash-Telemetry-Sdk": `upstash-qstash-js@${VERSION}`,
+        "Upstash-Telemetry-Platform": isCloudflare ? "cloudflare" : environment.VERCEL ? "vercel" : environment.AWS_REGION ? "aws" : "",
+        "Upstash-Telemetry-Runtime": getRuntime()
+      } : {}
+    );
     this.http = new HttpClient({
       retry: config?.retry,
       baseUrl,
       authorization: `Bearer ${token}`,
       //@ts-expect-error caused by undici and bunjs type overlap
-      headers: prefixHeaders(new Headers(config?.headers ?? {}))
+      headers: prefixHeaders(new Headers(config?.headers ?? {})),
+      //@ts-expect-error caused by undici and bunjs type overlap
+      telemetryHeaders
     });
     if (!token) {
       console.warn(
@@ -2933,7 +3005,8 @@ var Client = class {
   async publish(request) {
     const headers = wrapWithGlobalHeaders(
       processHeaders(request),
-      this.http.headers
+      this.http.headers,
+      this.http.telemetryHeaders
     );
     const response = await this.http.request({
       path: ["v2", "publish", getRequestPath(request)],
@@ -2964,7 +3037,11 @@ var Client = class {
   async batch(request) {
     const messages = [];
     for (const message of request) {
-      const headers = wrapWithGlobalHeaders(processHeaders(message), this.http.headers);
+      const headers = wrapWithGlobalHeaders(
+        processHeaders(message),
+        this.http.headers,
+        this.http.telemetryHeaders
+      );
       const headerEntries = Object.fromEntries(headers.entries());
       messages.push({
         destination: getRequestPath(message),
@@ -3019,7 +3096,7 @@ var Client = class {
   * }
   * ```
   */
-  async events(request) {
+  async logs(request) {
     const query = {};
     if (typeof request?.cursor === "number" && request.cursor > 0) {
       query.cursor = request.cursor.toString();
@@ -3041,16 +3118,42 @@ var Client = class {
       method: "GET",
       query
     });
+    const logs = responsePayload.events.map((event) => {
+      return {
+        ...event,
+        urlGroup: event.topicName
+      };
+    });
     return {
       cursor: responsePayload.cursor,
-      events: responsePayload.events.map((event) => {
-        return {
-          ...event,
-          urlGroup: event.topicName
-        };
-      })
+      logs,
+      events: logs
     };
   }
+  /**
+   * @deprecated Will be removed in the next major release. Use the `logs` method instead.
+   *
+   * Retrieve your logs.
+   *
+   * The logs endpoint is paginated and returns only 100 logs at a time.
+   * If you want to receive more logs, you can use the cursor to paginate.
+   *
+   * The cursor is a unix timestamp with millisecond precision
+   *
+   * @example
+   * ```ts
+   * let cursor = Date.now()
+   * const logs: Log[] = []
+   * while (cursor > 0) {
+   *   const res = await qstash.logs({ cursor })
+   *   logs.push(...res.logs)
+   *   cursor = res.cursor ?? 0
+   * }
+   * ```
+   */
+  async events(request) {
+    return await this.logs(request);
+  }
 };
 
 // platforms/h3.ts
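
The h3.js hunks above add a per-request flow-control option: when `request.flowControl.key` is set, `processHeaders` emits `Upstash-Flow-Control-Key` and `Upstash-Flow-Control-Value` headers built from `parallelism` and/or `ratePerSecond`, and `Schedules.create` gains the same handling. A minimal usage sketch, assuming a placeholder token and example URLs (the option shape mirrors the fields read in the code above):

```ts
import { Client } from "@upstash/qstash";

const client = new Client({ token: "<QSTASH_TOKEN>" }); // placeholder token

// Publish with flow control: at least one of parallelism or ratePerSecond
// must be given, otherwise the QstashError added above is thrown.
await client.publishJSON({
  url: "https://example.com/api/handler", // example destination
  body: { hello: "world" },
  flowControl: { key: "my-endpoint", ratePerSecond: 10, parallelism: 2 },
});

// Schedules read the same flowControl fields (see the Schedules.create hunk).
await client.schedules.create({
  destination: "https://example.com/api/handler",
  cron: "*/5 * * * *",
  flowControl: { key: "my-endpoint", parallelism: 1 },
});
```
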
package/h3.mjs CHANGED
@@ -1,9 +1,9 @@
 import {
   serve,
   verifySignatureH3
-} from "./chunk-FGKPOZOO.mjs";
-import "./chunk-3D34OUXY.mjs";
-import "./chunk-QHCEWG63.mjs";
+} from "./chunk-JZACTABH.mjs";
+import "./chunk-ODRYYMMA.mjs";
+import "./chunk-G7CVCBTL.mjs";
 export {
   serve,
   verifySignatureH3
package/hono.d.mts CHANGED
@@ -1,5 +1,5 @@
 import { Context } from 'hono';
-import { a3 as RouteFunction, a4 as WorkflowServeOptions } from './client-DuOcoFUv.mjs';
+import { a9 as RouteFunction, aa as WorkflowServeOptions } from './client-CYwLcEcQ.mjs';
 import 'neverthrow';
 
 type WorkflowBindings = {
package/hono.d.ts CHANGED
@@ -1,5 +1,5 @@
 import { Context } from 'hono';
-import { a3 as RouteFunction, a4 as WorkflowServeOptions } from './client-DuOcoFUv.js';
+import { a9 as RouteFunction, aa as WorkflowServeOptions } from './client-CYwLcEcQ.js';
 import 'neverthrow';
 
 type WorkflowBindings = {
package/hono.js CHANGED
@@ -121,7 +121,8 @@ var DLQ = class {
       messages: messagesPayload.messages.map((message) => {
         return {
           ...message,
-          urlGroup: message.topicName
+          urlGroup: message.topicName,
+          ratePerSecond: "rate" in message ? message.rate : void 0
         };
       }),
       cursor: messagesPayload.cursor
@@ -238,6 +239,7 @@ var HttpClient = class {
   options;
   retry;
   headers;
+  telemetryHeaders;
   constructor(config) {
     this.baseUrl = config.baseUrl.replace(/\/$/, "");
     this.authorization = config.authorization;
@@ -250,6 +252,7 @@ var HttpClient = class {
       backoff: config.retry?.backoff ?? ((retryCount) => Math.exp(retryCount) * 50)
     };
     this.headers = config.headers;
+    this.telemetryHeaders = config.telemetryHeaders;
   }
   async request(request) {
     const { response } = await this.requestWithBackoff(request);
@@ -551,7 +554,8 @@ var Messages = class {
     });
     const message = {
       ...messagePayload,
-      urlGroup: messagePayload.topicName
+      urlGroup: messagePayload.topicName,
+      ratePerSecond: "rate" in messagePayload ? messagePayload.rate : void 0
     };
     return message;
   }
@@ -747,7 +751,7 @@ function prefixHeaders(headers) {
   }
   return headers;
 }
-function wrapWithGlobalHeaders(headers, globalHeaders) {
+function wrapWithGlobalHeaders(headers, globalHeaders, telemetryHeaders) {
   if (!globalHeaders) {
     return headers;
   }
@@ -755,6 +759,11 @@ function wrapWithGlobalHeaders(headers, globalHeaders) {
   headers.forEach((value, key) => {
     finalHeaders.set(key, value);
   });
+  telemetryHeaders?.forEach((value, key) => {
+    if (!value)
+      return;
+    finalHeaders.append(key, value);
+  });
   return finalHeaders;
 }
 function processHeaders(request) {
@@ -792,6 +801,19 @@ function processHeaders(request) {
       headers.set("Upstash-Timeout", `${request.timeout}s`);
     }
   }
+  if (request.flowControl?.key) {
+    const parallelism = request.flowControl.parallelism?.toString();
+    const rate = request.flowControl.ratePerSecond?.toString();
+    const controlValue = [
+      parallelism ? `parallelism=${parallelism}` : void 0,
+      rate ? `rate=${rate}` : void 0
+    ].filter(Boolean);
+    if (controlValue.length === 0) {
+      throw new QstashError("Provide at least one of parallelism or ratePerSecond for flowControl");
+    }
+    headers.set("Upstash-Flow-Control-Key", request.flowControl.key);
+    headers.set("Upstash-Flow-Control-Value", controlValue.join(", "));
+  }
   return headers;
 }
 function getRequestPath(request) {
@@ -831,6 +853,15 @@ function decodeBase64(base64) {
     }
   }
 }
+function getRuntime() {
+  if (typeof process === "object" && typeof process.versions == "object" && process.versions.bun)
+    return `bun@${process.versions.bun}`;
+  if (typeof EdgeRuntime === "string")
+    return "edge-light";
+  else if (typeof process === "object" && typeof process.version === "string")
+    return `node@${process.version}`;
+  return "";
+}
 
 // src/client/queue.ts
 var Queue = class {
@@ -905,7 +936,8 @@ var Queue = class {
     }
     const headers = wrapWithGlobalHeaders(
       processHeaders(request),
-      this.http.headers
+      this.http.headers,
+      this.http.telemetryHeaders
     );
     const destination = getRequestPath(request);
     const response = await this.http.request({
@@ -1008,9 +1040,24 @@ var Schedules = class {
     if (request.queueName !== void 0) {
       headers.set("Upstash-Queue-Name", request.queueName);
     }
+    if (request.flowControl?.key) {
+      const parallelism = request.flowControl.parallelism?.toString();
+      const rate = request.flowControl.ratePerSecond?.toString();
+      const controlValue = [
+        parallelism ? `parallelism=${parallelism}` : void 0,
+        rate ? `rate=${rate}` : void 0
+      ].filter(Boolean);
+      if (controlValue.length === 0) {
+        throw new QstashError(
+          "Provide at least one of parallelism or ratePerSecond for flowControl"
+        );
+      }
+      headers.set("Upstash-Flow-Control-Key", request.flowControl.key);
+      headers.set("Upstash-Flow-Control-Value", controlValue.join(", "));
+    }
     return await this.http.request({
       method: "POST",
-      headers: wrapWithGlobalHeaders(headers, this.http.headers),
+      headers: wrapWithGlobalHeaders(headers, this.http.headers, this.http.telemetryHeaders),
       path: ["v2", "schedules", request.destination],
       body: request.body
     });
@@ -1019,19 +1066,27 @@ var Schedules = class {
   * Get a schedule
   */
   async get(scheduleId) {
-    return await this.http.request({
+    const schedule = await this.http.request({
       method: "GET",
       path: ["v2", "schedules", scheduleId]
     });
+    if ("rate" in schedule)
+      schedule.ratePerSecond = schedule.rate;
+    return schedule;
   }
   /**
   * List your schedules
   */
   async list() {
-    return await this.http.request({
+    const schedules = await this.http.request({
      method: "GET",
      path: ["v2", "schedules"]
    });
+    for (const schedule of schedules) {
+      if ("rate" in schedule)
+        schedule.ratePerSecond = schedule.rate;
+    }
+    return schedules;
   }
   /**
   * Delete a schedule
@@ -1128,20 +1183,37 @@ var UrlGroups = class {
   }
 };
 
+// version.ts
+var VERSION = "v2.7.23";
+
 // src/client/client.ts
 var Client = class {
   http;
   token;
   constructor(config) {
     const environment = typeof process === "undefined" ? {} : process.env;
-    const baseUrl = config?.baseUrl ? config.baseUrl.replace(/\/$/, "") : environment.QSTASH_URL ?? "https://qstash.upstash.io";
+    let baseUrl = (config?.baseUrl ?? environment.QSTASH_URL ?? "https://qstash.upstash.io").replace(/\/$/, "");
+    if (baseUrl === "https://qstash.upstash.io/v2/publish") {
+      baseUrl = "https://qstash.upstash.io";
+    }
     const token = config?.token ?? environment.QSTASH_TOKEN;
+    const enableTelemetry = environment.UPSTASH_DISABLE_TELEMETRY ? false : config?.enableTelemetry ?? true;
+    const isCloudflare = typeof caches !== "undefined" && "default" in caches;
+    const telemetryHeaders = new Headers(
+      enableTelemetry ? {
+        "Upstash-Telemetry-Sdk": `upstash-qstash-js@${VERSION}`,
+        "Upstash-Telemetry-Platform": isCloudflare ? "cloudflare" : environment.VERCEL ? "vercel" : environment.AWS_REGION ? "aws" : "",
+        "Upstash-Telemetry-Runtime": getRuntime()
+      } : {}
+    );
     this.http = new HttpClient({
       retry: config?.retry,
       baseUrl,
       authorization: `Bearer ${token}`,
       //@ts-expect-error caused by undici and bunjs type overlap
-      headers: prefixHeaders(new Headers(config?.headers ?? {}))
+      headers: prefixHeaders(new Headers(config?.headers ?? {})),
+      //@ts-expect-error caused by undici and bunjs type overlap
+      telemetryHeaders
    });
     if (!token) {
       console.warn(
@@ -1223,7 +1295,8 @@ var Client = class {
   async publish(request) {
     const headers = wrapWithGlobalHeaders(
       processHeaders(request),
-      this.http.headers
+      this.http.headers,
+      this.http.telemetryHeaders
     );
     const response = await this.http.request({
       path: ["v2", "publish", getRequestPath(request)],
@@ -1254,7 +1327,11 @@ var Client = class {
   async batch(request) {
     const messages = [];
     for (const message of request) {
-      const headers = wrapWithGlobalHeaders(processHeaders(message), this.http.headers);
+      const headers = wrapWithGlobalHeaders(
+        processHeaders(message),
+        this.http.headers,
+        this.http.telemetryHeaders
+      );
       const headerEntries = Object.fromEntries(headers.entries());
       messages.push({
         destination: getRequestPath(message),
@@ -1309,7 +1386,7 @@ var Client = class {
   * }
   * ```
   */
-  async events(request) {
+  async logs(request) {
     const query = {};
     if (typeof request?.cursor === "number" && request.cursor > 0) {
       query.cursor = request.cursor.toString();
@@ -1331,16 +1408,42 @@ var Client = class {
       method: "GET",
       query
     });
+    const logs = responsePayload.events.map((event) => {
+      return {
+        ...event,
+        urlGroup: event.topicName
+      };
+    });
     return {
       cursor: responsePayload.cursor,
-      events: responsePayload.events.map((event) => {
-        return {
-          ...event,
-          urlGroup: event.topicName
-        };
-      })
+      logs,
+      events: logs
     };
   }
+  /**
+   * @deprecated Will be removed in the next major release. Use the `logs` method instead.
+   *
+   * Retrieve your logs.
+   *
+   * The logs endpoint is paginated and returns only 100 logs at a time.
+   * If you want to receive more logs, you can use the cursor to paginate.
+   *
+   * The cursor is a unix timestamp with millisecond precision
+   *
+   * @example
+   * ```ts
+   * let cursor = Date.now()
+   * const logs: Log[] = []
+   * while (cursor > 0) {
+   *   const res = await qstash.logs({ cursor })
+   *   logs.push(...res.logs)
+   *   cursor = res.cursor ?? 0
+   * }
+   * ```
+   */
+  async events(request) {
+    return await this.logs(request);
+  }
 };
 
 // src/client/workflow/constants.ts
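
The same changes land in the hono.js bundle, which also carries the release's two other user-facing additions: telemetry headers (`Upstash-Telemetry-Sdk`, `-Platform`, `-Runtime`) are attached by default but can be switched off, and `events()` is deprecated in favour of `logs()`, whose response now exposes both `logs` and a backward-compatible `events` alias. A sketch of both, assuming a placeholder token:

```ts
import { Client, type Log } from "@upstash/qstash";

// Telemetry can be disabled either via the UPSTASH_DISABLE_TELEMETRY
// environment variable or per client with enableTelemetry: false.
const client = new Client({ token: "<QSTASH_TOKEN>", enableTelemetry: false });

// Paginate logs with the new method; the cursor is a unix timestamp in ms.
let cursor = Date.now();
const allLogs: Log[] = [];
while (cursor > 0) {
  const res = await client.logs({ cursor });
  allLogs.push(...res.logs);
  cursor = res.cursor ?? 0;
}

// The deprecated method still works and simply forwards to logs().
const legacy = await client.events();
```
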
package/hono.mjs CHANGED
@@ -1,6 +1,6 @@
 import {
   serve
-} from "./chunk-QHCEWG63.mjs";
+} from "./chunk-G7CVCBTL.mjs";
 
 // platforms/hono.ts
 var serve2 = (routeFunction, options) => {
package/index.d.mts CHANGED
@@ -1,5 +1,5 @@
-import { R as RateLimit, C as ChatRateLimit, S as Step, F as FailureFunctionPayload, L as LLMOwner, B as BaseProvider, E as EmailOwner, P as ProviderInfo } from './client-DuOcoFUv.mjs';
-export { A as AddEndpointsRequest, y as BodyInit, I as Chat, K as ChatCompletion, N as ChatCompletionChunk, J as ChatCompletionMessage, _ as ChatRequest, h as Client, p as CreateScheduleRequest, r as Endpoint, v as Event, w as EventPayload, g as EventsRequest, x as GetEventsPayload, G as GetEventsResponse, H as HTTPMethods, z as HeadersInit, M as Message, m as MessagePayload, n as Messages, Y as OpenAIChatModel, Z as PromptChatRequest, d as PublishBatchRequest, f as PublishJsonRequest, e as PublishRequest, l as PublishResponse, i as PublishToApiResponse, k as PublishToUrlGroupsResponse, j as PublishToUrlResponse, Q as QueueRequest, c as Receiver, a as ReceiverConfig, s as RemoveEndpointsRequest, D as RequestOptions, o as Schedule, q as Schedules, b as SignatureError, u as State, T as StreamDisabled, O as StreamEnabled, X as StreamParameter, U as UrlGroup, t as UrlGroups, V as VerifyRequest, W as WithCursor, a1 as anthropic, a2 as custom, a0 as openai, $ as upstash } from './client-DuOcoFUv.mjs';
+import { R as RateLimit, C as ChatRateLimit, S as Step, F as FailureFunctionPayload, L as LLMOwner, B as BaseProvider, E as EmailOwner, P as ProviderInfo } from './client-CYwLcEcQ.mjs';
+export { A as AddEndpointsRequest, K as BodyInit, X as Chat, Z as ChatCompletion, _ as ChatCompletionChunk, Y as ChatCompletionMessage, a4 as ChatRequest, j as Client, r as CreateScheduleRequest, t as Endpoint, y as Event, D as EventPayload, h as EventsRequest, T as FlowControl, J as GetEventsPayload, i as GetEventsResponse, I as GetLogsPayload, G as GetLogsResponse, H as HTTPMethods, N as HeadersInit, x as Log, z as LogPayload, g as LogsRequest, M as Message, o as MessagePayload, p as Messages, a2 as OpenAIChatModel, a3 as PromptChatRequest, d as PublishBatchRequest, f as PublishJsonRequest, e as PublishRequest, n as PublishResponse, k as PublishToApiResponse, m as PublishToUrlGroupsResponse, l as PublishToUrlResponse, Q as QueueRequest, c as Receiver, a as ReceiverConfig, u as RemoveEndpointsRequest, O as RequestOptions, q as Schedule, s as Schedules, b as SignatureError, w as State, a0 as StreamDisabled, $ as StreamEnabled, a1 as StreamParameter, U as UrlGroup, v as UrlGroups, V as VerifyRequest, W as WithCursor, a7 as anthropic, a8 as custom, a6 as openai, a5 as upstash } from './client-CYwLcEcQ.mjs';
 import 'neverthrow';
 
 /**
package/index.d.ts CHANGED
@@ -1,5 +1,5 @@
-import { R as RateLimit, C as ChatRateLimit, S as Step, F as FailureFunctionPayload, L as LLMOwner, B as BaseProvider, E as EmailOwner, P as ProviderInfo } from './client-DuOcoFUv.js';
-export { A as AddEndpointsRequest, y as BodyInit, I as Chat, K as ChatCompletion, N as ChatCompletionChunk, J as ChatCompletionMessage, _ as ChatRequest, h as Client, p as CreateScheduleRequest, r as Endpoint, v as Event, w as EventPayload, g as EventsRequest, x as GetEventsPayload, G as GetEventsResponse, H as HTTPMethods, z as HeadersInit, M as Message, m as MessagePayload, n as Messages, Y as OpenAIChatModel, Z as PromptChatRequest, d as PublishBatchRequest, f as PublishJsonRequest, e as PublishRequest, l as PublishResponse, i as PublishToApiResponse, k as PublishToUrlGroupsResponse, j as PublishToUrlResponse, Q as QueueRequest, c as Receiver, a as ReceiverConfig, s as RemoveEndpointsRequest, D as RequestOptions, o as Schedule, q as Schedules, b as SignatureError, u as State, T as StreamDisabled, O as StreamEnabled, X as StreamParameter, U as UrlGroup, t as UrlGroups, V as VerifyRequest, W as WithCursor, a1 as anthropic, a2 as custom, a0 as openai, $ as upstash } from './client-DuOcoFUv.js';
+import { R as RateLimit, C as ChatRateLimit, S as Step, F as FailureFunctionPayload, L as LLMOwner, B as BaseProvider, E as EmailOwner, P as ProviderInfo } from './client-CYwLcEcQ.js';
+export { A as AddEndpointsRequest, K as BodyInit, X as Chat, Z as ChatCompletion, _ as ChatCompletionChunk, Y as ChatCompletionMessage, a4 as ChatRequest, j as Client, r as CreateScheduleRequest, t as Endpoint, y as Event, D as EventPayload, h as EventsRequest, T as FlowControl, J as GetEventsPayload, i as GetEventsResponse, I as GetLogsPayload, G as GetLogsResponse, H as HTTPMethods, N as HeadersInit, x as Log, z as LogPayload, g as LogsRequest, M as Message, o as MessagePayload, p as Messages, a2 as OpenAIChatModel, a3 as PromptChatRequest, d as PublishBatchRequest, f as PublishJsonRequest, e as PublishRequest, n as PublishResponse, k as PublishToApiResponse, m as PublishToUrlGroupsResponse, l as PublishToUrlResponse, Q as QueueRequest, c as Receiver, a as ReceiverConfig, u as RemoveEndpointsRequest, O as RequestOptions, q as Schedule, s as Schedules, b as SignatureError, w as State, a0 as StreamDisabled, $ as StreamEnabled, a1 as StreamParameter, U as UrlGroup, v as UrlGroups, V as VerifyRequest, W as WithCursor, a7 as anthropic, a8 as custom, a6 as openai, a5 as upstash } from './client-CYwLcEcQ.js';
 import 'neverthrow';
 
 /**