hirefire-resource 1.1.0 → 1.2.0
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/README.md +2 -1
- package/package.json +2 -1
- package/src/macro/bullmq.js +9 -1
- package/src/middleware/next.js +47 -0
package/CHANGELOG.md
CHANGED
package/README.md
CHANGED
@@ -8,6 +8,7 @@ This library integrates Node.js applications with HireFire's Dyno Managers (Hero
 - Koa
 - Connect
 - Fastify
+- Next.js
 - Sails
 - Nest

@@ -17,7 +18,7 @@ This library integrates Node.js applications with HireFire's Dyno Managers (Hero

 ---

-Since 2011, over 1,
+Since 2011, over 1,500 companies have trusted [HireFire] to autoscale more than 5,000 applications hosted on [Heroku], managing over 10,000 web and worker dynos.

 HireFire is distinguished by its support for both web and worker dynos, extending autoscaling capabilities to Standard-tier dynos. It provides fine-grained control over scaling behavior and improves scaling accuracy by monitoring more reliable metrics at the application level. These metrics include request queue time (web), job queue latency (worker), and job queue size (worker), which contribute to making more effective scaling decisions.

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "hirefire-resource",
-  "version": "1.1.0",
+  "version": "1.2.0",
   "description": "HireFire integration library for Node.js applications",
   "keywords": [
     "hirefire",
@@ -25,6 +25,7 @@
     "./middleware/express": "./src/middleware/express.js",
     "./middleware/koa": "./src/middleware/koa.js",
     "./middleware/fastify": "./src/middleware/fastify.js",
+    "./middleware/next": "./src/middleware/next.js",
     "./macro/bullmq": "./src/macro/bullmq.js"
   },
   "repository": {
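The new "./middleware/next" entry in the exports map exposes the Next.js integration as a package subpath. A minimal consumer sketch (assuming CommonJS; the named exports match the module.exports of src/middleware/next.js shown below):

// Import the Next.js integration through the subpath added in 1.2.0
const { middleware, withHireFire } = require("hirefire-resource/middleware/next")

An ESM `import { withHireFire } from "hirefire-resource/middleware/next"` resolves through the same exports map.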
package/src/macro/bullmq.js
CHANGED
@@ -25,7 +25,11 @@ async function jobQueueLatency(...args) {
  * function will use the value of the `REDIS_TLS_URL`, `REDIS_URL`,
  * `REDISTOGO_URL`, `REDISCLOUD_URL`, `OPENREDIS_URL` environment
  * variables, in the order specified. If none of these environment
- * variables are set, it defaults to `redis://localhost:6379/0`.
+ * variables are set, it defaults to `redis://localhost:6379/0`. The
+ * `options` object can also include a `connectionOptions` property,
+ * which is passed as the second argument to the `IORedis` constructor,
+ * allowing for further customization of the Redis connection (e.g., TLS
+ * options, retry strategies).
  * @returns {Promise<number>} Cumulative job queue size across the specified queues.
  * @example
  * // Calculate size across all queues
@@ -39,6 +43,9 @@ async function jobQueueLatency(...args) {
  * @example
  * // Calculate Size using the options.connection property
  * await jobQueueSize("default", { connection: "redis://localhost:6379/0" })
+ * @example
+ * // Calculate Size using the options.connectionOptions property
+ * await jobQueueSize("default", { connectionOptions: { tls: { rejectUnauthorized: false } } })
  */
 async function jobQueueSize(...args) {
   let { queues, options } = unpack(args)
@@ -51,6 +58,7 @@ async function jobQueueSize(...args) {
       process.env.REDISCLOUD_URL ||
       process.env.OPENREDIS_URL ||
       "redis://localhost:6379/0",
+    options.connectionOptions,
   )

   if (queues.length === 0) {
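Per the updated JSDoc, options.connectionOptions is forwarded as the second argument to the IORedis constructor. A short usage sketch, assuming the "./macro/bullmq" subpath exports jobQueueSize (only the function definition appears in this diff) and that REDIS_TLS_URL points at a TLS (rediss://) endpoint; the TLS settings are illustrative:

// Sketch only: jobQueueSize export is assumed from the "./macro/bullmq" subpath.
const { jobQueueSize } = require("hirefire-resource/macro/bullmq")

async function reportDefaultQueueSize() {
  // connectionOptions is handed to `new IORedis(url, connectionOptions)`,
  // here relaxing certificate verification for a TLS endpoint.
  const size = await jobQueueSize("default", {
    connection: process.env.REDIS_TLS_URL,
    connectionOptions: { tls: { rejectUnauthorized: false } },
  })
  console.log(`default queue size: ${size}`)
}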
package/src/middleware/next.js
ADDED
@@ -0,0 +1,47 @@
+const { RequestInfo, request } = require("../middleware")
+
+async function middleware(nextRequest) {
+  const { NextResponse } = require("next/server")
+
+  const response = await request(
+    new RequestInfo(
+      nextRequest.nextUrl.pathname,
+      nextRequest.headers.get("X-Request-Start"),
+      nextRequest.headers.get("HireFire-Token"),
+    ),
+  )
+
+  if (response) {
+    return NextResponse.json(response.body, {
+      status: response.status,
+      headers: response.headers,
+    })
+  }
+
+  return NextResponse.next()
+}
+
+function withHireFire(userMiddleware) {
+  return async function wrappedMiddleware(nextRequest, event) {
+    const { NextResponse } = require("next/server")
+
+    const response = await request(
+      new RequestInfo(
+        nextRequest.nextUrl.pathname,
+        nextRequest.headers.get("X-Request-Start"),
+        nextRequest.headers.get("HireFire-Token"),
+      ),
+    )
+
+    if (response) {
+      return NextResponse.json(response.body, {
+        status: response.status,
+        headers: response.headers,
+      })
+    }
+
+    return userMiddleware(nextRequest, event)
+  }
+}
+
+module.exports = { middleware, withHireFire }
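The diff does not show how an application consumes this middleware. A minimal sketch under two assumptions: the standard Next.js convention of a middleware.js file at the project root, and ESM imports resolving the new "./middleware/next" subpath from the exports map:

// middleware.js at the Next.js project root (hypothetical consumer sketch)
import { NextResponse } from "next/server"
import { withHireFire } from "hirefire-resource/middleware/next"

// Application middleware; only runs when the HireFire handler produced no
// response for the incoming request.
async function appMiddleware(request, event) {
  return NextResponse.next()
}

// withHireFire answers HireFire check requests first and delegates all other
// traffic to appMiddleware, as shown in wrappedMiddleware above.
export const middleware = withHireFire(appMiddleware)

An app without its own middleware could instead re-export the provided handler directly: export { middleware } from "hirefire-resource/middleware/next".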