@pgpm/database-jobs 0.15.3 → 0.15.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/Makefile
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@pgpm/database-jobs",
-  "version": "0.15.3",
+  "version": "0.15.4",
   "description": "Database-specific job handling and queue management",
   "author": "Dan Lynch <pyramation@gmail.com>",
   "contributors": [
@@ -21,10 +21,10 @@
     "test:watch": "jest --watch"
   },
   "devDependencies": {
-    "pgpm": "^1.
+    "pgpm": "^1.2.2"
   },
   "dependencies": {
-    "@pgpm/verify": "0.15.
+    "@pgpm/verify": "0.15.4"
   },
   "repository": {
     "type": "git",
@@ -34,5 +34,5 @@
   "bugs": {
     "url": "https://github.com/constructive-io/pgpm-modules/issues"
   },
-  "gitHead": "
+  "gitHead": "aad0dbef0336d6c18d027120ef9addc418822edd"
 }
@@ -1,19 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`scheduled jobs schedule jobs 1`] = `
-{
-  "attempts": 0,
-  "database_id": "5b720132-17d5-424d-9bcb-ee7b17c13d43",
-  "id": "1",
-  "key": null,
-  "last_error": null,
-  "locked_at": null,
-  "locked_by": null,
-  "max_attempts": 25,
-  "payload": {
-    "just": "run it",
-  },
-  "priority": 0,
-  "task_identifier": "my_job",
-}
-`;
package/__tests__/jobs.test.ts
DELETED
@@ -1,138 +0,0 @@
-import { getConnections, PgTestClient } from 'pgsql-test';
-
-let pg: PgTestClient;
-let teardown: () => Promise<void>;
-
-const database_id = '5b720132-17d5-424d-9bcb-ee7b17c13d43';
-const objs: Record<string, any> = {};
-
-describe('scheduled jobs', () => {
-  beforeAll(async () => {
-    ({ pg, teardown } = await getConnections());
-  });
-
-  afterAll(async () => {
-    await teardown();
-  });
-
-  it('schedule jobs by cron', async () => {
-    const result = await pg.one(
-      `INSERT INTO app_jobs.scheduled_jobs (database_id, task_identifier, schedule_info)
-       VALUES ($1, $2, $3)
-       RETURNING *`,
-      [
-        database_id,
-        'my_job',
-        {
-          hour: Array.from({ length: 23 }, (_, i) => i),
-          minute: [0, 15, 30, 45],
-          dayOfWeek: Array.from({ length: 6 }, (_, i) => i)
-        }
-      ]
-    );
-    objs.scheduled1 = result;
-  });
-
-  it('schedule jobs by rule', async () => {
-    const start = new Date(Date.now() + 10000); // 10s from now
-    const end = new Date(start.getTime() + 180000); // +3min
-
-    const result = await pg.one(
-      `INSERT INTO app_jobs.scheduled_jobs (database_id, task_identifier, payload, schedule_info)
-       VALUES ($1, $2, $3, $4)
-       RETURNING *`,
-      [
-        database_id,
-        'my_job',
-        { just: 'run it' },
-        { start, end, rule: '*/1 * * * *' }
-      ]
-    );
-    objs.scheduled2 = result;
-  });
-
-  it('schedule jobs', async () => {
-    const [result] = await pg.any(
-      `SELECT * FROM app_jobs.run_scheduled_job($1)`,
-      [objs.scheduled2.id]
-    );
-
-    const { queue_name, run_at, created_at, updated_at, ...obj } = result;
-    expect(obj).toMatchSnapshot();
-  });
-
-  it('schedule jobs with keys', async () => {
-    const start = new Date(Date.now() + 10000); // 10s
-    const end = new Date(start.getTime() + 180000); // +3min
-
-    const [result] = await pg.any(
-      `SELECT * FROM app_jobs.add_scheduled_job(
-        db_id := $1::uuid,
-        identifier := $2::text,
-        payload := $3::json,
-        schedule_info := $4::json,
-        job_key := $5::text,
-        queue_name := $6::text,
-        max_attempts := $7::integer,
-        priority := $8::integer
-      )`,
-      [
-        database_id,
-        'my_job',
-        { just: 'run it' },
-        { start, end, rule: '*/1 * * * *' },
-        'new_key',
-        null,
-        25,
-        0
-      ]
-    );
-
-    const {
-      queue_name,
-      run_at,
-      created_at,
-      updated_at,
-      schedule_info: sch,
-      start: s1,
-      end: d1,
-      ...obj
-    } = result;
-
-    const [result2] = await pg.any(
-      `SELECT * FROM app_jobs.add_scheduled_job(
-        db_id := $1,
-        identifier := $2,
-        payload := $3,
-        schedule_info := $4,
-        job_key := $5,
-        queue_name := $6,
-        max_attempts := $7,
-        priority := $8
-      )`,
-      [
-        database_id,
-        'my_job',
-        { just: 'run it' },
-        { start, end, rule: '*/1 * * * *' },
-        'new_key',
-        null,
-        25,
-        0
-      ]
-    );
-
-    const {
-      queue_name: qn,
-      created_at: ca,
-      updated_at: ua,
-      schedule_info: sch2,
-      start: s,
-      end: e,
-      ...obj2
-    } = result2;
-
-    console.log('First insert:', obj);
-    console.log('Duplicate insert (job_key conflict):', obj2);
-  });
-});