@balena/pinejs 17.2.0-build-add-large-file-uploads-interfaces-79c46b87ed3b7ae630ed6e3f9aad554c6647e8f7-1 → 17.2.0-build-joshbwlng-tasks-2f4d8f53b469a2eb62ffb71cb3f9e82258e9430c-1
Sign up to get free protection for your applications and to get access to all the features.
- package/.pinejs-cache.json +1 -1
- package/.versionbot/CHANGELOG.yml +72 -4
- package/CHANGELOG.md +33 -2
- package/out/bin/utils.js +2 -2
- package/out/config-loader/config-loader.js +1 -1
- package/out/config-loader/env.d.ts +4 -1
- package/out/config-loader/env.js +5 -1
- package/out/config-loader/env.js.map +1 -1
- package/out/data-server/sbvr-server.d.ts +1 -1
- package/out/data-server/sbvr-server.js.map +1 -1
- package/out/database-layer/db.d.ts +3 -1
- package/out/database-layer/db.js +19 -2
- package/out/database-layer/db.js.map +1 -1
- package/out/extended-sbvr-parser/extended-sbvr-parser.d.ts +2 -2
- package/out/http-transactions/transactions.d.ts +1 -1
- package/out/http-transactions/transactions.js.map +1 -1
- package/out/migrator/utils.js +4 -4
- package/out/migrator/utils.js.map +1 -1
- package/out/passport-pinejs/passport-pinejs.d.ts +0 -1
- package/out/pinejs-session-store/pinejs-session-store.d.ts +6 -8
- package/out/sbvr-api/abstract-sql.d.ts +2 -2
- package/out/sbvr-api/errors.d.ts +2 -2
- package/out/sbvr-api/hooks.d.ts +5 -43
- package/out/sbvr-api/odata-response.d.ts +1 -1
- package/out/sbvr-api/odata-response.js +2 -2
- package/out/sbvr-api/odata-response.js.map +1 -1
- package/out/sbvr-api/permissions.d.ts +2 -2
- package/out/sbvr-api/permissions.js +2 -2
- package/out/sbvr-api/permissions.js.map +1 -1
- package/out/sbvr-api/sbvr-utils.d.ts +10 -11
- package/out/sbvr-api/sbvr-utils.js +4 -2
- package/out/sbvr-api/sbvr-utils.js.map +1 -1
- package/out/sbvr-api/uri-parser.d.ts +3 -6
- package/out/sbvr-api/uri-parser.js +2 -2
- package/out/sbvr-api/uri-parser.js.map +1 -1
- package/out/server-glue/module.d.ts +1 -0
- package/out/server-glue/module.js +4 -1
- package/out/server-glue/module.js.map +1 -1
- package/out/tasks/common.d.ts +4 -0
- package/out/tasks/common.js +11 -0
- package/out/tasks/common.js.map +1 -0
- package/out/tasks/index.d.ts +7 -0
- package/out/tasks/index.js +160 -0
- package/out/tasks/index.js.map +1 -0
- package/out/tasks/out.d.ts +40 -0
- package/out/tasks/out.js +3 -0
- package/out/tasks/out.js.map +1 -0
- package/out/tasks/tasks.sbvr +55 -0
- package/out/tasks/types.d.ts +40 -0
- package/out/tasks/types.js +3 -0
- package/out/tasks/types.js.map +1 -0
- package/out/tasks/worker.d.ts +32 -0
- package/out/tasks/worker.js +204 -0
- package/out/tasks/worker.js.map +1 -0
- package/out/webresource-handler/index.d.ts +0 -26
- package/out/webresource-handler/index.js +2 -3
- package/out/webresource-handler/index.js.map +1 -1
- package/package.json +10 -7
- package/src/bin/utils.ts +2 -2
- package/src/config-loader/config-loader.ts +1 -1
- package/src/config-loader/env.ts +6 -1
- package/src/database-layer/db.ts +25 -0
- package/src/sbvr-api/sbvr-utils.ts +2 -1
- package/src/server-glue/module.ts +3 -0
- package/src/tasks/common.ts +9 -0
- package/src/tasks/index.ts +176 -0
- package/src/tasks/out.ts +46 -0
- package/src/tasks/tasks.sbvr +55 -0
- package/src/tasks/types.ts +44 -0
- package/src/tasks/worker.ts +282 -0
- package/src/webresource-handler/index.ts +2 -38
@@ -6,6 +6,7 @@ import * as dbModule from '../database-layer/db';
|
|
6
6
|
import * as configLoader from '../config-loader/config-loader';
|
7
7
|
import * as migrator from '../migrator/sync';
|
8
8
|
import type * as migratorUtils from '../migrator/utils';
|
9
|
+
import * as tasks from '../tasks';
|
9
10
|
|
10
11
|
import * as sbvrUtils from '../sbvr-api/sbvr-utils';
|
11
12
|
import { PINEJS_ADVISORY_LOCK } from '../config-loader/env';
|
@@ -19,6 +20,7 @@ export * as errors from '../sbvr-api/errors';
|
|
19
20
|
export * as env from '../config-loader/env';
|
20
21
|
export * as types from '../sbvr-api/common-types';
|
21
22
|
export * as hooks from '../sbvr-api/hooks';
|
23
|
+
export * as tasks from '../tasks';
|
22
24
|
export * as webResourceHandler from '../webresource-handler';
|
23
25
|
export type { configLoader as ConfigLoader };
|
24
26
|
export type { migratorUtils as Migrator };
|
@@ -63,6 +65,7 @@ export const init = async <T extends string>(
|
|
63
65
|
await sbvrUtils.setup(app, db);
|
64
66
|
const cfgLoader = await configLoader.setup(app);
|
65
67
|
await cfgLoader.loadConfig(migrator.config);
|
68
|
+
await cfgLoader.loadConfig(tasks.config);
|
66
69
|
|
67
70
|
const promises: Array<Promise<void>> = [];
|
68
71
|
if (process.env.SBVR_SERVER_ENABLED) {
|
@@ -0,0 +1,176 @@
|
|
1
|
+
import type { Schema } from 'ajv';
|
2
|
+
import * as cronParser from 'cron-parser';
|
3
|
+
import { tasks as tasksEnv } from '../config-loader/env';
|
4
|
+
import { BadRequestError } from '../sbvr-api/errors';
|
5
|
+
import { addPureHook } from '../sbvr-api/hooks';
|
6
|
+
import * as sbvrUtils from '../sbvr-api/sbvr-utils';
|
7
|
+
import type { ConfigLoader } from '../server-glue/module';
|
8
|
+
import { ajv, apiRoot, channel } from './common';
|
9
|
+
import type { TaskHandler } from './worker';
|
10
|
+
import { Worker } from './worker';
|
11
|
+
|
12
|
+
export * from './types';
|
13
|
+
|
14
|
+
// The tasks SBVR model is shipped alongside the compiled output, so it is
// loaded via require rather than an ES import.
// eslint-disable-next-line @typescript-eslint/no-var-requires
const modelText: string = require('./tasks.sbvr');

// Create trigger for handling new tasks
// Create index for polling tasks table
// The trigger NOTIFYs on the worker's channel for immediately-runnable tasks
// (queued, no scheduled start time); scheduled tasks are picked up by polling
// instead. The partial index covers exactly the worker's polling predicate.
const initSql = `
CREATE OR REPLACE FUNCTION notify_task_insert()
RETURNS TRIGGER AS $$
BEGIN
	PERFORM pg_notify('${channel}', NEW.id::text);
	RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE TRIGGER task_insert_trigger
AFTER INSERT ON task
FOR EACH ROW WHEN (NEW.status = 'queued' AND NEW."is scheduled to execute on-time" IS NULL)
EXECUTE FUNCTION notify_task_insert();

CREATE INDEX IF NOT EXISTS idx_task_poll ON task USING btree (
	"is executed by-handler",
	"is scheduled to execute on-time" ASC,
	"id" ASC
) WHERE status = 'queued';
`;

// Config consumed by the pinejs config-loader: registers the tasks model
// (and the SQL above) under the tasks API root. `customServerCode: exports`
// lets the loader call this module's own `setup()` during init.
export const config: ConfigLoader.Config = {
	models: [
		{
			modelName: apiRoot,
			apiRoot,
			modelText,
			customServerCode: exports,
			initSql,
		},
	],
};
|
51
|
+
|
52
|
+
let worker: Worker | null = null;
|
53
|
+
export async function setup(): Promise<void> {
|
54
|
+
// Async task functionality is only supported on Postgres
|
55
|
+
if (sbvrUtils.db.engine !== 'postgres') {
|
56
|
+
console.warn('Skipping task setup as database not supported');
|
57
|
+
return;
|
58
|
+
}
|
59
|
+
|
60
|
+
const client = sbvrUtils.api[apiRoot];
|
61
|
+
worker = new Worker(client);
|
62
|
+
|
63
|
+
// Add resource hooks
|
64
|
+
addPureHook('POST', apiRoot, 'task', {
|
65
|
+
POSTPARSE: async ({ req, request }) => {
|
66
|
+
// Set the actor
|
67
|
+
request.values.is_created_by__actor =
|
68
|
+
req.user?.actor ?? req.apiKey?.actor;
|
69
|
+
if (request.values.is_created_by__actor == null) {
|
70
|
+
throw new BadRequestError(
|
71
|
+
'Creating tasks with missing actor on req is not allowed',
|
72
|
+
);
|
73
|
+
}
|
74
|
+
|
75
|
+
// Set defaults
|
76
|
+
request.values.status = 'queued';
|
77
|
+
request.values.attempt_count = 0;
|
78
|
+
request.values.attempt_limit ??= 1;
|
79
|
+
|
80
|
+
// Set scheduled start time using cron expression if provided
|
81
|
+
if (
|
82
|
+
request.values.is_scheduled_with__cron_expression != null &&
|
83
|
+
request.values.is_scheduled_to_execute_on__time == null
|
84
|
+
) {
|
85
|
+
try {
|
86
|
+
request.values.is_scheduled_to_execute_on__time = cronParser
|
87
|
+
.parseExpression(request.values.is_scheduled_with__cron_expression)
|
88
|
+
.next()
|
89
|
+
.toDate()
|
90
|
+
.toISOString();
|
91
|
+
} catch {
|
92
|
+
throw new BadRequestError(
|
93
|
+
`Invalid cron expression: ${request.values.is_scheduled_with__cron_expression}`,
|
94
|
+
);
|
95
|
+
}
|
96
|
+
}
|
97
|
+
|
98
|
+
// Assert that the provided start time is far enough in the future
|
99
|
+
if (request.values.is_scheduled_to_execute_on__time != null) {
|
100
|
+
const now = new Date(Date.now() + tasksEnv.queueIntervalMS);
|
101
|
+
const startTime = new Date(
|
102
|
+
request.values.is_scheduled_to_execute_on__time,
|
103
|
+
);
|
104
|
+
if (startTime < now) {
|
105
|
+
throw new BadRequestError(
|
106
|
+
`Task scheduled start time must be greater than ${tasksEnv.queueIntervalMS} milliseconds in the future`,
|
107
|
+
);
|
108
|
+
}
|
109
|
+
}
|
110
|
+
|
111
|
+
// Assert that the requested handler exists
|
112
|
+
const handlerName = request.values.is_executed_by__handler;
|
113
|
+
if (handlerName == null) {
|
114
|
+
throw new BadRequestError(`Must specify a task handler to execute`);
|
115
|
+
}
|
116
|
+
const handler = worker?.handlers[handlerName];
|
117
|
+
if (handler == null) {
|
118
|
+
throw new BadRequestError(
|
119
|
+
`No task handler with name '${handlerName}' registered`,
|
120
|
+
);
|
121
|
+
}
|
122
|
+
|
123
|
+
// Assert that the provided parameter set is valid
|
124
|
+
if (handler.validate != null) {
|
125
|
+
if (!handler.validate(request.values.is_executed_with__parameter_set)) {
|
126
|
+
throw new BadRequestError(
|
127
|
+
`Invalid parameter set: ${ajv.errorsText(handler.validate.errors)}`,
|
128
|
+
);
|
129
|
+
}
|
130
|
+
}
|
131
|
+
},
|
132
|
+
});
|
133
|
+
addPureHook('all', apiRoot, 'task', {
|
134
|
+
// Convert bigints to strings in responses
|
135
|
+
PRERESPOND: async ({ response }) => {
|
136
|
+
if (typeof response.body === 'object') {
|
137
|
+
convertBigIntsToStrings(response.body);
|
138
|
+
}
|
139
|
+
},
|
140
|
+
});
|
141
|
+
worker.start();
|
142
|
+
}
|
143
|
+
|
144
|
+
// Recursively stringify bigints in an object
|
145
|
+
function convertBigIntsToStrings(obj: any): void {
|
146
|
+
for (const [key, value] of Object.entries(obj)) {
|
147
|
+
if (value != null) {
|
148
|
+
const typeOfValue = typeof value;
|
149
|
+
if (typeOfValue === 'bigint') {
|
150
|
+
obj[key] = value.toString();
|
151
|
+
} else if (typeOfValue === 'object') {
|
152
|
+
convertBigIntsToStrings(value);
|
153
|
+
}
|
154
|
+
}
|
155
|
+
}
|
156
|
+
}
|
157
|
+
|
158
|
+
// Register a task handler
|
159
|
+
export function addTaskHandler(
|
160
|
+
name: string,
|
161
|
+
fn: TaskHandler['fn'],
|
162
|
+
schema?: Schema,
|
163
|
+
): void {
|
164
|
+
if (worker == null) {
|
165
|
+
throw new Error('Database does not support tasks');
|
166
|
+
}
|
167
|
+
|
168
|
+
if (worker.handlers[name] != null) {
|
169
|
+
throw new Error(`Task handler with name '${name}' already registered`);
|
170
|
+
}
|
171
|
+
worker.handlers[name] = {
|
172
|
+
name,
|
173
|
+
fn,
|
174
|
+
validate: schema != null ? ajv.compile(schema) : undefined,
|
175
|
+
};
|
176
|
+
}
|
package/src/tasks/out.ts
ADDED
@@ -0,0 +1,46 @@
|
|
1
|
+
// These types were generated by @balena/abstract-sql-to-typescript v3.2.1
|
2
|
+
|
3
|
+
import type { Types } from '@balena/abstract-sql-to-typescript';
|
4
|
+
|
5
|
+
// Shape of the `task` resource as exposed by the tasks model, split into the
// database read form and the writable form.
export interface Task {
	Read: {
		created_at: Types['Date Time']['Read'];
		modified_at: Types['Date Time']['Read'];
		id: Types['Big Serial']['Read'];
		key: Types['Short Text']['Read'] | null;
		is_created_by__actor: Types['Integer']['Read'];
		is_executed_by__handler: Types['Short Text']['Read'];
		is_executed_with__parameter_set: Types['JSON']['Read'] | null;
		is_scheduled_with__cron_expression: Types['Short Text']['Read'] | null;
		is_scheduled_to_execute_on__time: Types['Date Time']['Read'] | null;
		// Lifecycle states as constrained by the SBVR model definition
		status: 'queued' | 'cancelled' | 'succeeded' | 'failed';
		started_on__time: Types['Date Time']['Read'] | null;
		ended_on__time: Types['Date Time']['Read'] | null;
		error_message: Types['Short Text']['Read'] | null;
		attempt_count: Types['Integer']['Read'];
		attempt_limit: Types['Integer']['Read'];
	};
	Write: {
		created_at: Types['Date Time']['Write'];
		modified_at: Types['Date Time']['Write'];
		id: Types['Big Serial']['Write'];
		key: Types['Short Text']['Write'] | null;
		is_created_by__actor: Types['Integer']['Write'];
		is_executed_by__handler: Types['Short Text']['Write'];
		is_executed_with__parameter_set: Types['JSON']['Write'] | null;
		is_scheduled_with__cron_expression: Types['Short Text']['Write'] | null;
		is_scheduled_to_execute_on__time: Types['Date Time']['Write'] | null;
		status: 'queued' | 'cancelled' | 'succeeded' | 'failed';
		started_on__time: Types['Date Time']['Write'] | null;
		ended_on__time: Types['Date Time']['Write'] | null;
		error_message: Types['Short Text']['Write'] | null;
		attempt_count: Types['Integer']['Write'];
		attempt_limit: Types['Integer']['Write'];
	};
}

// Generated model map: resource name -> resource shape.
export default interface $Model {
	task: Task;
}
|
@@ -0,0 +1,55 @@
|
|
1
|
+
Vocabulary: tasks
|
2
|
+
|
3
|
+
Term: id
|
4
|
+
Concept Type: Big Serial (Type)
|
5
|
+
Term: actor
|
6
|
+
Concept Type: Integer (Type)
|
7
|
+
Term: attempt count
|
8
|
+
Concept Type: Integer (Type)
|
9
|
+
Term: attempt limit
|
10
|
+
Concept Type: Integer (Type)
|
11
|
+
Term: cron expression
|
12
|
+
Concept Type: Short Text (Type)
|
13
|
+
Term: error message
|
14
|
+
Concept Type: Short Text (Type)
|
15
|
+
Term: handler
|
16
|
+
Concept Type: Short Text (Type)
|
17
|
+
Term: key
|
18
|
+
Concept Type: Short Text (Type)
|
19
|
+
Term: parameter set
|
20
|
+
Concept Type: JSON (Type)
|
21
|
+
Term: status
|
22
|
+
Concept Type: Short Text (Type)
|
23
|
+
Term: time
|
24
|
+
Concept Type: Date Time (Type)
|
25
|
+
|
26
|
+
Term: task
|
27
|
+
Fact type: task has id
|
28
|
+
Necessity: each task has exactly one id
|
29
|
+
Fact type: task has key
|
30
|
+
Necessity: each task has at most one key
|
31
|
+
Fact type: task is created by actor
|
32
|
+
Necessity: each task is created by exactly one actor
|
33
|
+
Fact type: task is executed by handler
|
34
|
+
Necessity: each task is executed by exactly one handler
|
35
|
+
Fact type: task is executed with parameter set
|
36
|
+
Necessity: each task is executed with at most one parameter set
|
37
|
+
Fact type: task is scheduled with cron expression
|
38
|
+
Necessity: each task is scheduled with at most one cron expression
|
39
|
+
Fact type: task is scheduled to execute on time
|
40
|
+
Necessity: each task is scheduled to execute on at most one time
|
41
|
+
Fact type: task has status
|
42
|
+
Necessity: each task has exactly one status
|
43
|
+
Definition: "queued" or "cancelled" or "succeeded" or "failed"
|
44
|
+
Fact type: task started on time
|
45
|
+
Necessity: each task started on at most one time
|
46
|
+
Fact type: task ended on time
|
47
|
+
Necessity: each task ended on at most one time
|
48
|
+
Fact type: task has error message
|
49
|
+
Necessity: each task has at most one error message
|
50
|
+
Fact type: task has attempt count
|
51
|
+
Necessity: each task has exactly one attempt count
|
52
|
+
Fact type: task has attempt limit
|
53
|
+
Necessity: each task has exactly one attempt limit
|
54
|
+
Necessity: each task has an attempt limit that is greater than or equal to 1
|
55
|
+
|
@@ -0,0 +1,44 @@
|
|
1
|
+
// These types were generated by @balena/abstract-sql-to-typescript v3.2.1
|
2
|
+
|
3
|
+
import type { Types } from '@balena/abstract-sql-to-typescript';
|
4
|
+
|
5
|
+
// Shape of the `task` resource as exposed by the tasks model, split into the
// database read form and the writable form.
export interface Task {
	Read: {
		created_at: Types['Date Time']['Read'];
		modified_at: Types['Date Time']['Read'];
		id: Types['Big Serial']['Read'];
		key: Types['Short Text']['Read'] | null;
		is_created_by__actor: Types['Integer']['Read'];
		is_executed_by__handler: Types['Short Text']['Read'];
		is_executed_with__parameter_set: Types['JSON']['Read'] | null;
		is_scheduled_with__cron_expression: Types['Short Text']['Read'] | null;
		is_scheduled_to_execute_on__time: Types['Date Time']['Read'] | null;
		// Lifecycle states as constrained by the SBVR model definition
		status: 'queued' | 'cancelled' | 'succeeded' | 'failed';
		started_on__time: Types['Date Time']['Read'] | null;
		ended_on__time: Types['Date Time']['Read'] | null;
		error_message: Types['Short Text']['Read'] | null;
		attempt_count: Types['Integer']['Read'];
		attempt_limit: Types['Integer']['Read'];
	};
	Write: {
		created_at: Types['Date Time']['Write'];
		modified_at: Types['Date Time']['Write'];
		id: Types['Big Serial']['Write'];
		key: Types['Short Text']['Write'] | null;
		is_created_by__actor: Types['Integer']['Write'];
		is_executed_by__handler: Types['Short Text']['Write'];
		is_executed_with__parameter_set: Types['JSON']['Write'] | null;
		is_scheduled_with__cron_expression: Types['Short Text']['Write'] | null;
		is_scheduled_to_execute_on__time: Types['Date Time']['Write'] | null;
		status: 'queued' | 'cancelled' | 'succeeded' | 'failed';
		started_on__time: Types['Date Time']['Write'] | null;
		ended_on__time: Types['Date Time']['Write'] | null;
		error_message: Types['Short Text']['Write'] | null;
		attempt_count: Types['Integer']['Write'];
		attempt_limit: Types['Integer']['Write'];
	};
}

// Generated model map: resource name -> resource shape.
export default interface $Model {
	task: Task;
}
|
@@ -0,0 +1,282 @@
|
|
1
|
+
import type { ValidateFunction } from 'ajv';
|
2
|
+
import { setTimeout } from 'node:timers/promises';
|
3
|
+
import type { AnyObject } from 'pinejs-client-core';
|
4
|
+
import { tasks as tasksEnv } from '../config-loader/env';
|
5
|
+
import type * as Db from '../database-layer/db';
|
6
|
+
import * as permissions from '../sbvr-api/permissions';
|
7
|
+
import { PinejsClient } from '../sbvr-api/sbvr-utils';
|
8
|
+
import { sbvrUtils } from '../server-glue/module';
|
9
|
+
import { ajv, channel } from './common';
|
10
|
+
import type { Task } from './types';
|
11
|
+
|
12
|
+
// Arguments passed to every task handler invocation.
interface TaskArgs {
	// Client the handler can use for API calls during execution
	api: PinejsClient;
	// The task's `is_executed_with__parameter_set` (or {} when null)
	params: AnyObject;
}

// Result of a task handler: the final task status plus an optional error
// message to persist on failure.
type TaskResponse = Promise<{
	status: Task['Read']['status'];
	error?: string;
}>;

// A registered handler: its unique name, the function to run, and an optional
// pre-compiled AJV validator for the parameter set.
export interface TaskHandler {
	name: string;
	fn: (options: TaskArgs) => TaskResponse;
	validate?: ValidateFunction;
}

// Subset of task columns fetched by the worker's SELECT queries — just what
// execute()/update() need to run a task and record its outcome.
type PartialTask = Pick<
	Task['Read'],
	| 'id'
	| 'is_created_by__actor'
	| 'is_executed_by__handler'
	| 'is_executed_with__parameter_set'
	| 'is_scheduled_with__cron_expression'
	| 'attempt_count'
	| 'attempt_limit'
>;
|
38
|
+
|
39
|
+
// Map of column names with SBVR names used in SELECT queries
|
40
|
+
const selectColumns = Object.entries({
|
41
|
+
id: 'id',
|
42
|
+
'is executed by-handler': 'is_executed_by__handler',
|
43
|
+
'is executed with-parameter set': 'is_executed_with__parameter_set',
|
44
|
+
'is scheduled with-cron expression': 'is_scheduled_with__cron_expression',
|
45
|
+
'attempt count': 'attempt_count',
|
46
|
+
'attempt limit': 'attempt_limit',
|
47
|
+
'is created by-actor': 'is_created_by__actor',
|
48
|
+
})
|
49
|
+
.map(([key, value]) => `t."${key}" AS "${value}"`)
|
50
|
+
.join(', ');
|
51
|
+
|
52
|
+
// The worker is responsible for executing tasks in the queue. It listens for
// notifications and polls the database for tasks to execute. It will execute
// tasks in parallel up to a certain concurrency limit.
export class Worker {
	// Handlers keyed by name; populated externally via addTaskHandler()
	public handlers: Record<string, TaskHandler> = {};
	// Max tasks executed in parallel by this instance (from env config)
	private readonly concurrency: number;
	// Poll interval in milliseconds (from env config)
	private readonly interval: number;
	// Number of tasks currently in flight
	private executing = 0;

	constructor(private readonly client: PinejsClient) {
		this.concurrency = tasksEnv.queueConcurrency;
		this.interval = tasksEnv.queueIntervalMS;
	}

	// Check if instance can execute more tasks.
	// Also returns false when no handlers are registered, so we don't claim
	// tasks we cannot run.
	private canExecute(): boolean {
		return (
			this.executing < this.concurrency && Object.keys(this.handlers).length > 0
		);
	}

	// Run a single claimed task inside the caller's transaction `tx` (the row
	// is held via FOR UPDATE SKIP LOCKED by the caller) and persist the result.
	private async execute(task: PartialTask, tx: Db.Tx): Promise<void> {
		this.executing++;
		try {
			// Get specified handler
			const handler = this.handlers[task.is_executed_by__handler];
			const startedOnTime = new Date();
			if (handler == null) {
				await this.update(
					tx,
					task,
					startedOnTime,
					'failed',
					'Matching task handler not found',
				);
				return;
			}

			// Validate parameters before execution so we can fail early if
			// the parameter set is invalid. This can happen if the handler
			// definition changes after a task is added to the queue.
			if (
				handler.validate != null &&
				!handler.validate(task.is_executed_with__parameter_set)
			) {
				await this.update(
					tx,
					task,
					startedOnTime,
					'failed',
					`Invalid parameter set: ${ajv.errorsText(handler.validate.errors)}`,
				);
				return;
			}

			// Execute handler and update task with results.
			// NOTE(review): if handler.fn throws, `status` remains 'queued' when the
			// finally-block update runs, so the attempt count is bumped but the task
			// is not marked failed — confirm this is the intended retry semantics.
			let status: Task['Read']['status'] = 'queued';
			let error: string | undefined;
			try {
				const results = await handler.fn({
					api: new PinejsClient({}),
					params: task.is_executed_with__parameter_set ?? {},
				});
				status = results.status;
				error = results.error;
			} finally {
				await this.update(tx, task, startedOnTime, status, error);
			}
		} catch (err) {
			// This shouldn't happen, but if it does we want to log and kill the process
			console.error(
				`Failed to execute task ${task.id} with handler ${task.is_executed_by__handler}:`,
				err,
			);
			process.exit(1);
		} finally {
			this.executing--;
		}
	}

	// Update task and schedule next attempt if needed
	private async update(
		tx: Db.Tx,
		task: PartialTask,
		startedOnTime: Date,
		status: Task['Read']['status'],
		errorMessage?: string,
	): Promise<void> {
		const attemptCount = task.attempt_count + 1;
		const body: AnyObject = {
			started_on__time: startedOnTime,
			ended_on__time: new Date(),
			status,
			attempt_count: attemptCount,
			// Only persist an error message when one was provided
			...(errorMessage != null && { error_message: errorMessage }),
		};

		// Re-enqueue if the task failed but has retries left, remember that
		// attemptCount includes the initial attempt while attempt_limit does not
		if (status === 'failed' && attemptCount < task.attempt_limit) {
			body.status = 'queued';

			// Schedule next attempt using exponential backoff
			body.is_scheduled_to_execute_on__time =
				this.getNextAttemptTime(attemptCount);
		}

		// Patch current task
		await this.client.patch({
			resource: 'task',
			passthrough: {
				tx,
				req: permissions.root,
			},
			id: task.id,
			body,
		});

		// Create new task with same configuration if previous
		// iteration completed and has a cron expression.
		// (A failed-but-retrying task has body.status === 'queued' above, so it
		// does not spawn the next cron iteration until retries are exhausted.)
		if (
			['failed', 'succeeded'].includes(body.status) &&
			task.is_scheduled_with__cron_expression != null
		) {
			await this.client.post({
				resource: 'task',
				passthrough: {
					tx,
					req: permissions.root,
				},
				options: {
					returnResource: false,
				},
				body: {
					attempt_limit: task.attempt_limit,
					is_created_by__actor: task.is_created_by__actor,
					is_executed_by__handler: task.is_executed_by__handler,
					is_executed_with__parameter_set: task.is_executed_with__parameter_set,
					is_scheduled_with__cron_expression:
						task.is_scheduled_with__cron_expression,
				},
			});
		}
	}

	// Calculate next attempt time using exponential backoff.
	// NOTE(review): the computed delay is in milliseconds (added to Date.now()),
	// so e^attempt grows from ~3ms up to a cap of e^10 ≈ 22 seconds — confirm
	// milliseconds (rather than seconds) is the intended unit here.
	private getNextAttemptTime(attempt: number): Date | null {
		const delay = Math.ceil(Math.exp(Math.min(10, attempt)));
		return new Date(Date.now() + delay);
	}

	// Poll for tasks and execute them
	// This is recursive and is spawned once per concurrency limit.
	// The recursion happens inside the async IIFE's finally, so the stack does
	// not grow; when no task was executed we sleep one interval before retrying.
	private poll(): void {
		let executed = false;
		void (async () => {
			try {
				if (!this.canExecute()) {
					return;
				}
				const handlerNames = Object.keys(this.handlers);
				await sbvrUtils.db.transaction(async (tx) => {
					// Claim the single oldest runnable task with SKIP LOCKED so
					// concurrent workers never double-claim a row. Tasks scheduled up
					// to one interval ahead are also claimed (interval passed in
					// seconds as the trailing parameter).
					const result = await sbvrUtils.db.executeSql(
						`SELECT ${selectColumns}
						FROM task AS t
						WHERE
							t."is executed by-handler" IN (${handlerNames.map((_, index) => `$${index + 1}`).join(', ')}) AND
							t."status" = 'queued' AND
							t."attempt count" <= t."attempt limit" AND
							(
								t."is scheduled to execute on-time" IS NULL OR
								t."is scheduled to execute on-time" <= CURRENT_TIMESTAMP + $${handlerNames.length + 1} * INTERVAL '1 SECOND'
							)
						ORDER BY
							t."is scheduled to execute on-time" ASC,
							t."id" ASC
						LIMIT 1 FOR UPDATE SKIP LOCKED`,
						[...handlerNames, Math.ceil(this.interval / 1000)],
					);

					// Execute task if one was found
					if (result.rows.length > 0) {
						await this.execute(result.rows[0] as PartialTask, tx);
						executed = true;
					}
				});
			} catch (err) {
				console.error('Failed polling for tasks:', err);
			} finally {
				if (!executed) {
					await setTimeout(this.interval);
				}
				this.poll();
			}
		})();
	}

	// Start listening and polling for tasks
	public start(): void {
		// Tasks only support postgres for now
		if (sbvrUtils.db.engine !== 'postgres' || sbvrUtils.db.on == null) {
			throw new Error(
				'Database does not support tasks, giving up on starting worker',
			);
		}
		// React to pg_notify from the insert trigger: try to claim the notified
		// task immediately (SKIP LOCKED, so a row already claimed elsewhere is
		// silently skipped).
		sbvrUtils.db.on(
			'notification',
			async (msg) => {
				if (this.canExecute()) {
					await sbvrUtils.db.transaction(async (tx) => {
						const result = await sbvrUtils.db.executeSql(
							`SELECT ${selectColumns} FROM task AS t WHERE id = $1 FOR UPDATE SKIP LOCKED`,
							[msg.payload],
						);
						if (result.rows.length > 0) {
							await this.execute(result.rows[0] as PartialTask, tx);
						}
					});
				}
			},
			{
				channel,
			},
		);

		// Spawn children to poll for and execute tasks
		for (let i = 0; i < this.concurrency; i++) {
			this.poll();
		}
	}
}
|