@constructive-io/knative-job-worker 0.5.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md ADDED
@@ -0,0 +1,12 @@
+ # Change Log
+
+ All notable changes to this project will be documented in this file.
+ See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
+
+ ## 0.5.9 (2025-12-18)
+
+ **Note:** Version bump only for package @constructive-io/knative-job-worker
+
+ ## [0.5.8](https://github.com/constructive-io/jobs/compare/@launchql/knative-job-worker@0.5.7...@launchql/knative-job-worker@0.5.8) (2025-12-17)
+
+ **Note:** Version bump only for package @launchql/knative-job-worker
package/LICENSE ADDED
@@ -0,0 +1,23 @@
+ The MIT License (MIT)
+
+ Copyright (c) 2025 Dan Lynch <pyramation@gmail.com>
+ Copyright (c) 2025 Constructive <developers@constructive.io>
+ Copyright (c) 2020-present, Interweb, Inc.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,3 @@
+ # knative-job-worker
+
+ Knative-compatible job worker that uses the existing Constructive PostgreSQL job queue and job utilities, invoking HTTP functions via `KNATIVE_SERVICE_URL` (or `INTERNAL_GATEWAY_URL` as a fallback) while preserving the same headers and payload shape as the OpenFaaS worker.
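For reference, a minimal sketch of the HTTP call this worker makes per job, mirroring `src/req.ts` and the headers asserted in the tests below; the URL, ids, and payload here are illustrative, and the real values come from `KNATIVE_SERVICE_URL` / `INTERNAL_GATEWAY_URL`, `INTERNAL_JOBS_CALLBACK_URL`, and the job row:

```ts
// Sketch only: the shape of the POST the worker issues for a queued job.
import request from 'request';

request.post(
  {
    url: 'http://knative.internal/example-fn', // `${base}/${task_identifier}`
    json: true,
    body: { hello: 'world' },                  // the job's payload
    headers: {
      'Content-Type': 'application/json',
      'X-Worker-Id': 'worker-0',               // this worker's id (HOSTNAME)
      'X-Job-Id': 42,                          // job id from the queue
      'X-Database-Id': 'db-123',               // job's database_id
      'X-Callback-Url': 'http://callback.internal/jobs-complete' // async completion callback
    }
  },
  (err) => {
    // the worker rejects (and later fails the job) if the POST errors
    if (err) throw err;
  }
);
```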
@@ -0,0 +1,130 @@
+ const postMock = jest.fn();
+
+ jest.mock('request', () => ({
+   __esModule: true,
+   default: { post: postMock },
+   post: postMock
+ }));
+
+ describe('knative request wrapper', () => {
+   beforeEach(() => {
+     jest.resetModules();
+     postMock.mockReset();
+
+     process.env.PGUSER = 'postgres';
+     process.env.PGHOST = 'localhost';
+     process.env.PGPASSWORD = 'password';
+     process.env.PGPORT = '5432';
+     process.env.PGDATABASE = 'jobs';
+     process.env.JOBS_SCHEMA = 'app_jobs';
+     process.env.INTERNAL_JOBS_CALLBACK_URL =
+       'http://callback.internal/jobs-complete';
+     process.env.NODE_ENV = 'test';
+     delete process.env.INTERNAL_GATEWAY_URL;
+     delete process.env.KNATIVE_SERVICE_URL;
+     delete process.env.INTERNAL_GATEWAY_DEVELOPMENT_MAP;
+   });
+
+   it('uses KNATIVE_SERVICE_URL as base and preserves headers and body', async () => {
+     process.env.KNATIVE_SERVICE_URL = 'http://knative.internal';
+
+     postMock.mockImplementation(
+       (options: any, callback: (err: any) => void) => callback(null)
+     );
+
+     const { request } = await import('../src/req');
+
+     await request('example-fn', {
+       body: { value: 1 },
+       databaseId: 'db-123',
+       workerId: 'worker-1',
+       jobId: 42
+     });
+
+     expect(postMock).toHaveBeenCalledTimes(1);
+     const [options] = postMock.mock.calls[0];
+
+     expect(options.url).toBe('http://knative.internal/example-fn');
+     expect(options.headers['Content-Type']).toBe('application/json');
+     expect(options.headers['X-Worker-Id']).toBe('worker-1');
+     expect(options.headers['X-Job-Id']).toBe(42);
+     expect(options.headers['X-Database-Id']).toBe('db-123');
+     expect(options.headers['X-Callback-Url']).toBe(
+       'http://callback.internal/jobs-complete'
+     );
+     expect(options.body).toEqual({ value: 1 });
+   });
+
+   it('falls back to INTERNAL_GATEWAY_URL when KNATIVE_SERVICE_URL is not set', async () => {
+     process.env.INTERNAL_GATEWAY_URL =
+       'http://gateway.internal/async-function';
+
+     postMock.mockImplementation(
+       (options: any, callback: (err: any) => void) => callback(null)
+     );
+
+     const { request } = await import('../src/req');
+
+     await request('example-fn', {
+       body: { value: 2 },
+       databaseId: 'db-456',
+       workerId: 'worker-2',
+       jobId: 43
+     });
+
+     const [options] = postMock.mock.calls[0];
+     expect(options.url).toBe(
+       'http://gateway.internal/async-function/example-fn'
+     );
+   });
+
+   it('uses development map override when provided', async () => {
+     process.env.KNATIVE_SERVICE_URL = 'http://knative.internal';
+     process.env.INTERNAL_GATEWAY_DEVELOPMENT_MAP = JSON.stringify({
+       'example-fn': 'http://localhost:3000/dev-fn'
+     });
+     process.env.NODE_ENV = 'development';
+
+     postMock.mockImplementation(
+       (options: any, callback: (err: any) => void) => callback(null)
+     );
+
+     const { request } = await import('../src/req');
+
+     await request('example-fn', {
+       body: {},
+       databaseId: 'db-789',
+       workerId: 'worker-3',
+       jobId: 44
+     });
+
+     const [options] = postMock.mock.calls[0];
+     expect(options.url).toBe('http://localhost:3000/dev-fn');
+   });
+
+   it('rejects when HTTP request errors', async () => {
+     process.env.KNATIVE_SERVICE_URL = 'http://knative.internal';
+
+     postMock.mockImplementation(
+       (options: any, callback: (err: any) => void) =>
+         callback(new Error('network failure'))
+     );
+
+     const { request } = await import('../src/req');
+
+     await expect(
+       request('example-fn', {
+         body: {},
+         databaseId: 'db-000',
+         workerId: 'worker-4',
+         jobId: 45
+       })
+     ).rejects.toThrow('network failure');
+   });
+
+   it('throws on startup when no base URL env vars are set', async () => {
+     await expect(import('../src/env')).rejects.toThrow(
+       /KNATIVE_SERVICE_URL \(or INTERNAL_GATEWAY_URL as fallback\) is required/
+     );
+   });
+ });
@@ -0,0 +1,117 @@
+ process.env.KNATIVE_SERVICE_URL =
+   process.env.KNATIVE_SERVICE_URL || 'knative.internal';
+ process.env.INTERNAL_JOBS_CALLBACK_URL =
+   process.env.INTERNAL_JOBS_CALLBACK_URL ||
+   'http://callback.internal/jobs-complete';
+
+ const postMock = jest.fn();
+
+ jest.mock('request', () => ({
+   __esModule: true,
+   default: { post: postMock },
+   post: postMock
+ }));
+
+ jest.mock('@constructive-io/job-pg', () => ({
+   __esModule: true,
+   default: {
+     getPool: jest.fn(),
+     onClose: jest.fn(),
+     close: jest.fn()
+   }
+ }));
+
+ import path from 'path';
+ import { getConnections, seed } from 'pgsql-test';
+ import type { PgTestClient } from 'pgsql-test/test-client';
+ import * as jobUtils from '@constructive-io/job-utils';
+ import Worker from '../src';
+
+ let db: PgTestClient;
+ let teardown: () => Promise<void>;
+
+ beforeAll(async () => {
+   const modulePath = path.resolve(
+     __dirname,
+     '../../../extensions/@pgpm/database-jobs'
+   );
+   ({ db, teardown } = await getConnections({}, [seed.loadPgpm(modulePath)]));
+   db.setContext({ role: 'administrator' });
+ });
+
+ afterAll(async () => {
+   await teardown();
+ });
+
+ beforeEach(async () => {
+   await db.beforeEach();
+   postMock.mockReset();
+ });
+
+ afterEach(async () => {
+   await db.afterEach();
+ });
+
+ describe('knative worker integration with job queue', () => {
+   const databaseId = '5b720132-17d5-424d-9bcb-ee7b17c13d43';
+
+   it('pulls a job from the queue and posts to Knative service', async () => {
+     const insertedJob = await db.one(
+       'SELECT * FROM app_jobs.add_job($1::uuid, $2::text, $3::json);',
+       [databaseId, 'example-fn', { hello: 'world' }]
+     );
+
+     const worker = new Worker({
+       tasks: ['example-fn'],
+       pgPool: {} as any,
+       workerId: 'worker-integration-1'
+     });
+
+     const job = await jobUtils.getJob(db as any, {
+       workerId: 'worker-integration-1',
+       supportedTaskNames: null
+     });
+
+     expect(job).toBeTruthy();
+     expect(job.id).toBe(insertedJob.id);
+
+     postMock.mockImplementation(
+       (options: any, callback: (err: any) => void) => callback(null)
+     );
+
+     await (worker as any).doWork(job);
+
+     expect(postMock).toHaveBeenCalledTimes(1);
+     const [options] = postMock.mock.calls[0];
+     expect(options.url).toBe('http://example-fn.knative.internal');
+     expect(options.headers['X-Job-Id']).toBe(job.id);
+     expect(options.headers['X-Database-Id']).toBe(databaseId);
+   });
+
+   it('propagates errors from failed Knative calls while keeping job queue interactions intact', async () => {
+     await db.one(
+       'SELECT * FROM app_jobs.add_job($1::uuid, $2::text, $3::json);',
+       [databaseId, 'example-fn', { hello: 'world' }]
+     );
+
+     const worker = new Worker({
+       tasks: ['example-fn'],
+       pgPool: {} as any,
+       workerId: 'worker-integration-2'
+     });
+
+     const job = await jobUtils.getJob(db as any, {
+       workerId: 'worker-integration-2',
+       supportedTaskNames: null
+     });
+
+     postMock.mockImplementation(
+       (options: any, callback: (err: any) => void) =>
+         callback(new Error('knative failure'))
+     );
+
+     await expect((worker as any).doWork(job)).rejects.toThrow(
+       'knative failure'
+     );
+   });
+ });
package/dist/env.d.ts ADDED
@@ -0,0 +1,20 @@
+ declare const _default: {
+     KNATIVE_SERVICE_URL: string;
+     INTERNAL_GATEWAY_DEVELOPMENT_MAP: string;
+     PGUSER: string;
+     PGHOST: string;
+     PGPASSWORD: string;
+     PGPORT: number;
+     PGDATABASE: string;
+     JOBS_SCHEMA: string;
+     JOBS_SUPPORT_ANY: boolean;
+     JOBS_SUPPORTED: string[];
+     HOSTNAME: string;
+     INTERNAL_JOBS_CALLBACK_URL: string;
+     isDevelopment: boolean;
+     isDev: boolean;
+     isTest: boolean;
+     isProduction: boolean;
+     isProd: boolean;
+ };
+ export default _default;
package/dist/env.js ADDED
@@ -0,0 +1,28 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const envalid_1 = require("envalid");
+ const array = (0, envalid_1.makeValidator)((x) => x.split(',').filter((i) => i), '');
+ const baseEnv = (0, envalid_1.cleanEnv)(process.env, {
+     PGUSER: (0, envalid_1.str)({ default: 'postgres' }),
+     PGHOST: (0, envalid_1.str)({ default: 'localhost' }),
+     PGPASSWORD: (0, envalid_1.str)({ default: 'password' }),
+     PGPORT: (0, envalid_1.port)({ default: 5432 }),
+     PGDATABASE: (0, envalid_1.str)({ default: 'jobs' }),
+     JOBS_SCHEMA: (0, envalid_1.str)({ default: 'app_jobs' }),
+     JOBS_SUPPORT_ANY: (0, envalid_1.bool)({ default: true }),
+     JOBS_SUPPORTED: array({ default: '' }),
+     HOSTNAME: (0, envalid_1.str)({
+         default: 'worker-0'
+     }),
+     INTERNAL_JOBS_CALLBACK_URL: (0, envalid_1.url)()
+ }, { dotEnvPath: null });
+ const KNATIVE_SERVICE_URL = process.env.KNATIVE_SERVICE_URL;
+ if (!KNATIVE_SERVICE_URL) {
+     throw new Error('KNATIVE_SERVICE_URL (or INTERNAL_GATEWAY_URL as fallback) is required for @launchql/knative-job-worker');
+ }
+ const INTERNAL_GATEWAY_DEVELOPMENT_MAP = process.env.INTERNAL_GATEWAY_DEVELOPMENT_MAP;
+ exports.default = {
+     ...baseEnv,
+     KNATIVE_SERVICE_URL,
+     INTERNAL_GATEWAY_DEVELOPMENT_MAP
+ };
@@ -0,0 +1,41 @@
+ import type { PgClientLike } from '@constructive-io/job-utils';
+ import type { Pool } from 'pg';
+ export interface JobRow {
+     id: number | string;
+     task_identifier: string;
+     payload?: unknown;
+     database_id?: string;
+ }
+ export default class Worker {
+     idleDelay: number;
+     supportedTaskNames: string[];
+     workerId: string;
+     doNextTimer?: NodeJS.Timeout;
+     pgPool: Pool;
+     _initialized?: boolean;
+     constructor({ tasks, idleDelay, pgPool, workerId }: {
+         tasks: string[];
+         idleDelay?: number;
+         pgPool?: Pool;
+         workerId?: string;
+     });
+     initialize(client: PgClientLike): Promise<void>;
+     handleFatalError(client: PgClientLike, { err, fatalError, jobId }: {
+         err?: Error;
+         fatalError: unknown;
+         jobId: JobRow['id'];
+     }): Promise<void>;
+     handleError(client: PgClientLike, { err, job, duration }: {
+         err: Error;
+         job: JobRow;
+         duration: string;
+     }): Promise<void>;
+     handleSuccess(client: PgClientLike, { job, duration }: {
+         job: JobRow;
+         duration: string;
+     }): Promise<void>;
+     doWork(job: JobRow): Promise<void>;
+     doNext(client: PgClientLike): Promise<void>;
+     listen(): void;
+ }
+ export { Worker };
package/dist/index.js ADDED
@@ -0,0 +1,198 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     var desc = Object.getOwnPropertyDescriptor(m, k);
+     if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+         desc = { enumerable: true, get: function() { return m[k]; } };
+     }
+     Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+     Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+     o["default"] = v;
+ });
+ var __importStar = (this && this.__importStar) || (function () {
+     var ownKeys = function(o) {
+         ownKeys = Object.getOwnPropertyNames || function (o) {
+             var ar = [];
+             for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+             return ar;
+         };
+         return ownKeys(o);
+     };
+     return function (mod) {
+         if (mod && mod.__esModule) return mod;
+         var result = {};
+         if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+         __setModuleDefault(result, mod);
+         return result;
+     };
+ })();
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.Worker = void 0;
+ const job_pg_1 = __importDefault(require("@constructive-io/job-pg"));
+ const jobs = __importStar(require("@constructive-io/job-utils"));
+ const logger_1 = require("@pgpmjs/logger");
+ const req_1 = require("./req");
+ const log = new logger_1.Logger('jobs:worker');
+ class Worker {
+     idleDelay;
+     supportedTaskNames;
+     workerId;
+     doNextTimer;
+     pgPool;
+     _initialized;
+     constructor({ tasks, idleDelay = 15000, pgPool = job_pg_1.default.getPool(), workerId = 'worker-0' }) {
+         /*
+          * idleDelay: This is how long to wait between polling for jobs.
+          *
+          * Note: this does NOT need to be short, because we use LISTEN/NOTIFY to be
+          * notified when new jobs are added - this is just used in the case where
+          * LISTEN/NOTIFY fails for whatever reason.
+          */
+         this.idleDelay = idleDelay;
+         this.supportedTaskNames = tasks;
+         this.workerId = workerId;
+         this.doNextTimer = undefined;
+         this.pgPool = pgPool;
+         job_pg_1.default.onClose(async () => {
+             await jobs.releaseJobs(pgPool, { workerId: this.workerId });
+         });
+     }
+     async initialize(client) {
+         if (this._initialized === true)
+             return;
+         // release any jobs not finished from before if fatal error prevented cleanup
+         await jobs.releaseJobs(client, { workerId: this.workerId });
+         this._initialized = true;
+         await this.doNext(client);
+     }
+     async handleFatalError(client, { err, fatalError, jobId }) {
+         const when = err ? `after failure '${err.message}'` : 'after success';
+         log.error(`Failed to release job '${jobId}' ${when}; committing seppuku`);
+         await job_pg_1.default.close();
+         log.error(String(fatalError));
+         process.exit(1);
+     }
+     async handleError(client, { err, job, duration }) {
+         log.error(`Failed task ${job.id} (${job.task_identifier}) with error ${err.message} (${duration}ms)`);
+         if (err.stack) {
+             log.debug(err.stack);
+         }
+         await jobs.failJob(client, {
+             workerId: this.workerId,
+             jobId: job.id,
+             message: err.message
+         });
+     }
+     async handleSuccess(client, { job, duration }) {
+         log.info(`Async task ${job.id} (${job.task_identifier}) to be processed`);
+     }
+     async doWork(job) {
+         const { payload, task_identifier } = job;
+         log.debug('starting work on job', {
+             id: job.id,
+             task: task_identifier,
+             databaseId: job.database_id
+         });
+         if (!jobs.getJobSupportAny() &&
+             !this.supportedTaskNames.includes(task_identifier)) {
+             throw new Error('Unsupported task');
+         }
+         await (0, req_1.request)(task_identifier, {
+             body: payload,
+             databaseId: job.database_id,
+             workerId: this.workerId,
+             jobId: job.id
+         });
+     }
+     async doNext(client) {
+         if (!this._initialized) {
+             return await this.initialize(client);
+         }
+         log.debug('checking for jobs...');
+         if (this.doNextTimer) {
+             clearTimeout(this.doNextTimer);
+             this.doNextTimer = undefined;
+         }
+         try {
+             const job = (await jobs.getJob(client, {
+                 workerId: this.workerId,
+                 supportedTaskNames: jobs.getJobSupportAny()
+                     ? null
+                     : this.supportedTaskNames
+             }));
+             if (!job || !job.id) {
+                 this.doNextTimer = setTimeout(() => this.doNext(client), this.idleDelay);
+                 return;
+             }
+             const start = process.hrtime();
+             let err = null;
+             try {
+                 await this.doWork(job);
+             }
+             catch (error) {
+                 err = error;
+             }
+             const durationRaw = process.hrtime(start);
+             const duration = ((durationRaw[0] * 1e9 + durationRaw[1]) / 1e6).toFixed(2);
+             const jobId = job.id;
+             try {
+                 if (err) {
+                     await this.handleError(client, { err, job, duration });
+                 }
+                 else {
+                     await this.handleSuccess(client, { job, duration });
+                 }
+             }
+             catch (fatalError) {
+                 await this.handleFatalError(client, { err, fatalError, jobId });
+             }
+             return this.doNext(client);
+         }
+         catch (err) {
+             this.doNextTimer = setTimeout(() => this.doNext(client), this.idleDelay);
+         }
+     }
+     listen() {
+         const listenForChanges = (err, client, release) => {
+             if (err) {
+                 log.error('Error connecting with notify listener', err);
+                 if (err instanceof Error && err.stack) {
+                     log.debug(err.stack);
+                 }
+                 // Try again in 5 seconds
+                 // should this really be done in the node process?
+                 setTimeout(this.listen, 5000);
+                 return;
+             }
+             client.on('notification', () => {
+                 if (this.doNextTimer) {
+                     // Must be idle, do something!
+                     this.doNext(client);
+                 }
+             });
+             client.query('LISTEN "jobs:insert"');
+             client.on('error', (e) => {
+                 log.error('Error with database notify listener', e);
+                 if (e instanceof Error && e.stack) {
+                     log.debug(e.stack);
+                 }
+                 release();
+                 this.listen();
+             });
+             log.info(`${this.workerId} connected and looking for jobs...`);
+             this.doNext(client);
+         };
+         this.pgPool.connect(listenForChanges);
+     }
+ }
+ exports.default = Worker;
+ exports.Worker = Worker;
package/dist/req.d.ts ADDED
@@ -0,0 +1,8 @@
+ interface RequestOptions {
+     body: unknown;
+     databaseId: string;
+     workerId: string;
+     jobId: string | number;
+ }
+ declare const request: (fn: string, { body, databaseId, workerId, jobId }: RequestOptions) => Promise<boolean>;
+ export { request };
package/dist/req.js ADDED
@@ -0,0 +1,61 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.request = void 0;
+ const request_1 = __importDefault(require("request"));
+ const job_utils_1 = require("@constructive-io/job-utils");
+ const logger_1 = require("@pgpmjs/logger");
+ const log = new logger_1.Logger('jobs:req');
+ // callback URL for job completion
+ const completeUrl = (0, job_utils_1.getCallbackBaseUrl)();
+ // Development override map (e.g. point a function name at localhost)
+ const nodeEnv = (0, job_utils_1.getNodeEnvironment)();
+ const DEV_MAP = nodeEnv !== 'production' ? (0, job_utils_1.getJobGatewayDevMap)() : null;
+ const getFunctionUrl = (fn) => {
+     if (DEV_MAP && DEV_MAP[fn]) {
+         return DEV_MAP[fn] || completeUrl;
+     }
+     const { gatewayUrl } = (0, job_utils_1.getJobGatewayConfig)();
+     const base = gatewayUrl.replace(/\/$/, '');
+     return `${base}/${fn}`;
+ };
+ const request = (fn, { body, databaseId, workerId, jobId }) => {
+     const url = getFunctionUrl(fn);
+     log.info(`dispatching job`, {
+         fn,
+         url,
+         callbackUrl: completeUrl,
+         workerId,
+         jobId,
+         databaseId
+     });
+     return new Promise((resolve, reject) => {
+         request_1.default.post({
+             headers: {
+                 'Content-Type': 'application/json',
+                 // these are used by job-worker/job-fn
+                 'X-Worker-Id': workerId,
+                 'X-Job-Id': jobId,
+                 'X-Database-Id': databaseId,
+                 // async HTTP completion callback
+                 'X-Callback-Url': completeUrl
+             },
+             url,
+             json: true,
+             body
+         }, function (error) {
+             if (error) {
+                 log.error(`request error for job[${jobId}] fn[${fn}]`, error);
+                 if (error instanceof Error && error.stack) {
+                     log.debug(error.stack);
+                 }
+                 return reject(error);
+             }
+             log.debug(`request success for job[${jobId}] fn[${fn}]`);
+             return resolve(true);
+         });
+     });
+ };
+ exports.request = request;
package/dist/run.d.ts ADDED
@@ -0,0 +1,2 @@
+ #!/usr/bin/env node
+ export {};
package/dist/run.js ADDED
@@ -0,0 +1,16 @@
+ #!/usr/bin/env node
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const index_1 = __importDefault(require("./index"));
+ const job_pg_1 = __importDefault(require("@constructive-io/job-pg"));
+ const job_utils_1 = require("@constructive-io/job-utils");
+ const pgPool = job_pg_1.default.getPool();
+ const worker = new index_1.default({
+     pgPool,
+     workerId: (0, job_utils_1.getWorkerHostname)(),
+     tasks: (0, job_utils_1.getJobSupported)()
+ });
+ worker.listen();
package/jest.config.js ADDED
@@ -0,0 +1,18 @@
+ /** @type {import('ts-jest').JestConfigWithTsJest} */
+ module.exports = {
+   preset: 'ts-jest',
+   testEnvironment: 'node',
+   transform: {
+     '^.+\\.tsx?$': [
+       'ts-jest',
+       {
+         babelConfig: false,
+         tsconfig: 'tsconfig.json',
+       },
+     ],
+   },
+   transformIgnorePatterns: [`/node_modules/*`],
+   testRegex: '(/__tests__/.*|(\\.|/)(test|spec))\\.(jsx?|tsx?)$',
+   moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'],
+   modulePathIgnorePatterns: ['dist/*'],
+ };
package/package.json ADDED
@@ -0,0 +1,45 @@
+ {
+   "name": "@constructive-io/knative-job-worker",
+   "version": "0.5.9",
+   "description": "knative job worker",
+   "author": "Constructive <developers@constructive.io>",
+   "homepage": "https://github.com/constructive-io/jobs/tree/master/packages/knative-job-worker#readme",
+   "license": "SEE LICENSE IN LICENSE",
+   "main": "dist/index.js",
+   "directories": {
+     "lib": "src",
+     "test": "__tests__"
+   },
+   "bin": {
+     "faas-jobs": "src/run.ts",
+     "knative-jobs": "src/run.ts"
+   },
+   "publishConfig": {
+     "access": "public"
+   },
+   "repository": {
+     "type": "git",
+     "url": "https://github.com/constructive-io/jobs"
+   },
+   "scripts": {
+     "test": "jest",
+     "test:watch": "jest --watch",
+     "test:debug": "node --inspect node_modules/.bin/jest --runInBand",
+     "build": "tsc -p tsconfig.json",
+     "build:watch": "tsc -p tsconfig.json -w"
+   },
+   "bugs": {
+     "url": "https://github.com/constructive-io/jobs/issues"
+   },
+   "dependencies": {
+     "@constructive-io/job-pg": "^0.3.5",
+     "@constructive-io/job-utils": "^0.5.0",
+     "@pgpmjs/logger": "^1.3.3",
+     "pg": "8.16.3",
+     "request": "2.88.2"
+   },
+   "devDependencies": {
+     "pgsql-test": "^2.17.1"
+   },
+   "gitHead": "86d74dc4fce9051df0d2b5bcc163607aba42f009"
+ }
package/src/index.ts ADDED
@@ -0,0 +1,210 @@
+ import poolManager from '@constructive-io/job-pg';
+ import * as jobs from '@constructive-io/job-utils';
+ import type { PgClientLike } from '@constructive-io/job-utils';
+ import type { Pool, PoolClient } from 'pg';
+ import { Logger } from '@pgpmjs/logger';
+ import { request as req } from './req';
+
+ export interface JobRow {
+   id: number | string;
+   task_identifier: string;
+   payload?: unknown;
+   database_id?: string;
+ }
+
+ const log = new Logger('jobs:worker');
+
+ export default class Worker {
+   idleDelay: number;
+   supportedTaskNames: string[];
+   workerId: string;
+   doNextTimer?: NodeJS.Timeout;
+   pgPool: Pool;
+   _initialized?: boolean;
+
+   constructor({
+     tasks,
+     idleDelay = 15000,
+     pgPool = poolManager.getPool(),
+     workerId = 'worker-0'
+   }: {
+     tasks: string[];
+     idleDelay?: number;
+     pgPool?: Pool;
+     workerId?: string;
+   }) {
+     /*
+      * idleDelay: This is how long to wait between polling for jobs.
+      *
+      * Note: this does NOT need to be short, because we use LISTEN/NOTIFY to be
+      * notified when new jobs are added - this is just used in the case where
+      * LISTEN/NOTIFY fails for whatever reason.
+      */
+
+     this.idleDelay = idleDelay;
+     this.supportedTaskNames = tasks;
+     this.workerId = workerId;
+     this.doNextTimer = undefined;
+     this.pgPool = pgPool;
+     poolManager.onClose(async () => {
+       await jobs.releaseJobs(pgPool, { workerId: this.workerId });
+     });
+   }
+   async initialize(client: PgClientLike) {
+     if (this._initialized === true) return;
+
+     // release any jobs not finished from before if fatal error prevented cleanup
+     await jobs.releaseJobs(client, { workerId: this.workerId });
+
+     this._initialized = true;
+     await this.doNext(client);
+   }
+   async handleFatalError(
+     client: PgClientLike,
+     {
+       err,
+       fatalError,
+       jobId
+     }: { err?: Error; fatalError: unknown; jobId: JobRow['id'] }
+   ) {
+     const when = err ? `after failure '${err.message}'` : 'after success';
+     log.error(`Failed to release job '${jobId}' ${when}; committing seppuku`);
+     await poolManager.close();
+     log.error(String(fatalError));
+     process.exit(1);
+   }
+   async handleError(
+     client: PgClientLike,
+     { err, job, duration }: { err: Error; job: JobRow; duration: string }
+   ) {
+     log.error(
+       `Failed task ${job.id} (${job.task_identifier}) with error ${err.message} (${duration}ms)`
+     );
+     if (err.stack) {
+       log.debug(err.stack);
+     }
+     await jobs.failJob(client, {
+       workerId: this.workerId,
+       jobId: job.id,
+       message: err.message
+     });
+   }
+   async handleSuccess(
+     client: PgClientLike,
+     { job, duration }: { job: JobRow; duration: string }
+   ) {
+     log.info(
+       `Async task ${job.id} (${job.task_identifier}) to be processed`
+     );
+   }
+   async doWork(job: JobRow) {
+     const { payload, task_identifier } = job;
+     log.debug('starting work on job', {
+       id: job.id,
+       task: task_identifier,
+       databaseId: job.database_id
+     });
+     if (
+       !jobs.getJobSupportAny() &&
+       !this.supportedTaskNames.includes(task_identifier)
+     ) {
+       throw new Error('Unsupported task');
+     }
+     await req(task_identifier, {
+       body: payload,
+       databaseId: job.database_id,
+       workerId: this.workerId,
+       jobId: job.id
+     });
+   }
+   async doNext(client: PgClientLike): Promise<void> {
+     if (!this._initialized) {
+       return await this.initialize(client);
+     }
+
+     log.debug('checking for jobs...');
+     if (this.doNextTimer) {
+       clearTimeout(this.doNextTimer);
+       this.doNextTimer = undefined;
+     }
+     try {
+       const job = (await jobs.getJob<JobRow>(client, {
+         workerId: this.workerId,
+         supportedTaskNames: jobs.getJobSupportAny()
+           ? null
+           : this.supportedTaskNames
+       })) as JobRow | undefined;
+
+       if (!job || !job.id) {
+         this.doNextTimer = setTimeout(
+           () => this.doNext(client),
+           this.idleDelay
+         );
+         return;
+       }
+       const start = process.hrtime();
+
+       let err: Error | null = null;
+       try {
+         await this.doWork(job);
+       } catch (error) {
+         err = error as Error;
+       }
+       const durationRaw = process.hrtime(start);
+       const duration = ((durationRaw[0] * 1e9 + durationRaw[1]) / 1e6).toFixed(
+         2
+       );
+       const jobId = job.id;
+       try {
+         if (err) {
+           await this.handleError(client, { err, job, duration });
+         } else {
+           await this.handleSuccess(client, { job, duration });
+         }
+       } catch (fatalError: unknown) {
+         await this.handleFatalError(client, { err, fatalError, jobId });
+       }
+       return this.doNext(client);
+     } catch (err: unknown) {
+       this.doNextTimer = setTimeout(() => this.doNext(client), this.idleDelay);
+     }
+   }
+   listen() {
+     const listenForChanges = (
+       err: Error | null,
+       client: PoolClient,
+       release: () => void
+     ) => {
+       if (err) {
+         log.error('Error connecting with notify listener', err);
+         if (err instanceof Error && err.stack) {
+           log.debug(err.stack);
+         }
+         // Try again in 5 seconds
+         // should this really be done in the node process?
+         setTimeout(this.listen, 5000);
+         return;
+       }
+       client.on('notification', () => {
+         if (this.doNextTimer) {
+           // Must be idle, do something!
+           this.doNext(client);
+         }
+       });
+       client.query('LISTEN "jobs:insert"');
+       client.on('error', (e: unknown) => {
+         log.error('Error with database notify listener', e);
+         if (e instanceof Error && e.stack) {
+           log.debug(e.stack);
+         }
+         release();
+         this.listen();
+       });
+       log.info(`${this.workerId} connected and looking for jobs...`);
+       this.doNext(client);
+     };
+     this.pgPool.connect(listenForChanges);
+   }
+ }
+
+ export { Worker };
package/src/req.ts ADDED
@@ -0,0 +1,82 @@
+ import requestLib from 'request';
+ import {
+   getCallbackBaseUrl,
+   getJobGatewayConfig,
+   getJobGatewayDevMap,
+   getNodeEnvironment
+ } from '@constructive-io/job-utils';
+ import { Logger } from '@pgpmjs/logger';
+
+ const log = new Logger('jobs:req');
+
+ // callback URL for job completion
+ const completeUrl = getCallbackBaseUrl();
+
+ // Development override map (e.g. point a function name at localhost)
+ const nodeEnv = getNodeEnvironment();
+ const DEV_MAP = nodeEnv !== 'production' ? getJobGatewayDevMap() : null;
+
+ const getFunctionUrl = (fn: string): string => {
+   if (DEV_MAP && DEV_MAP[fn]) {
+     return DEV_MAP[fn] || completeUrl;
+   }
+
+   const { gatewayUrl } = getJobGatewayConfig();
+   const base = gatewayUrl.replace(/\/$/, '');
+   return `${base}/${fn}`;
+ };
+
+ interface RequestOptions {
+   body: unknown;
+   databaseId: string;
+   workerId: string;
+   jobId: string | number;
+ }
+
+ const request = (
+   fn: string,
+   { body, databaseId, workerId, jobId }: RequestOptions
+ ) => {
+   const url = getFunctionUrl(fn);
+   log.info(`dispatching job`, {
+     fn,
+     url,
+     callbackUrl: completeUrl,
+     workerId,
+     jobId,
+     databaseId
+   });
+   return new Promise<boolean>((resolve, reject) => {
+     requestLib.post(
+       {
+         headers: {
+           'Content-Type': 'application/json',
+
+           // these are used by job-worker/job-fn
+           'X-Worker-Id': workerId,
+           'X-Job-Id': jobId,
+           'X-Database-Id': databaseId,
+
+           // async HTTP completion callback
+           'X-Callback-Url': completeUrl
+         },
+         url,
+         json: true,
+         body
+       },
+       function (error: unknown) {
+         if (error) {
+           log.error(`request error for job[${jobId}] fn[${fn}]`, error);
+           if (error instanceof Error && error.stack) {
+             log.debug(error.stack);
+           }
+           return reject(error);
+         }
+         log.debug(`request success for job[${jobId}] fn[${fn}]`);
+         return resolve(true);
+       }
+     );
+   });
+ };
+
+ export { request };
package/src/run.ts ADDED
@@ -0,0 +1,18 @@
+ #!/usr/bin/env node
+
+ import Worker from './index';
+ import poolManager from '@constructive-io/job-pg';
+ import {
+   getWorkerHostname,
+   getJobSupported
+ } from '@constructive-io/job-utils';
+
+ const pgPool = poolManager.getPool();
+
+ const worker = new Worker({
+   pgPool,
+   workerId: getWorkerHostname(),
+   tasks: getJobSupported()
+ });
+
+ worker.listen();
@@ -0,0 +1,9 @@
+ {
+   "extends": "./tsconfig.json",
+   "compilerOptions": {
+     "outDir": "dist/esm",
+     "module": "es2022",
+     "rootDir": "src/",
+     "declaration": false
+   }
+ }
package/tsconfig.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "extends": "../../tsconfig.json",
+   "compilerOptions": {
+     "outDir": "dist",
+     "rootDir": "src/"
+   },
+   "include": ["src/**/*.ts", "../../types/**/*.d.ts"],
+   "exclude": ["dist", "node_modules", "**/*.spec.*", "**/*.test.*"]
+ }