@dbos-inc/node-pg-datasource 3.0.6-preview
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +14 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +173 -0
- package/dist/index.js.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -0
- package/index.ts +240 -0
- package/jest.config.js +8 -0
- package/package.json +32 -0
- package/tests/config.test.ts +31 -0
- package/tests/datasource.test.ts +320 -0
- package/tests/test-helpers.ts +12 -0
- package/tsconfig.json +9 -0
package/index.ts
ADDED
@@ -0,0 +1,240 @@
// using https://github.com/brianc/node-postgres

import { DBOS, DBOSWorkflowConflictError } from '@dbos-inc/dbos-sdk';
import {
  type DataSourceTransactionHandler,
  createTransactionCompletionSchemaPG,
  createTransactionCompletionTablePG,
  isPGRetriableTransactionError,
  isPGKeyConflictError,
  registerTransaction,
  runTransaction,
  PGTransactionConfig as NodePostgresTransactionOptions,
  DBOSDataSource,
  registerDataSource,
} from '@dbos-inc/dbos-sdk/datasource';
import { Client, type ClientBase, type ClientConfig, Pool, type PoolConfig } from 'pg';
import { AsyncLocalStorage } from 'node:async_hooks';
import { SuperJSON } from 'superjson';

interface NodePostgresDataSourceContext {
  client: ClientBase;
}

export { NodePostgresTransactionOptions };

const asyncLocalCtx = new AsyncLocalStorage<NodePostgresDataSourceContext>();

class NodePGDSTH implements DataSourceTransactionHandler {
  readonly name: string;
  readonly dsType = 'NodePostgresDataSource';
  readonly #pool: Pool;

  constructor(name: string, config: PoolConfig) {
    this.name = name;
    this.#pool = new Pool(config);
  }

  initialize(): Promise<void> {
    return Promise.resolve();
  }

  destroy(): Promise<void> {
    return this.#pool.end();
  }

  static async #checkExecution(
    client: ClientBase,
    workflowID: string,
    functionNum: number,
  ): Promise<{ output: string | null } | undefined> {
    const { rows } = await client.query<{ output: string }>(
      /*sql*/ `SELECT output FROM dbos.transaction_completion
        WHERE workflow_id = $1 AND function_num = $2`,
      [workflowID, functionNum],
    );
    return rows.length > 0 ? { output: rows[0].output } : undefined;
  }

  static async #recordOutput(
    client: ClientBase,
    workflowID: string,
    functionNum: number,
    output: string | null,
  ): Promise<void> {
    try {
      await client.query(
        /*sql*/
        `INSERT INTO dbos.transaction_completion (workflow_id, function_num, output)
          VALUES ($1, $2, $3)`,
        [workflowID, functionNum, output],
      );
    } catch (error) {
      if (isPGKeyConflictError(error)) {
        throw new DBOSWorkflowConflictError(workflowID);
      } else {
        throw error;
      }
    }
  }

  async #transaction<Return>(
    func: (client: ClientBase) => Promise<Return>,
    config: NodePostgresTransactionOptions = {},
  ): Promise<Return> {
    const isolationLevel = config?.isolationLevel ? `ISOLATION LEVEL ${config.isolationLevel}` : '';
    const readOnly = config?.readOnly ?? false;
    const accessMode = config?.readOnly === undefined ? '' : readOnly ? 'READ ONLY' : 'READ WRITE';

    const client = await this.#pool.connect();
    try {
      await client.query(/*sql*/ `BEGIN ${isolationLevel} ${accessMode}`);
      const result = await func(client);
      await client.query(/*sql*/ `COMMIT`);
      return result;
    } catch (error) {
      await client.query(/*sql*/ `ROLLBACK`);
      throw error;
    } finally {
      client.release();
    }
  }

  async invokeTransactionFunction<This, Args extends unknown[], Return>(
    config: NodePostgresTransactionOptions | undefined,
    target: This,
    func: (this: This, ...args: Args) => Promise<Return>,
    ...args: Args
  ): Promise<Return> {
    const workflowID = DBOS.workflowID;
    if (workflowID === undefined) {
      throw new Error('Workflow ID is not set.');
    }
    const functionNum = DBOS.stepID;
    if (functionNum === undefined) {
      throw new Error('Function Number is not set.');
    }

    const readOnly = config?.readOnly ?? false;
    let retryWaitMS = 1;
    const backoffFactor = 1.5;
    const maxRetryWaitMS = 2000;

    while (true) {
      try {
        const result = await this.#transaction<Return>(
          async (client) => {
            // Check to see if this tx has already been executed
            const previousResult =
              readOnly || !workflowID ? undefined : await NodePGDSTH.#checkExecution(client, workflowID, functionNum);
            if (previousResult) {
              return (previousResult.output ? SuperJSON.parse(previousResult.output) : null) as Return;
            }

            // execute user's transaction function
            const result = await asyncLocalCtx.run({ client }, async () => {
              return (await func.call(target, ...args)) as Return;
            });

            // save the output of read/write transactions
            if (!readOnly && workflowID) {
              await NodePGDSTH.#recordOutput(client, workflowID, functionNum, SuperJSON.stringify(result));

              // Note, existing code wraps #recordOutput call in a try/catch block that
              // converts DB error with code 25P02 to DBOSFailedSqlTransactionError.
              // However, existing code doesn't make any logic decisions based on that error type.
              // DBOSFailedSqlTransactionError does stored WF ID and function name, so I assume that info is logged out somewhere
            }

            return result;
          },
          { isolationLevel: config?.isolationLevel, readOnly: config?.readOnly },
        );
        // TODO: span.setStatus({ code: SpanStatusCode.OK });
        // TODO: this.tracer.endSpan(span);

        return result;
      } catch (error) {
        if (isPGRetriableTransactionError(error)) {
          // TODO: span.addEvent('TXN SERIALIZATION FAILURE', { retryWaitMillis: retryWaitMillis }, performance.now());
          await new Promise((resolve) => setTimeout(resolve, retryWaitMS));
          retryWaitMS = Math.min(retryWaitMS * backoffFactor, maxRetryWaitMS);
          continue;
        } else {
          // TODO: span.setStatus({ code: SpanStatusCode.ERROR, message: e.message });
          // TODO: this.tracer.endSpan(span);

          // TODO: currently, we are *not* recording errors in the txOutput table.
          // For normal execution, this is fine because we also store tx step results (output and error) in the sysdb operation output table.
          // However, I'm concerned that we have a dueling execution hole where one tx fails while another succeeds.
          // This implies that we can end up in a situation where the step output records an error but the txOutput table records success.

          throw error;
        }
      }
    }
  }
}

export class NodePostgresDataSource implements DBOSDataSource<NodePostgresTransactionOptions> {
  static get client(): ClientBase {
    if (!DBOS.isInTransaction()) {
      throw new Error('invalid use of NodePostgresDataSource.client outside of a DBOS transaction.');
    }
    const ctx = asyncLocalCtx.getStore();
    if (!ctx) {
      throw new Error('No async local context found.');
    }
    return ctx.client;
  }

  static async initializeInternalSchema(config: ClientConfig): Promise<void> {
    const client = new Client(config);
    try {
      await client.connect();
      await client.query(createTransactionCompletionSchemaPG);
      await client.query(createTransactionCompletionTablePG);
    } finally {
      await client.end();
    }
  }

  readonly name: string;
  #provider: NodePGDSTH;

  constructor(name: string, config: PoolConfig) {
    this.name = name;
    this.#provider = new NodePGDSTH(name, config);
    registerDataSource(this.#provider);
  }

  async runTransaction<T>(callback: () => Promise<T>, funcName: string, config?: NodePostgresTransactionOptions) {
    return await runTransaction(callback, funcName, { dsName: this.name, config });
  }

  registerTransaction<This, Args extends unknown[], Return>(
    func: (this: This, ...args: Args) => Promise<Return>,
    name: string,
    config?: NodePostgresTransactionOptions,
  ): (this: This, ...args: Args) => Promise<Return> {
    return registerTransaction(this.name, func, { name }, config);
  }

  transaction(config?: NodePostgresTransactionOptions) {
    // eslint-disable-next-line @typescript-eslint/no-this-alias
    const ds = this;
    return function decorator<This, Args extends unknown[], Return>(
      _target: object,
      propertyKey: string,
      descriptor: TypedPropertyDescriptor<(this: This, ...args: Args) => Promise<Return>>,
    ) {
      if (!descriptor.value) {
        throw Error('Use of decorator when original method is undefined');
      }

      descriptor.value = ds.registerTransaction(descriptor.value, propertyKey.toString(), config);

      return descriptor;
    };
  }
}
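
For orientation, here is a minimal usage sketch of the API defined in index.ts, consistent with the tests later in this diff. The table name, database settings, and application name are illustrative assumptions, not part of the package.

// Hedged usage sketch (not part of the package): wiring NodePostgresDataSource into a DBOS app.
// The "greetings" table, database config, and app name are assumptions for illustration.
import { DBOS } from '@dbos-inc/dbos-sdk';
import { NodePostgresDataSource } from '@dbos-inc/node-pg-datasource';

const config = { user: 'postgres', database: 'app_db' };
const dataSource = new NodePostgresDataSource('app-db', config);

// Transaction functions obtain the active client via the static accessor.
async function greet(name: string) {
  const { rows } = await NodePostgresDataSource.client.query<{ greet_count: number }>(
    `INSERT INTO greetings(name, greet_count) VALUES($1, 1)
     ON CONFLICT(name) DO UPDATE SET greet_count = greetings.greet_count + 1
     RETURNING greet_count`,
    [name],
  );
  return { name, greet_count: rows[0]?.greet_count };
}

// Register the transaction with the data source and wrap it in a workflow.
const greetTx = dataSource.registerTransaction(greet, 'greet');
const greetWorkflow = DBOS.registerWorkflow(async (name: string) => await greetTx(name), 'greetWorkflow');

async function main() {
  // Create the dbos.transaction_completion schema and table once, before launching DBOS.
  await NodePostgresDataSource.initializeInternalSchema(config);
  DBOS.setConfig({ name: 'greeting-app' });
  await DBOS.launch();
  try {
    console.log(await greetWorkflow('alice'));
  } finally {
    await DBOS.shutdown();
  }
}

void main();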
package/jest.config.js
ADDED
package/package.json
ADDED
@@ -0,0 +1,32 @@
{
  "name": "@dbos-inc/node-pg-datasource",
  "version": "3.0.6-preview",
  "description": "",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "https://github.com/dbos-inc/dbos-transact-ts",
    "directory": "packages/knex-datasource"
  },
  "homepage": "https://docs.dbos.dev/",
  "main": "index.js",
  "scripts": {
    "build": "tsc --project tsconfig.json",
    "test": "jest --detectOpenHandles"
  },
  "dependencies": {
    "pg": "^8.11.3",
    "pg-protocol": "^1.6.1",
    "superjson": "^1.13"
  },
  "peerDependencies": {
    "@dbos-inc/dbos-sdk": "*"
  },
  "devDependencies": {
    "@types/jest": "^29.5.14",
    "@types/node": "^20.11.25",
    "@types/pg": "^8.11.2",
    "jest": "^29.7.0",
    "typescript": "^5.4.5"
  }
}
package/tests/config.test.ts
ADDED
@@ -0,0 +1,31 @@
import { Client } from 'pg';
import { NodePostgresDataSource } from '../index';
import { dropDB, ensureDB } from './test-helpers';

describe('NodePostgresDataSource.configure', () => {
  const config = { user: 'postgres', database: 'nodepg_ds_config_test' };

  beforeAll(async () => {
    const client = new Client({ ...config, database: 'postgres' });
    try {
      await client.connect();
      await dropDB(client, config.database);
      await ensureDB(client, config.database);
    } finally {
      await client.end();
    }
  });

  test('configure creates tx outputs table', async () => {
    await NodePostgresDataSource.initializeInternalSchema(config);

    const client = new Client(config);
    try {
      await client.connect();
      const result = await client.query('SELECT workflow_id, function_num, output FROM dbos.transaction_completion');
      expect(result.rows.length).toBe(0);
    } finally {
      await client.end();
    }
  });
});
package/tests/datasource.test.ts
ADDED
@@ -0,0 +1,320 @@
import { DBOS } from '@dbos-inc/dbos-sdk';
import { Client, Pool } from 'pg';
import { NodePostgresDataSource } from '..';
import { dropDB, ensureDB } from './test-helpers';
import { randomUUID } from 'crypto';
import { SuperJSON } from 'superjson';

const config = { user: 'postgres', database: 'nodepg_ds_test_userdb' };
const dataSource = new NodePostgresDataSource('app-db', config);

interface transaction_completion {
  workflow_id: string;
  function_num: number;
  output: string | null;
}

describe('NodePostgresDataSource', () => {
  const userDB = new Pool(config);

  beforeAll(async () => {
    {
      const client = new Client({ ...config, database: 'postgres' });
      try {
        await client.connect();
        await dropDB(client, 'knex_ds_test');
        await dropDB(client, 'knex_ds_test_dbos_sys');
        await dropDB(client, config.database);
        await ensureDB(client, config.database);
      } finally {
        await client.end();
      }
    }

    {
      const client = await userDB.connect();
      try {
        await client.query(
          'CREATE TABLE greetings(name text NOT NULL, greet_count integer DEFAULT 0, PRIMARY KEY(name))',
        );
      } finally {
        client.release();
      }
    }

    await NodePostgresDataSource.initializeInternalSchema(config);
    DBOS.setConfig({ name: 'knex-ds-test' });
    await DBOS.launch();
  });

  afterAll(async () => {
    await DBOS.shutdown();
    await userDB.end();
  });

  test('insert dataSource.register function', async () => {
    const user = 'helloTest1';

    await userDB.query('DELETE FROM greetings WHERE name = $1', [user]);
    const workflowID = randomUUID();

    await expect(DBOS.withNextWorkflowID(workflowID, () => regInsertWorfklowReg(user))).resolves.toEqual({
      user,
      greet_count: 1,
    });

    const { rows } = await userDB.query<transaction_completion>(
      'SELECT * FROM dbos.transaction_completion WHERE workflow_id = $1',
      [workflowID],
    );
    expect(rows.length).toBe(1);
    expect(rows[0].workflow_id).toBe(workflowID);
    expect(rows[0].function_num).toBe(0);
    expect(rows[0].output).not.toBeNull();
    expect(SuperJSON.parse(rows[0].output!)).toEqual({ user, greet_count: 1 });
  });

  test('insert dataSource.runAsTx function', async () => {
    const user = 'helloTest2';

    await userDB.query('DELETE FROM greetings WHERE name = $1', [user]);
    const workflowID = randomUUID();

    await expect(DBOS.withNextWorkflowID(workflowID, () => regInsertWorfklowRunTx(user))).resolves.toEqual({
      user,
      greet_count: 1,
    });

    const { rows } = await userDB.query<transaction_completion>(
      'SELECT * FROM dbos.transaction_completion WHERE workflow_id = $1',
      [workflowID],
    );
    expect(rows.length).toBe(1);
    expect(rows[0].workflow_id).toBe(workflowID);
    expect(rows[0].function_num).toBe(0);
    expect(rows[0].output).not.toBeNull();
    expect(SuperJSON.parse(rows[0].output!)).toEqual({ user, greet_count: 1 });
  });

  test('error dataSource.register function', async () => {
    const user = 'errorTest1';

    await userDB.query('DELETE FROM greetings WHERE name = $1', [user]);
    await userDB.query('INSERT INTO greetings("name","greet_count") VALUES($1,10);', [user]);
    const workflowID = randomUUID();

    await expect(DBOS.withNextWorkflowID(workflowID, () => regErrorWorkflowReg(user))).rejects.toThrow('test error');

    const { rows: txOutput } = await userDB.query('SELECT * FROM dbos.transaction_completion WHERE workflow_id = $1', [
      workflowID,
    ]);
    expect(txOutput.length).toBe(0);

    const { rows } = await userDB.query<greetings>('SELECT * FROM greetings WHERE name = $1', [user]);
    expect(rows.length).toBe(1);
    expect(rows[0].greet_count).toBe(10);
  });

  test('error dataSource.runAsTx function', async () => {
    const user = 'errorTest2';

    await userDB.query('DELETE FROM greetings WHERE name = $1', [user]);
    await userDB.query('INSERT INTO greetings("name","greet_count") VALUES($1,10);', [user]);
    const workflowID = randomUUID();

    await expect(DBOS.withNextWorkflowID(workflowID, () => regErrorWorkflowRunTx(user))).rejects.toThrow('test error');

    const { rows: txOutput } = await userDB.query<transaction_completion>(
      'SELECT * FROM dbos.transaction_completion WHERE workflow_id = $1',
      [workflowID],
    );
    expect(txOutput.length).toBe(0);

    const { rows } = await userDB.query<greetings>('SELECT * FROM greetings WHERE name = $1', [user]);
    expect(rows.length).toBe(1);
    expect(rows[0].greet_count).toBe(10);
  });

  test('readonly dataSource.register function', async () => {
    const user = 'readTest1';

    await userDB.query('DELETE FROM greetings WHERE name = $1', [user]);
    await userDB.query('INSERT INTO greetings("name","greet_count") VALUES($1,10);', [user]);

    const workflowID = randomUUID();
    await expect(DBOS.withNextWorkflowID(workflowID, () => regReadWorkflowReg(user))).resolves.toEqual({
      user,
      greet_count: 10,
    });

    const { rows } = await userDB.query<transaction_completion>(
      'SELECT * FROM dbos.transaction_completion WHERE workflow_id = $1',
      [workflowID],
    );
    expect(rows.length).toBe(0);
  });

  test('readonly dataSource.runAsTx function', async () => {
    const user = 'readTest2';

    await userDB.query('DELETE FROM greetings WHERE name = $1', [user]);
    await userDB.query('INSERT INTO greetings("name","greet_count") VALUES($1,10);', [user]);

    const workflowID = randomUUID();
    await expect(DBOS.withNextWorkflowID(workflowID, () => regReadWorkflowRunTx(user))).resolves.toEqual({
      user,
      greet_count: 10,
    });

    const { rows } = await userDB.query<transaction_completion>(
      'SELECT * FROM dbos.transaction_completion WHERE workflow_id = $1',
      [workflowID],
    );
    expect(rows.length).toBe(0);
  });

  test('static dataSource.register methods', async () => {
    const user = 'staticTest1';

    await userDB.query('DELETE FROM greetings WHERE name = $1', [user]);

    const workflowID = randomUUID();
    await expect(DBOS.withNextWorkflowID(workflowID, () => regStaticWorkflow(user))).resolves.toEqual([
      { user, greet_count: 1 },
      { user, greet_count: 1 },
    ]);
  });

  test('instance dataSource.register methods', async () => {
    const user = 'instanceTest1';

    await userDB.query('DELETE FROM greetings WHERE name = $1', [user]);

    const workflowID = randomUUID();
    await expect(DBOS.withNextWorkflowID(workflowID, () => regInstanceWorkflow(user))).resolves.toEqual([
      { user, greet_count: 1 },
      { user, greet_count: 1 },
    ]);
  });
});

export interface greetings {
  name: string;
  greet_count: number;
}

async function insertFunction(user: string) {
  const { rows } = await NodePostgresDataSource.client.query<Pick<greetings, 'greet_count'>>(
    `
    INSERT INTO greetings(name, greet_count)
    VALUES($1, 1)
    ON CONFLICT(name)
    DO UPDATE SET greet_count = greetings.greet_count + 1
    RETURNING greet_count`,
    [user],
  );
  const row = rows.length > 0 ? rows[0] : undefined;
  return { user, greet_count: row?.greet_count };
}

async function errorFunction(user: string) {
  const result = await insertFunction(user);
  throw new Error('test error');
  return result;
}

async function readFunction(user: string) {
  const { rows } = await NodePostgresDataSource.client.query<Pick<greetings, 'greet_count'>>(
    `
    SELECT greet_count
    FROM greetings
    WHERE name = $1`,
    [user],
  );
  const row = rows.length > 0 ? rows[0] : undefined;
  return { user, greet_count: row?.greet_count };
}

const regInsertFunction = dataSource.registerTransaction(insertFunction, 'insertFunction');
const regErrorFunction = dataSource.registerTransaction(errorFunction, 'errorFunction');
const regReadFunction = dataSource.registerTransaction(readFunction, 'readFunction', { readOnly: true });

class StaticClass {
  static async insertFunction(user: string) {
    return await insertFunction(user);
  }

  static async readFunction(user: string) {
    return await readFunction(user);
  }
}

StaticClass.insertFunction = dataSource.registerTransaction(StaticClass.insertFunction, 'insertFunction');
StaticClass.readFunction = dataSource.registerTransaction(StaticClass.readFunction, 'readFunction');

class InstanceClass {
  async insertFunction(user: string) {
    return await insertFunction(user);
  }

  async readFunction(user: string) {
    return await readFunction(user);
  }
}

InstanceClass.prototype.insertFunction = dataSource.registerTransaction(
  // eslint-disable-next-line @typescript-eslint/unbound-method
  InstanceClass.prototype.insertFunction,
  'insertFunction',
);
InstanceClass.prototype.readFunction = dataSource.registerTransaction(
  // eslint-disable-next-line @typescript-eslint/unbound-method
  InstanceClass.prototype.readFunction,
  'readFunction',
);

async function insertWorkflowReg(user: string) {
  return await regInsertFunction(user);
}

async function insertWorkflowRunTx(user: string) {
  return await dataSource.runTransaction(() => insertFunction(user), 'insertFunction');
}

async function errorWorkflowReg(user: string) {
  return await regErrorFunction(user);
}

async function errorWorkflowRunTx(user: string) {
  return await dataSource.runTransaction(() => errorFunction(user), 'errorFunction');
}

async function readWorkflowReg(user: string) {
  return await regReadFunction(user);
}

async function readWorkflowRunTx(user: string) {
  return await dataSource.runTransaction(() => readFunction(user), 'readFunction', { readOnly: true });
}

async function staticWorkflow(user: string) {
  const result = await StaticClass.insertFunction(user);
  const readResult = await StaticClass.readFunction(user);
  return [result, readResult];
}

async function instanceWorkflow(user: string) {
  const instance = new InstanceClass();
  const result = await instance.insertFunction(user);
  const readResult = await instance.readFunction(user);
  return [result, readResult];
}

const regInsertWorfklowReg = DBOS.registerWorkflow(insertWorkflowReg, 'insertWorkflowReg');
const regInsertWorfklowRunTx = DBOS.registerWorkflow(insertWorkflowRunTx, 'insertWorkflowRunTx');
const regErrorWorkflowReg = DBOS.registerWorkflow(errorWorkflowReg, 'errorWorkflowReg');
const regErrorWorkflowRunTx = DBOS.registerWorkflow(errorWorkflowRunTx, 'errorWorkflowRunTx');
const regReadWorkflowReg = DBOS.registerWorkflow(readWorkflowReg, 'readWorkflowReg');
const regReadWorkflowRunTx = DBOS.registerWorkflow(readWorkflowRunTx, 'readWorkflowRunTx');
const regStaticWorkflow = DBOS.registerWorkflow(staticWorkflow, 'staticWorkflow');
const regInstanceWorkflow = DBOS.registerWorkflow(instanceWorkflow, 'instanceWorkflow');
package/tests/test-helpers.ts
ADDED
@@ -0,0 +1,12 @@
import { Client } from 'pg';

export async function ensureDB(client: Client, name: string) {
  const results = await client.query('SELECT 1 FROM pg_database WHERE datname = $1', [name]);
  if (results.rows.length === 0) {
    await client.query(`CREATE DATABASE ${name}`);
  }
}

export async function dropDB(client: Client, name: string) {
  await client.query(`DROP DATABASE IF EXISTS ${name}`);
}
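
The tests above exercise registerTransaction and runTransaction, but not the transaction() decorator factory defined in index.ts. Below is a hedged sketch of that decorator form; it assumes legacy ("experimentalDecorators") TypeScript decorators and reuses the greetings table from the tests, with a hypothetical class and workflow name.

// Hedged sketch (not part of the package): decorator-based registration via dataSource.transaction().
// Assumes "experimentalDecorators" is enabled; class, names, and config are illustrative.
import { DBOS } from '@dbos-inc/dbos-sdk';
import { NodePostgresDataSource } from '@dbos-inc/node-pg-datasource';

const decoDS = new NodePostgresDataSource('deco-db', { user: 'postgres', database: 'nodepg_ds_test_userdb' });

class Greeter {
  // The decorator replaces the method with its registered transaction wrapper.
  @decoDS.transaction({ readOnly: true })
  static async readGreeting(name: string) {
    const { rows } = await NodePostgresDataSource.client.query<{ greet_count: number }>(
      'SELECT greet_count FROM greetings WHERE name = $1',
      [name],
    );
    return rows[0]?.greet_count;
  }
}

// Invoked from within a registered workflow, e.g.:
const readGreetingWorkflow = DBOS.registerWorkflow(
  async (name: string) => await Greeter.readGreeting(name),
  'readGreetingWorkflow',
);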