@dbos-inc/dbos-sdk 3.6.5-preview → 3.6.8-preview
- package/dist/src/adminserver.d.ts +26 -40
- package/dist/src/adminserver.d.ts.map +1 -1
- package/dist/src/adminserver.js +381 -276
- package/dist/src/adminserver.js.map +1 -1
- package/dist/src/cli/cli.d.ts.map +1 -1
- package/dist/src/cli/cli.js +41 -13
- package/dist/src/cli/cli.js.map +1 -1
- package/dist/src/config.d.ts +0 -5
- package/dist/src/config.d.ts.map +1 -1
- package/dist/src/config.js +1 -3
- package/dist/src/config.js.map +1 -1
- package/dist/src/context.d.ts +0 -1
- package/dist/src/context.d.ts.map +1 -1
- package/dist/src/datasource.d.ts.map +1 -1
- package/dist/src/datasource.js +7 -7
- package/dist/src/datasource.js.map +1 -1
- package/dist/src/dbos-executor.d.ts +15 -2
- package/dist/src/dbos-executor.d.ts.map +1 -1
- package/dist/src/dbos-executor.js +10 -11
- package/dist/src/dbos-executor.js.map +1 -1
- package/dist/src/dbos.d.ts +2 -17
- package/dist/src/dbos.d.ts.map +1 -1
- package/dist/src/dbos.js +2 -42
- package/dist/src/dbos.js.map +1 -1
- package/dist/src/system_database.d.ts +0 -2
- package/dist/src/system_database.d.ts.map +1 -1
- package/dist/src/system_database.js +147 -121
- package/dist/src/system_database.js.map +1 -1
- package/dist/src/telemetry/collector.d.ts +1 -4
- package/dist/src/telemetry/collector.d.ts.map +1 -1
- package/dist/src/telemetry/collector.js.map +1 -1
- package/dist/src/telemetry/exporters.d.ts +3 -7
- package/dist/src/telemetry/exporters.d.ts.map +1 -1
- package/dist/src/telemetry/exporters.js +27 -10
- package/dist/src/telemetry/exporters.js.map +1 -1
- package/dist/src/telemetry/logs.d.ts +5 -16
- package/dist/src/telemetry/logs.d.ts.map +1 -1
- package/dist/src/telemetry/logs.js +44 -15
- package/dist/src/telemetry/logs.js.map +1 -1
- package/dist/src/telemetry/traces.d.ts +42 -6
- package/dist/src/telemetry/traces.d.ts.map +1 -1
- package/dist/src/telemetry/traces.js +95 -31
- package/dist/src/telemetry/traces.js.map +1 -1
- package/dist/src/utils.d.ts +2 -0
- package/dist/src/utils.d.ts.map +1 -1
- package/dist/src/utils.js +7 -1
- package/dist/src/utils.js.map +1 -1
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +2 -24
- package/dist/src/telemetry/index.d.ts +0 -12
- package/dist/src/telemetry/index.d.ts.map +0 -1
- package/dist/src/telemetry/index.js +0 -14
- package/dist/src/telemetry/index.js.map +0 -1
package/dist/src/system_database.js
@@ -8,9 +8,6 @@ var __decorate = (this && this.__decorate) || function (decorators, target, key,
 var __metadata = (this && this.__metadata) || function (k, v) {
     if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.PostgresSystemDatabase = exports.ensureSystemDatabase = exports.grantDbosSchemaPermissions = exports.DBOS_STREAM_CLOSED_SENTINEL = exports.DBOS_FUNCNAME_CLOSESTREAM = exports.DBOS_FUNCNAME_WRITESTREAM = exports.DBOS_FUNCNAME_GETSTATUS = exports.DBOS_FUNCNAME_SLEEP = exports.DBOS_FUNCNAME_GETEVENT = exports.DBOS_FUNCNAME_SETEVENT = exports.DBOS_FUNCNAME_RECV = exports.DBOS_FUNCNAME_SEND = void 0;
 const dbos_executor_1 = require("./dbos-executor");
@@ -18,7 +15,6 @@ const pg_1 = require("pg");
 const error_1 = require("./error");
 const workflow_1 = require("./workflow");
 const utils_1 = require("./utils");
-const knex_1 = __importDefault(require("knex"));
 const crypto_1 = require("crypto");
 const utils_2 = require("./utils");
 const database_utils_1 = require("./database_utils");
@@ -451,8 +447,6 @@ class PostgresSystemDatabase {
     sysDbPoolSize;
     pool;
     systemPoolConfig;
-    // TODO: remove Knex connection in favor of just using Pool
-    knexDB;
     /*
      * Generally, notifications are asynchronous. One should:
      *   Subscribe to updates
@@ -498,15 +492,6 @@ class PostgresSystemDatabase {
                 this.logger.warn(`Unexpected error in idle client: ${err}`);
             });
         });
-        const knexConfig = {
-            client: 'pg',
-            connection: this.systemPoolConfig,
-            pool: {
-                min: 0,
-                max: this.sysDbPoolSize,
-            },
-        };
-        this.knexDB = (0, knex_1.default)(knexConfig);
     }
     async init(debugMode = false) {
         await ensureSystemDatabase(this.systemDatabaseUrl, this.logger, debugMode);
@@ -515,7 +500,6 @@ class PostgresSystemDatabase {
         }
     }
     async destroy() {
-        await this.knexDB.destroy();
         if (this.reconnectTimeout) {
             clearTimeout(this.reconnectTimeout);
         }
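The hunks above drop the package's Knex usage from the system database: the import, the `knexDB` field, its construction, and its teardown all go away, leaving only the node-postgres `Pool` that was already present. A minimal sketch of the resulting shape (class and field names here are illustrative, not the SDK's actual exports):

```ts
// Sketch of the system database after this change: only a node-postgres Pool,
// no Knex instance. Class and field names are illustrative, not the SDK's API.
import { Pool, PoolConfig } from 'pg';

class SystemDatabaseSketch {
  private readonly pool: Pool;

  constructor(poolConfig: PoolConfig) {
    // The pool already existed before this change; Knex was a second, parallel
    // connection layer that is now gone.
    this.pool = new Pool(poolConfig);
  }

  async destroy(): Promise<void> {
    await this.pool.end(); // takes the place of the removed knexDB.destroy()
  }
}
```

With a single connection pool, shutdown is one `pool.end()` call instead of tearing down both Knex and pg.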
@@ -1343,41 +1327,65 @@ class PostgresSystemDatabase {
             selectColumns.push('output', 'error');
         }
         input.sortDesc = input.sortDesc ?? false; // By default, sort in ascending order
-
-
-
+        // Build WHERE clauses
+        const whereClauses = [];
+        const params = [];
+        let paramCounter = 1;
         if (input.workflowName) {
-
+            whereClauses.push(`name = $${paramCounter}`);
+            params.push(input.workflowName);
+            paramCounter++;
         }
         if (input.workflow_id_prefix) {
-
+            whereClauses.push(`workflow_uuid LIKE $${paramCounter}`);
+            params.push(`${input.workflow_id_prefix}%`);
+            paramCounter++;
         }
         if (input.workflowIDs) {
-
+            const placeholders = input.workflowIDs.map((_, i) => `$${paramCounter + i}`).join(', ');
+            whereClauses.push(`workflow_uuid IN (${placeholders})`);
+            params.push(...input.workflowIDs);
+            paramCounter += input.workflowIDs.length;
         }
         if (input.authenticatedUser) {
-
+            whereClauses.push(`authenticated_user = $${paramCounter}`);
+            params.push(input.authenticatedUser);
+            paramCounter++;
         }
         if (input.startTime) {
-
+            whereClauses.push(`created_at >= $${paramCounter}`);
+            params.push(new Date(input.startTime).getTime());
+            paramCounter++;
         }
         if (input.endTime) {
-
+            whereClauses.push(`created_at <= $${paramCounter}`);
+            params.push(new Date(input.endTime).getTime());
+            paramCounter++;
         }
         if (input.status) {
-
+            whereClauses.push(`status = $${paramCounter}`);
+            params.push(input.status);
+            paramCounter++;
         }
         if (input.applicationVersion) {
-
-
-
-
-        }
-
-
-        }
-            const
-
+            whereClauses.push(`application_version = $${paramCounter}`);
+            params.push(input.applicationVersion);
+            paramCounter++;
+        }
+        const whereClause = whereClauses.length > 0 ? `WHERE ${whereClauses.join(' AND ')}` : '';
+        const orderClause = `ORDER BY created_at ${input.sortDesc ? 'DESC' : 'ASC'}`;
+        const limitClause = input.limit ? `LIMIT ${input.limit}` : '';
+        const offsetClause = input.offset ? `OFFSET ${input.offset}` : '';
+        const query = `
+      SELECT ${selectColumns.join(', ')}
+      FROM ${schemaName}.workflow_status
+      ${whereClause}
+      ${orderClause}
+      ${limitClause}
+      ${offsetClause}
+    `;
+        const result = await this.pool.query(query, params);
+        return result.rows.map(mapWorkflowStatus);
     }
     async listQueuedWorkflows(input) {
         const schemaName = dbos_executor_1.DBOSExecutor.systemDBSchemaName;
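The replacement `listWorkflows` body builds its SQL by hand: each optional filter appends a numbered `$n` placeholder and pushes the matching value, so user input never gets interpolated into the query text. A reduced sketch of that pattern against a plain `pg` Pool (the filter set is simplified and the `dbos.workflow_status` table name is an assumption here):

```ts
// Reduced sketch of the parameterized filter building above; not the SDK's code.
import { Pool } from 'pg';

interface ListFilters {
  workflowName?: string;
  status?: string;
  workflowIDs?: string[];
  limit?: number;
}

async function listWorkflowsSketch(pool: Pool, input: ListFilters) {
  const whereClauses: string[] = [];
  const params: unknown[] = [];
  let paramCounter = 1;
  if (input.workflowName) {
    whereClauses.push(`name = $${paramCounter++}`);
    params.push(input.workflowName);
  }
  if (input.status) {
    whereClauses.push(`status = $${paramCounter++}`);
    params.push(input.status);
  }
  if (input.workflowIDs) {
    // One placeholder per ID, so the IN list stays fully parameterized.
    const placeholders = input.workflowIDs.map((_, i) => `$${paramCounter + i}`).join(', ');
    whereClauses.push(`workflow_uuid IN (${placeholders})`);
    params.push(...input.workflowIDs);
    paramCounter += input.workflowIDs.length;
  }
  const whereClause = whereClauses.length > 0 ? `WHERE ${whereClauses.join(' AND ')}` : '';
  const limitClause = input.limit ? `LIMIT ${input.limit}` : '';
  const result = await pool.query(
    `SELECT workflow_uuid, status, name FROM dbos.workflow_status ${whereClause} ORDER BY created_at ASC ${limitClause}`,
    params,
  );
  return result.rows;
}
```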
@@ -1405,34 +1413,54 @@ class PostgresSystemDatabase {
             selectColumns.push('inputs', 'request');
         }
         const sortDesc = input.sortDesc ?? false; // By default, sort in ascending order
-
-
-
-
-
+        // Build WHERE clauses
+        const whereClauses = [];
+        const params = [];
+        let paramCounter = 1;
+        // Always filter for queued workflows
+        whereClauses.push(`queue_name IS NOT NULL`);
+        whereClauses.push(`status IN ($${paramCounter}, $${paramCounter + 1})`);
+        params.push(workflow_1.StatusString.ENQUEUED, workflow_1.StatusString.PENDING);
+        paramCounter += 2;
         if (input.workflowName) {
-
+            whereClauses.push(`name = $${paramCounter}`);
+            params.push(input.workflowName);
+            paramCounter++;
         }
         if (input.queueName) {
-
+            whereClauses.push(`queue_name = $${paramCounter}`);
+            params.push(input.queueName);
+            paramCounter++;
         }
         if (input.startTime) {
-
+            whereClauses.push(`created_at >= $${paramCounter}`);
+            params.push(new Date(input.startTime).getTime());
+            paramCounter++;
         }
         if (input.endTime) {
-
+            whereClauses.push(`created_at <= $${paramCounter}`);
+            params.push(new Date(input.endTime).getTime());
+            paramCounter++;
         }
         if (input.status) {
-
-
-
-
-        }
-
-
-        }
-            const
-
+            whereClauses.push(`status = $${paramCounter}`);
+            params.push(input.status);
+            paramCounter++;
+        }
+        const whereClause = `WHERE ${whereClauses.join(' AND ')}`;
+        const orderClause = `ORDER BY created_at ${sortDesc ? 'DESC' : 'ASC'}`;
+        const limitClause = input.limit ? `LIMIT ${input.limit}` : '';
+        const offsetClause = input.offset ? `OFFSET ${input.offset}` : '';
+        const query = `
+      SELECT ${selectColumns.join(', ')}
+      FROM ${schemaName}.workflow_status
+      ${whereClause}
+      ${orderClause}
+      ${limitClause}
+      ${offsetClause}
+    `;
+        const result = await this.pool.query(query, params);
+        return result.rows.map(mapWorkflowStatus);
     }
     async clearQueueAssignment(workflowID) {
         // Reset the status of the task from "PENDING" to "ENQUEUED"
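`listQueuedWorkflows` uses the same builder but seeds it with fixed clauses before any optional filters, so only enqueued or pending workflows on a queue are ever returned. A minimal sketch of that seeding (the string literals stand in for `workflow_1.StatusString.ENQUEUED` / `PENDING`):

```ts
// Minimal sketch of the fixed filters the queued-workflow listing starts from.
function baseQueuedFilters(): { whereClauses: string[]; params: unknown[]; nextParam: number } {
  const whereClauses = ['queue_name IS NOT NULL', 'status IN ($1, $2)'];
  const params: unknown[] = ['ENQUEUED', 'PENDING'];
  // Optional filters (queue name, time range, ...) continue numbering from $3.
  return { whereClauses, params, nextParam: 3 };
}
```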
@@ -1454,19 +1482,20 @@ class PostgresSystemDatabase {
         const startTimeMs = Date.now();
         const limiterPeriodMS = queue.rateLimit ? queue.rateLimit.periodSec * 1000 : 0;
         const claimedIDs = [];
-        await this.
+        const client = await this.pool.connect();
+        try {
+            await client.query('BEGIN ISOLATION LEVEL REPEATABLE READ');
             // If there is a rate limit, compute how many functions have started in its period.
             let numRecentQueries = 0;
             if (queue.rateLimit) {
-            const
-
-
-
-
-                .first()).count;
-            numRecentQueries = parseInt(`${numRecentQueriesS}`);
+                const countResult = await client.query(`SELECT COUNT(*) FROM ${dbos_executor_1.DBOSExecutor.systemDBSchemaName}.workflow_status
+          WHERE queue_name = $1
+            AND status <> $2
+            AND started_at_epoch_ms > $3`, [queue.name, workflow_1.StatusString.ENQUEUED, startTimeMs - limiterPeriodMS]);
+                numRecentQueries = Number(countResult.rows[0].count);
                 if (numRecentQueries >= queue.rateLimit.limitPerPeriod) {
-
+                    await client.query('COMMIT');
+                    return claimedIDs;
                 }
             }
             // Dequeue functions eligible for this worker and ordered by the time at which they were enqueued.
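Dequeueing now checks out a dedicated client and runs inside an explicit `REPEATABLE READ` transaction, and the rate-limit check becomes a plain `COUNT(*)` over recently started workflows. A sketch of that check (the schema name and status literal are assumptions; `pg` returns `COUNT(*)` as a string, hence the `Number` conversion):

```ts
// Sketch of the rate-limit check as a COUNT(*) on a checked-out client.
// Names are illustrative, not the SDK's exports.
import { PoolClient } from 'pg';

async function countRecentStarts(
  client: PoolClient,
  queueName: string,
  windowStartMs: number,
): Promise<number> {
  const res = await client.query(
    `SELECT COUNT(*) FROM dbos.workflow_status
     WHERE queue_name = $1 AND status <> $2 AND started_at_epoch_ms > $3`,
    [queueName, 'ENQUEUED', windowStartMs],
  );
  // node-postgres returns bigint aggregates as strings, so convert explicitly.
  return Number(res.rows[0].count);
}
```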
@@ -1475,15 +1504,12 @@ class PostgresSystemDatabase {
             let maxTasks = Infinity;
             if (queue.workerConcurrency !== undefined || queue.concurrency !== undefined) {
                 // Count how many workflows on this queue are currently PENDING both locally and globally.
-            const
-
-
-
-                .andWhere('status', workflow_1.StatusString.PENDING)
-                .groupBy('executor_id');
-            const runningTasksResult = await runningTasksSubquery;
+                const runningTasksResult = await client.query(`SELECT executor_id, COUNT(*) as task_count
+           FROM ${dbos_executor_1.DBOSExecutor.systemDBSchemaName}.workflow_status
+           WHERE queue_name = $1 AND status = $2
+           GROUP BY executor_id`, [queue.name, workflow_1.StatusString.PENDING]);
                 const runningTasksResultDict = {};
-            runningTasksResult.forEach((row) => {
+                runningTasksResult.rows.forEach((row) => {
                     runningTasksResultDict[row.executor_id] = Number(row.task_count);
                 });
                 const runningTasksForThisWorker = runningTasksResultDict[executorID] || 0;
@@ -1501,36 +1527,29 @@ class PostgresSystemDatabase {
             }
             // Return immediately if there are no available tasks due to flow control limits
             if (maxTasks <= 0) {
-
+                await client.query('COMMIT');
+                return claimedIDs;
             }
             // Retrieve the first max_tasks workflows in the queue.
             // Only retrieve workflows of the local version (or without version set)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        }
-        else {
-            query = query.orderBy('created_at', 'asc');
-        }
-        if (maxTasks !== Infinity) {
-            query = query.limit(maxTasks);
-        }
-        const rows = (await query.select(['workflow_uuid']));
+            const lockMode = queue.concurrency ? 'FOR UPDATE NOWAIT' : 'FOR UPDATE SKIP LOCKED';
+            const orderClause = queue.priorityEnabled ? 'ORDER BY priority ASC, created_at ASC' : 'ORDER BY created_at ASC';
+            const limitClause = maxTasks !== Infinity ? `LIMIT ${maxTasks}` : '';
+            const selectQuery = `
+        SELECT workflow_uuid
+        FROM ${dbos_executor_1.DBOSExecutor.systemDBSchemaName}.workflow_status
+        WHERE status = $1
+          AND queue_name = $2
+          AND (application_version IS NULL OR application_version = $3)
+        ${orderClause}
+        ${limitClause}
+        ${lockMode}
+      `;
+            const { rows } = await client.query(selectQuery, [
+                workflow_1.StatusString.ENQUEUED,
+                queue.name,
+                appVersion,
+            ]);
             // Start the workflows
             const workflowIDs = rows.map((row) => row.workflow_uuid);
             for (const id of workflowIDs) {
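Candidate rows are now claimed with row-level locks: `FOR UPDATE NOWAIT` when a global concurrency cap is configured, `FOR UPDATE SKIP LOCKED` otherwise, so concurrent workers either fail fast or claim disjoint rows instead of blocking on each other. A sketch of that locking SELECT (schema name, status literal, and function name are illustrative):

```ts
// Sketch of the locking SELECT used to claim queued workflows. The surrounding
// REPEATABLE READ transaction is omitted; names are illustrative.
import { PoolClient } from 'pg';

async function selectClaimableWorkflows(
  client: PoolClient,
  queueName: string,
  appVersion: string,
  maxTasks: number,
  globalConcurrencyCap: boolean,
): Promise<string[]> {
  // NOWAIT fails fast when another worker holds the rows needed to enforce a
  // global cap; SKIP LOCKED lets workers claim disjoint rows without blocking.
  const lockMode = globalConcurrencyCap ? 'FOR UPDATE NOWAIT' : 'FOR UPDATE SKIP LOCKED';
  const limitClause = Number.isFinite(maxTasks) ? `LIMIT ${maxTasks}` : '';
  const { rows } = await client.query(
    `SELECT workflow_uuid FROM dbos.workflow_status
     WHERE status = $1 AND queue_name = $2
       AND (application_version IS NULL OR application_version = $3)
     ORDER BY created_at ASC
     ${limitClause}
     ${lockMode}`,
    ['ENQUEUED', queueName, appVersion],
  );
  return rows.map((r: { workflow_uuid: string }) => r.workflow_uuid);
}
```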
@@ -1540,18 +1559,28 @@ class PostgresSystemDatabase {
                     break;
                 }
                 // Start the functions by marking them as pending and updating their executor IDs
-            await
-
-
-
-
-
-
-
-
+                await client.query(`UPDATE ${dbos_executor_1.DBOSExecutor.systemDBSchemaName}.workflow_status
+          SET status = $1,
+              executor_id = $2,
+              application_version = $3,
+              started_at_epoch_ms = $4,
+              workflow_deadline_epoch_ms = CASE
+                WHEN workflow_timeout_ms IS NOT NULL AND workflow_deadline_epoch_ms IS NULL
+                THEN (EXTRACT(epoch FROM now()) * 1000)::bigint + workflow_timeout_ms
+                ELSE workflow_deadline_epoch_ms
+              END
+          WHERE workflow_uuid = $5`, [workflow_1.StatusString.PENDING, executorID, appVersion, startTimeMs, id]);
                 claimedIDs.push(id);
             }
-
+            await client.query('COMMIT');
+        }
+        catch (error) {
+            await client.query('ROLLBACK');
+            throw error;
+        }
+        finally {
+            client.release();
+        }
         // Return the IDs of all functions we marked started
         return claimedIDs;
     }
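The whole claim sequence runs on one checked-out client with an explicit COMMIT on success, ROLLBACK on error, and a release in `finally`, replacing the Knex transaction helper. A generic sketch of that scaffolding (the helper name is made up for illustration):

```ts
// Sketch of the transaction scaffolding the dequeue path now uses: one
// checked-out client, REPEATABLE READ, COMMIT on success, ROLLBACK on error,
// and release in finally. `work` stands in for the claim logic above.
import { Pool, PoolClient } from 'pg';

async function withRepeatableRead<T>(pool: Pool, work: (client: PoolClient) => Promise<T>): Promise<T> {
  const client = await pool.connect();
  try {
    await client.query('BEGIN ISOLATION LEVEL REPEATABLE READ');
    const result = await work(client);
    await client.query('COMMIT');
    return result;
  } catch (error) {
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
  }
}
```

Usage would look like `await withRepeatableRead(pool, (client) => claimWorkflows(client, queue))`, so every statement in the claim runs on the same client and therefore inside the same transaction.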
@@ -1633,14 +1662,12 @@ class PostgresSystemDatabase {
     async garbageCollect(cutoffEpochTimestampMs, rowsThreshold) {
         if (rowsThreshold !== undefined) {
             // Get the created_at timestamp of the rows_threshold newest row
-            const result =
-
-
-
-
-                .
-            if (result !== undefined) {
-                const rowsBasedCutoff = result.created_at;
+            const result = await this.pool.query(`SELECT created_at
+         FROM ${dbos_executor_1.DBOSExecutor.systemDBSchemaName}.workflow_status
+         ORDER BY created_at DESC
+         LIMIT 1 OFFSET $1`, [rowsThreshold - 1]);
+            if (result.rows.length > 0) {
+                const rowsBasedCutoff = result.rows[0].created_at;
                 // Use the more restrictive cutoff (higher timestamp = more recent = more deletion)
                 if (cutoffEpochTimestampMs === undefined || rowsBasedCutoff > cutoffEpochTimestampMs) {
                     cutoffEpochTimestampMs = rowsBasedCutoff;
@@ -1651,10 +1678,9 @@ class PostgresSystemDatabase {
             return;
         }
         // Delete all workflows older than cutoff that are NOT PENDING or ENQUEUED
-        await this.
-
-
-            .del();
+        await this.pool.query(`DELETE FROM ${dbos_executor_1.DBOSExecutor.systemDBSchemaName}.workflow_status
+       WHERE created_at < $1
+         AND status NOT IN ($2, $3)`, [cutoffEpochTimestampMs, workflow_1.StatusString.PENDING, workflow_1.StatusString.ENQUEUED]);
         return;
     }
 }
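Garbage collection now computes its rows-based cutoff with `LIMIT 1 OFFSET rowsThreshold - 1` (the `created_at` of the Nth-newest row) and then deletes everything older that is not pending or enqueued, again through the pool rather than Knex. A sketch of the cutoff query (schema name assumed; `pg` returns bigint columns as strings by default, hence the `Number` conversion):

```ts
// Sketch of the rows-threshold cutoff: the created_at of the Nth-newest row
// becomes the deletion cutoff. Table name is an assumption for illustration.
import { Pool } from 'pg';

async function rowsBasedCutoff(pool: Pool, rowsThreshold: number): Promise<number | undefined> {
  const result = await pool.query(
    `SELECT created_at FROM dbos.workflow_status
     ORDER BY created_at DESC
     LIMIT 1 OFFSET $1`,
    [rowsThreshold - 1],
  );
  // If the table holds fewer than rowsThreshold rows, there is nothing to trim.
  return result.rows.length > 0 ? Number(result.rows[0].created_at) : undefined;
}
```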