@sonamu-kit/tasks 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.swcrc +17 -0
- package/README.md +7 -0
- package/dist/backend.d.ts +107 -0
- package/dist/backend.d.ts.map +1 -0
- package/dist/backend.js +3 -0
- package/dist/backend.js.map +1 -0
- package/dist/chaos.test.d.ts +2 -0
- package/dist/chaos.test.d.ts.map +1 -0
- package/dist/chaos.test.js +92 -0
- package/dist/chaos.test.js.map +1 -0
- package/dist/client.d.ts +178 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/client.js +223 -0
- package/dist/client.js.map +1 -0
- package/dist/client.test.d.ts +2 -0
- package/dist/client.test.d.ts.map +1 -0
- package/dist/client.test.js +339 -0
- package/dist/client.test.js.map +1 -0
- package/dist/config.d.ts +22 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +23 -0
- package/dist/config.js.map +1 -0
- package/dist/config.test.d.ts +2 -0
- package/dist/config.test.d.ts.map +1 -0
- package/dist/config.test.js +24 -0
- package/dist/config.test.js.map +1 -0
- package/dist/core/duration.d.ts +22 -0
- package/dist/core/duration.d.ts.map +1 -0
- package/dist/core/duration.js +64 -0
- package/dist/core/duration.js.map +1 -0
- package/dist/core/duration.test.d.ts +2 -0
- package/dist/core/duration.test.d.ts.map +1 -0
- package/dist/core/duration.test.js +265 -0
- package/dist/core/duration.test.js.map +1 -0
- package/dist/core/error.d.ts +15 -0
- package/dist/core/error.d.ts.map +1 -0
- package/dist/core/error.js +25 -0
- package/dist/core/error.js.map +1 -0
- package/dist/core/error.test.d.ts +2 -0
- package/dist/core/error.test.d.ts.map +1 -0
- package/dist/core/error.test.js +63 -0
- package/dist/core/error.test.js.map +1 -0
- package/dist/core/json.d.ts +5 -0
- package/dist/core/json.d.ts.map +1 -0
- package/dist/core/json.js +3 -0
- package/dist/core/json.js.map +1 -0
- package/dist/core/result.d.ts +22 -0
- package/dist/core/result.d.ts.map +1 -0
- package/dist/core/result.js +22 -0
- package/dist/core/result.js.map +1 -0
- package/dist/core/result.test.d.ts +2 -0
- package/dist/core/result.test.d.ts.map +1 -0
- package/dist/core/result.test.js +19 -0
- package/dist/core/result.test.js.map +1 -0
- package/dist/core/retry.d.ts +21 -0
- package/dist/core/retry.d.ts.map +1 -0
- package/dist/core/retry.js +25 -0
- package/dist/core/retry.js.map +1 -0
- package/dist/core/retry.test.d.ts +2 -0
- package/dist/core/retry.test.d.ts.map +1 -0
- package/dist/core/retry.test.js +37 -0
- package/dist/core/retry.test.js.map +1 -0
- package/dist/core/schema.d.ts +57 -0
- package/dist/core/schema.d.ts.map +1 -0
- package/dist/core/schema.js +4 -0
- package/dist/core/schema.js.map +1 -0
- package/dist/core/step.d.ts +96 -0
- package/dist/core/step.d.ts.map +1 -0
- package/dist/core/step.js +78 -0
- package/dist/core/step.js.map +1 -0
- package/dist/core/step.test.d.ts +2 -0
- package/dist/core/step.test.d.ts.map +1 -0
- package/dist/core/step.test.js +356 -0
- package/dist/core/step.test.js.map +1 -0
- package/dist/core/workflow.d.ts +78 -0
- package/dist/core/workflow.d.ts.map +1 -0
- package/dist/core/workflow.js +46 -0
- package/dist/core/workflow.js.map +1 -0
- package/dist/core/workflow.test.d.ts +2 -0
- package/dist/core/workflow.test.d.ts.map +1 -0
- package/dist/core/workflow.test.js +172 -0
- package/dist/core/workflow.test.js.map +1 -0
- package/dist/database/backend.d.ts +60 -0
- package/dist/database/backend.d.ts.map +1 -0
- package/dist/database/backend.js +387 -0
- package/dist/database/backend.js.map +1 -0
- package/dist/database/backend.test.d.ts +2 -0
- package/dist/database/backend.test.d.ts.map +1 -0
- package/dist/database/backend.test.js +17 -0
- package/dist/database/backend.test.js.map +1 -0
- package/dist/database/backend.testsuite.d.ts +20 -0
- package/dist/database/backend.testsuite.d.ts.map +1 -0
- package/dist/database/backend.testsuite.js +1174 -0
- package/dist/database/backend.testsuite.js.map +1 -0
- package/dist/database/base.d.ts +12 -0
- package/dist/database/base.d.ts.map +1 -0
- package/dist/database/base.js +19 -0
- package/dist/database/base.js.map +1 -0
- package/dist/database/migrations/20251212000000_0_init.js +9 -0
- package/dist/database/migrations/20251212000000_0_init.js.map +1 -0
- package/dist/database/migrations/20251212000000_1_tables.js +88 -0
- package/dist/database/migrations/20251212000000_1_tables.js.map +1 -0
- package/dist/database/migrations/20251212000000_2_fk.js +48 -0
- package/dist/database/migrations/20251212000000_2_fk.js.map +1 -0
- package/dist/database/migrations/20251212000000_3_indexes.js +107 -0
- package/dist/database/migrations/20251212000000_3_indexes.js.map +1 -0
- package/dist/database/pubsub.d.ts +17 -0
- package/dist/database/pubsub.d.ts.map +1 -0
- package/dist/database/pubsub.js +70 -0
- package/dist/database/pubsub.js.map +1 -0
- package/dist/database/pubsub.test.d.ts +2 -0
- package/dist/database/pubsub.test.d.ts.map +1 -0
- package/dist/database/pubsub.test.js +86 -0
- package/dist/database/pubsub.test.js.map +1 -0
- package/dist/errors.d.ts +8 -0
- package/dist/errors.d.ts.map +1 -0
- package/dist/errors.js +21 -0
- package/dist/errors.js.map +1 -0
- package/dist/execution.d.ts +82 -0
- package/dist/execution.d.ts.map +1 -0
- package/dist/execution.js +182 -0
- package/dist/execution.js.map +1 -0
- package/dist/execution.test.d.ts +2 -0
- package/dist/execution.test.d.ts.map +1 -0
- package/dist/execution.test.js +556 -0
- package/dist/execution.test.js.map +1 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +6 -0
- package/dist/index.js.map +1 -0
- package/dist/internal.d.ts +12 -0
- package/dist/internal.d.ts.map +1 -0
- package/dist/internal.js +5 -0
- package/dist/internal.js.map +1 -0
- package/dist/practices/01-remote-workflow.d.ts +2 -0
- package/dist/practices/01-remote-workflow.d.ts.map +1 -0
- package/dist/practices/01-remote-workflow.js +69 -0
- package/dist/practices/01-remote-workflow.js.map +1 -0
- package/dist/practices/01-remote.d.ts +2 -0
- package/dist/practices/01-remote.d.ts.map +1 -0
- package/dist/practices/01-remote.js +87 -0
- package/dist/practices/01-remote.js.map +1 -0
- package/dist/practices/02-local.d.ts +2 -0
- package/dist/practices/02-local.d.ts.map +1 -0
- package/dist/practices/02-local.js +84 -0
- package/dist/practices/02-local.js.map +1 -0
- package/dist/practices/03-local-retry.d.ts +2 -0
- package/dist/practices/03-local-retry.d.ts.map +1 -0
- package/dist/practices/03-local-retry.js +85 -0
- package/dist/practices/03-local-retry.js.map +1 -0
- package/dist/practices/04-scheduler-dispose.d.ts +2 -0
- package/dist/practices/04-scheduler-dispose.d.ts.map +1 -0
- package/dist/practices/04-scheduler-dispose.js +65 -0
- package/dist/practices/04-scheduler-dispose.js.map +1 -0
- package/dist/practices/05-router.d.ts +2 -0
- package/dist/practices/05-router.d.ts.map +1 -0
- package/dist/practices/05-router.js +80 -0
- package/dist/practices/05-router.js.map +1 -0
- package/dist/registry.d.ts +33 -0
- package/dist/registry.d.ts.map +1 -0
- package/dist/registry.js +54 -0
- package/dist/registry.js.map +1 -0
- package/dist/registry.test.d.ts +2 -0
- package/dist/registry.test.d.ts.map +1 -0
- package/dist/registry.test.js +95 -0
- package/dist/registry.test.js.map +1 -0
- package/dist/scheduler.d.ts +22 -0
- package/dist/scheduler.d.ts.map +1 -0
- package/dist/scheduler.js +117 -0
- package/dist/scheduler.js.map +1 -0
- package/dist/tasks/index.d.ts +4 -0
- package/dist/tasks/index.d.ts.map +1 -0
- package/dist/tasks/index.js +5 -0
- package/dist/tasks/index.js.map +1 -0
- package/dist/tasks/local-task.d.ts +6 -0
- package/dist/tasks/local-task.d.ts.map +1 -0
- package/dist/tasks/local-task.js +95 -0
- package/dist/tasks/local-task.js.map +1 -0
- package/dist/tasks/remote-task.d.ts +11 -0
- package/dist/tasks/remote-task.d.ts.map +1 -0
- package/dist/tasks/remote-task.js +213 -0
- package/dist/tasks/remote-task.js.map +1 -0
- package/dist/tasks/shared.d.ts +8 -0
- package/dist/tasks/shared.d.ts.map +1 -0
- package/dist/tasks/shared.js +41 -0
- package/dist/tasks/shared.js.map +1 -0
- package/dist/testing/connection.d.ts +7 -0
- package/dist/testing/connection.d.ts.map +1 -0
- package/dist/testing/connection.js +38 -0
- package/dist/testing/connection.js.map +1 -0
- package/dist/types/config.d.ts +44 -0
- package/dist/types/config.d.ts.map +1 -0
- package/dist/types/config.js +3 -0
- package/dist/types/config.js.map +1 -0
- package/dist/types/context.d.ts +18 -0
- package/dist/types/context.d.ts.map +1 -0
- package/dist/types/context.js +4 -0
- package/dist/types/context.js.map +1 -0
- package/dist/types/events.d.ts +43 -0
- package/dist/types/events.d.ts.map +1 -0
- package/dist/types/events.js +3 -0
- package/dist/types/events.js.map +1 -0
- package/dist/types/index.d.ts +6 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +3 -0
- package/dist/types/index.js.map +1 -0
- package/dist/types/task-items.d.ts +12 -0
- package/dist/types/task-items.d.ts.map +1 -0
- package/dist/types/task-items.js +3 -0
- package/dist/types/task-items.js.map +1 -0
- package/dist/types/utils.d.ts +4 -0
- package/dist/types/utils.d.ts.map +1 -0
- package/dist/types/utils.js +8 -0
- package/dist/types/utils.js.map +1 -0
- package/dist/worker.d.ts +61 -0
- package/dist/worker.d.ts.map +1 -0
- package/dist/worker.js +206 -0
- package/dist/worker.js.map +1 -0
- package/dist/worker.test.d.ts +2 -0
- package/dist/worker.test.d.ts.map +1 -0
- package/dist/worker.test.js +1163 -0
- package/dist/worker.test.js.map +1 -0
- package/dist/workflow.d.ts +44 -0
- package/dist/workflow.d.ts.map +1 -0
- package/dist/workflow.js +21 -0
- package/dist/workflow.js.map +1 -0
- package/dist/workflow.test.d.ts +2 -0
- package/dist/workflow.test.d.ts.map +1 -0
- package/dist/workflow.test.js +73 -0
- package/dist/workflow.test.js.map +1 -0
- package/nodemon.json +6 -0
- package/package.json +63 -0
- package/scripts/migrate.ts +11 -0
- package/src/backend.ts +133 -0
- package/src/chaos.test.ts +108 -0
- package/src/client.test.ts +297 -0
- package/src/client.ts +331 -0
- package/src/config.test.ts +23 -0
- package/src/config.ts +35 -0
- package/src/core/duration.test.ts +326 -0
- package/src/core/duration.ts +86 -0
- package/src/core/error.test.ts +77 -0
- package/src/core/error.ts +30 -0
- package/src/core/json.ts +2 -0
- package/src/core/result.test.ts +13 -0
- package/src/core/result.ts +29 -0
- package/src/core/retry.test.ts +41 -0
- package/src/core/retry.ts +29 -0
- package/src/core/schema.ts +74 -0
- package/src/core/step.test.ts +362 -0
- package/src/core/step.ts +152 -0
- package/src/core/workflow.test.ts +184 -0
- package/src/core/workflow.ts +127 -0
- package/src/database/backend.test.ts +16 -0
- package/src/database/backend.testsuite.ts +1376 -0
- package/src/database/backend.ts +655 -0
- package/src/database/base.ts +23 -0
- package/src/database/migrations/20251212000000_0_init.ts +10 -0
- package/src/database/migrations/20251212000000_1_tables.ts +54 -0
- package/src/database/migrations/20251212000000_2_fk.ts +46 -0
- package/src/database/migrations/20251212000000_3_indexes.ts +82 -0
- package/src/database/pubsub.test.ts +92 -0
- package/src/database/pubsub.ts +92 -0
- package/src/execution.test.ts +508 -0
- package/src/execution.ts +291 -0
- package/src/index.ts +7 -0
- package/src/internal.ts +11 -0
- package/src/practices/01-remote-workflow.ts +61 -0
- package/src/registry.test.ts +122 -0
- package/src/registry.ts +65 -0
- package/src/testing/connection.ts +44 -0
- package/src/worker.test.ts +1138 -0
- package/src/worker.ts +281 -0
- package/src/workflow.test.ts +68 -0
- package/src/workflow.ts +84 -0
- package/table_ddl.sql +60 -0
- package/templates/openworkflow.config.ts +22 -0
- package/tsconfig.json +40 -0
- package/tsconfig.test.json +4 -0
- package/vite.config.ts +13 -0
|
@@ -0,0 +1,655 @@
|
|
|
1
|
+
import { camelize } from "inflection";
|
|
2
|
+
import knex, { type Knex } from "knex";
|
|
3
|
+
import {
|
|
4
|
+
type Backend,
|
|
5
|
+
type CancelWorkflowRunParams,
|
|
6
|
+
type ClaimWorkflowRunParams,
|
|
7
|
+
type CompleteStepAttemptParams,
|
|
8
|
+
type CompleteWorkflowRunParams,
|
|
9
|
+
type CreateStepAttemptParams,
|
|
10
|
+
type CreateWorkflowRunParams,
|
|
11
|
+
DEFAULT_NAMESPACE_ID,
|
|
12
|
+
type ExtendWorkflowRunLeaseParams,
|
|
13
|
+
type FailStepAttemptParams,
|
|
14
|
+
type FailWorkflowRunParams,
|
|
15
|
+
type GetStepAttemptParams,
|
|
16
|
+
type GetWorkflowRunParams,
|
|
17
|
+
type ListStepAttemptsParams,
|
|
18
|
+
type ListWorkflowRunsParams,
|
|
19
|
+
type PaginatedResponse,
|
|
20
|
+
type SleepWorkflowRunParams,
|
|
21
|
+
} from "../backend";
|
|
22
|
+
import { DEFAULT_RETRY_POLICY } from "../core/retry";
|
|
23
|
+
import type { StepAttempt } from "../core/step";
|
|
24
|
+
import type { WorkflowRun } from "../core/workflow";
|
|
25
|
+
import { DEFAULT_SCHEMA, migrate } from "./base";
|
|
26
|
+
import { type OnSubscribed, PostgresPubSub } from "./pubsub";
|
|
27
|
+
|
|
28
|
+
/** Postgres LISTEN/NOTIFY channel this backend publishes/subscribes on. */
export const DEFAULT_LISTEN_CHANNEL = "new_tasks" as const;

/** Default number of rows per page for list queries. */
const DEFAULT_PAGINATION_PAGE_SIZE = 100 as const;

/** Options accepted by {@link BackendPostgres.connect}. */
interface BackendPostgresOptions {
  // Namespace all reads/writes are scoped to. default: DEFAULT_NAMESPACE_ID
  namespaceId?: string;

  // Run schema migrations during connect(). default: true
  runMigrations?: boolean;

  // Enable LISTEN/NOTIFY signaling. default: true
  usePubSub?: boolean;
}
|
|
38
|
+
|
|
39
|
+
/**
|
|
40
|
+
* Manages a connection to a Postgres database for workflow operations.
|
|
41
|
+
*/
|
|
42
|
+
export class BackendPostgres implements Backend {
|
|
43
|
+
private knex: Knex;
|
|
44
|
+
private namespaceId: string;
|
|
45
|
+
private usePubSub: boolean;
|
|
46
|
+
private pubsub: PostgresPubSub | null = null;
|
|
47
|
+
|
|
48
|
+
private constructor(knex: Knex, namespaceId: string, usePubSub: boolean) {
|
|
49
|
+
this.knex = knex;
|
|
50
|
+
this.namespaceId = namespaceId;
|
|
51
|
+
this.usePubSub = usePubSub;
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
async subscribe(callback: OnSubscribed) {
|
|
55
|
+
if (!this.usePubSub) {
|
|
56
|
+
return;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
if (!this.pubsub) {
|
|
60
|
+
this.pubsub = await PostgresPubSub.create(this.knex);
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
this.pubsub.listenEvent(DEFAULT_LISTEN_CHANNEL, callback);
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
async publish(payload?: string): Promise<void> {
|
|
67
|
+
if (!this.usePubSub) {
|
|
68
|
+
return;
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
await this.knex.raw(
|
|
72
|
+
payload
|
|
73
|
+
? `NOTIFY ${DEFAULT_LISTEN_CHANNEL}, '${payload}'`
|
|
74
|
+
: `NOTIFY ${DEFAULT_LISTEN_CHANNEL}`,
|
|
75
|
+
);
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
/**
|
|
79
|
+
* Create and initialize a new BackendPostgres instance. This will
|
|
80
|
+
* automatically run migrations on startup unless `runMigrations` is set to
|
|
81
|
+
* false.
|
|
82
|
+
*/
|
|
83
|
+
static async connect(
|
|
84
|
+
dbConf: Knex.Config,
|
|
85
|
+
options?: BackendPostgresOptions,
|
|
86
|
+
): Promise<BackendPostgres> {
|
|
87
|
+
const postProcessResponse: Knex.Config["postProcessResponse"] = (result, _queryContext) => {
|
|
88
|
+
if (result === null || result === undefined) {
|
|
89
|
+
return result;
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
if (dbConf?.postProcessResponse) {
|
|
93
|
+
result = dbConf.postProcessResponse(result, _queryContext);
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
const camelizeRow = (row: Record<string, unknown>) =>
|
|
97
|
+
Object.fromEntries(Object.entries(row).map(([key, value]) => [camelize(key, true), value]));
|
|
98
|
+
|
|
99
|
+
if (Array.isArray(result)) {
|
|
100
|
+
return result.map(camelizeRow);
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
return camelizeRow(result);
|
|
104
|
+
};
|
|
105
|
+
|
|
106
|
+
const { namespaceId, runMigrations, usePubSub } = {
|
|
107
|
+
namespaceId: DEFAULT_NAMESPACE_ID,
|
|
108
|
+
runMigrations: true,
|
|
109
|
+
usePubSub: true,
|
|
110
|
+
...options,
|
|
111
|
+
};
|
|
112
|
+
|
|
113
|
+
const knexInstance = knex({ ...dbConf, postProcessResponse });
|
|
114
|
+
if (runMigrations) {
|
|
115
|
+
await migrate(knexInstance, DEFAULT_SCHEMA);
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
return new BackendPostgres(knexInstance, namespaceId, usePubSub);
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
  /** Shut down: destroy the pub/sub listener (if any), then the knex pool. */
  async stop(): Promise<void> {
    await this.pubsub?.destroy();
    this.pubsub = null;
    await this.knex.destroy();
  }
|
|
126
|
+
|
|
127
|
+
async createWorkflowRun(params: CreateWorkflowRunParams): Promise<WorkflowRun> {
|
|
128
|
+
const qb = this.knex
|
|
129
|
+
.withSchema(DEFAULT_SCHEMA)
|
|
130
|
+
.table("workflow_runs")
|
|
131
|
+
.insert({
|
|
132
|
+
namespace_id: this.namespaceId,
|
|
133
|
+
id: crypto.randomUUID(),
|
|
134
|
+
workflow_name: params.workflowName,
|
|
135
|
+
version: params.version,
|
|
136
|
+
status: "pending",
|
|
137
|
+
idempotency_key: params.idempotencyKey,
|
|
138
|
+
config: params.config,
|
|
139
|
+
context: params.context,
|
|
140
|
+
input: params.input,
|
|
141
|
+
attempts: 0,
|
|
142
|
+
available_at: params.availableAt ?? this.knex.fn.now(),
|
|
143
|
+
deadline_at: params.deadlineAt,
|
|
144
|
+
created_at: this.knex.fn.now(),
|
|
145
|
+
updated_at: this.knex.fn.now(),
|
|
146
|
+
})
|
|
147
|
+
.returning("*");
|
|
148
|
+
|
|
149
|
+
const workflowRun = await qb;
|
|
150
|
+
if (!workflowRun[0]) {
|
|
151
|
+
throw new Error("Failed to create workflow run");
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
return workflowRun[0];
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
async getWorkflowRun(params: GetWorkflowRunParams): Promise<WorkflowRun | null> {
|
|
158
|
+
const workflowRun = await this.knex
|
|
159
|
+
.withSchema(DEFAULT_SCHEMA)
|
|
160
|
+
.table("workflow_runs")
|
|
161
|
+
.where("namespace_id", this.namespaceId)
|
|
162
|
+
.where("id", params.workflowRunId)
|
|
163
|
+
.select(
|
|
164
|
+
"namespace_id",
|
|
165
|
+
"id",
|
|
166
|
+
"workflow_name",
|
|
167
|
+
"version",
|
|
168
|
+
"status",
|
|
169
|
+
"idempotency_key",
|
|
170
|
+
"config",
|
|
171
|
+
"context",
|
|
172
|
+
"input",
|
|
173
|
+
"output",
|
|
174
|
+
"error",
|
|
175
|
+
"attempts",
|
|
176
|
+
"parent_step_attempt_namespace_id",
|
|
177
|
+
"parent_step_attempt_id",
|
|
178
|
+
"worker_id",
|
|
179
|
+
"available_at",
|
|
180
|
+
"deadline_at",
|
|
181
|
+
"started_at",
|
|
182
|
+
"finished_at",
|
|
183
|
+
"created_at",
|
|
184
|
+
"updated_at",
|
|
185
|
+
)
|
|
186
|
+
.first();
|
|
187
|
+
|
|
188
|
+
return workflowRun ?? null;
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
async listWorkflowRuns(params: ListWorkflowRunsParams): Promise<PaginatedResponse<WorkflowRun>> {
|
|
192
|
+
const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;
|
|
193
|
+
const { after, before } = params;
|
|
194
|
+
|
|
195
|
+
let cursor: Cursor | null = null;
|
|
196
|
+
if (after) {
|
|
197
|
+
cursor = decodeCursor(after);
|
|
198
|
+
} else if (before) {
|
|
199
|
+
cursor = decodeCursor(before);
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
const qb = this.buildListWorkflowRunsWhere(params, cursor);
|
|
203
|
+
const rows = await qb
|
|
204
|
+
.orderBy("created_at", before ? "desc" : "asc")
|
|
205
|
+
.orderBy("id", before ? "desc" : "asc")
|
|
206
|
+
.limit(limit + 1);
|
|
207
|
+
|
|
208
|
+
return this.processPaginationResults(
|
|
209
|
+
rows,
|
|
210
|
+
limit,
|
|
211
|
+
typeof after === "string",
|
|
212
|
+
typeof before === "string",
|
|
213
|
+
);
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
private buildListWorkflowRunsWhere(params: ListWorkflowRunsParams, cursor: Cursor | null) {
|
|
217
|
+
const { after } = params;
|
|
218
|
+
const qb = this.knex
|
|
219
|
+
.withSchema(DEFAULT_SCHEMA)
|
|
220
|
+
.table("workflow_runs")
|
|
221
|
+
.where("namespace_id", this.namespaceId);
|
|
222
|
+
|
|
223
|
+
if (cursor) {
|
|
224
|
+
const operator = after ? ">" : "<";
|
|
225
|
+
return qb.whereRaw(`("created_at", "id") ${operator} (?, ?)`, [
|
|
226
|
+
cursor.createdAt.toISOString(),
|
|
227
|
+
cursor.id,
|
|
228
|
+
]);
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
return qb;
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
  /**
   * Atomically claim the next runnable workflow run for a worker: fail runs
   * whose deadline has passed, pick one runnable candidate under row lock,
   * and lease it to `params.workerId`. Returns null when nothing is claimable.
   */
  async claimWorkflowRun(params: ClaimWorkflowRunParams): Promise<WorkflowRun | null> {
    const claimed = await this.knex
      // Finalize runs whose hard deadline has already passed so they are
      // not retried. (Runs the candidate query also excludes via deadline_at.)
      .with("expired", (qb) =>
        qb
          .withSchema(DEFAULT_SCHEMA)
          .table("workflow_runs")
          .update({
            status: "failed",
            error: JSON.stringify({ message: "Workflow run deadline exceeded" }),
            worker_id: null,
            available_at: null,
            finished_at: this.knex.raw("NOW()"),
            updated_at: this.knex.raw("NOW()"),
          })
          .where("namespace_id", this.namespaceId)
          .whereIn("status", ["pending", "running", "sleeping"])
          .whereNotNull("deadline_at")
          .where("deadline_at", "<=", this.knex.raw("NOW()"))
          .returning("id"),
      )
      // Pick a single runnable candidate, preferring 'pending' runs, and lock
      // it so concurrent workers skip it (FOR UPDATE SKIP LOCKED).
      .with("candidate", (qb) =>
        qb
          .withSchema(DEFAULT_SCHEMA)
          .select("id")
          .from("workflow_runs")
          .where("namespace_id", this.namespaceId)
          .whereIn("status", ["pending", "running", "sleeping"])
          .where("available_at", "<=", this.knex.raw("NOW()"))
          .where((qb2) => {
            qb2.whereNull("deadline_at").orWhere("deadline_at", ">", this.knex.raw("NOW()"));
          })
          .orderByRaw("CASE WHEN status = 'pending' THEN 0 ELSE 1 END")
          .orderBy("available_at", "asc")
          .orderBy("created_at", "asc")
          .limit(1)
          .forUpdate()
          .skipLocked(),
      )
      // Lease the candidate to this worker; available_at doubles as the
      // lease expiry while the run is 'running'.
      .withSchema(DEFAULT_SCHEMA)
      .table("workflow_runs as wr")
      .where("wr.namespace_id", this.namespaceId)
      .where("wr.id", this.knex.ref("candidate.id"))
      .update({
        status: "running",
        attempts: this.knex.raw("wr.attempts + 1"),
        worker_id: params.workerId,
        // NOTE(review): leaseDurationMs is interpolated into raw SQL — safe
        // only while it is a number; consider a bound parameter.
        available_at: this.knex.raw(`NOW() + ${params.leaseDurationMs} * INTERVAL '1 millisecond'`),
        // Preserve the first attempt's start time across retries.
        started_at: this.knex.raw("COALESCE(wr.started_at, NOW())"),
        updated_at: this.knex.raw("NOW()"),
      })
      .updateFrom("candidate")
      .returning("wr.*");

    return claimed[0] ?? null;
  }
|
|
289
|
+
|
|
290
|
+
async extendWorkflowRunLease(params: ExtendWorkflowRunLeaseParams): Promise<WorkflowRun> {
|
|
291
|
+
const [updated] = await this.knex
|
|
292
|
+
.withSchema(DEFAULT_SCHEMA)
|
|
293
|
+
.table("workflow_runs")
|
|
294
|
+
.where("namespace_id", this.namespaceId)
|
|
295
|
+
.where("id", params.workflowRunId)
|
|
296
|
+
.where("status", "running")
|
|
297
|
+
.where("worker_id", params.workerId)
|
|
298
|
+
.update({
|
|
299
|
+
available_at: this.knex.raw(`NOW() + ${params.leaseDurationMs} * INTERVAL '1 millisecond'`),
|
|
300
|
+
updated_at: this.knex.fn.now(),
|
|
301
|
+
})
|
|
302
|
+
.returning("*");
|
|
303
|
+
|
|
304
|
+
if (!updated) {
|
|
305
|
+
throw new Error("Failed to extend lease for workflow run");
|
|
306
|
+
}
|
|
307
|
+
|
|
308
|
+
return updated;
|
|
309
|
+
}
|
|
310
|
+
|
|
311
|
+
async sleepWorkflowRun(params: SleepWorkflowRunParams): Promise<WorkflowRun> {
|
|
312
|
+
// 'succeeded' status is deprecated
|
|
313
|
+
const [updated] = await this.knex
|
|
314
|
+
.withSchema(DEFAULT_SCHEMA)
|
|
315
|
+
.table("workflow_runs")
|
|
316
|
+
.where("namespace_id", this.namespaceId)
|
|
317
|
+
.where("id", params.workflowRunId)
|
|
318
|
+
.whereNotIn("status", ["succeeded", "completed", "failed", "canceled"])
|
|
319
|
+
.where("worker_id", params.workerId)
|
|
320
|
+
.update({
|
|
321
|
+
status: "sleeping",
|
|
322
|
+
available_at: params.availableAt,
|
|
323
|
+
worker_id: null,
|
|
324
|
+
updated_at: this.knex.fn.now(),
|
|
325
|
+
})
|
|
326
|
+
.returning("*");
|
|
327
|
+
|
|
328
|
+
if (!updated) {
|
|
329
|
+
throw new Error("Failed to sleep workflow run");
|
|
330
|
+
}
|
|
331
|
+
|
|
332
|
+
return updated;
|
|
333
|
+
}
|
|
334
|
+
|
|
335
|
+
async completeWorkflowRun(params: CompleteWorkflowRunParams): Promise<WorkflowRun> {
|
|
336
|
+
const [updated] = await this.knex
|
|
337
|
+
.withSchema(DEFAULT_SCHEMA)
|
|
338
|
+
.table("workflow_runs")
|
|
339
|
+
.where("namespace_id", this.namespaceId)
|
|
340
|
+
.where("id", params.workflowRunId)
|
|
341
|
+
.where("status", "running")
|
|
342
|
+
.where("worker_id", params.workerId)
|
|
343
|
+
.update({
|
|
344
|
+
status: "completed",
|
|
345
|
+
output: JSON.stringify(params.output),
|
|
346
|
+
error: null,
|
|
347
|
+
worker_id: params.workerId,
|
|
348
|
+
available_at: null,
|
|
349
|
+
finished_at: this.knex.fn.now(),
|
|
350
|
+
updated_at: this.knex.fn.now(),
|
|
351
|
+
})
|
|
352
|
+
.returning("*");
|
|
353
|
+
|
|
354
|
+
if (!updated) {
|
|
355
|
+
throw new Error("Failed to complete workflow run");
|
|
356
|
+
}
|
|
357
|
+
|
|
358
|
+
return updated;
|
|
359
|
+
}
|
|
360
|
+
|
|
361
|
+
  /**
   * Record a failure for a run held by this worker: either schedule a retry
   * (status back to 'pending' with a backoff delay) or finalize as 'failed'
   * when the next retry would overrun the deadline. Throws when no running
   * row matched (lease lost / wrong worker).
   */
  async failWorkflowRun(params: FailWorkflowRunParams): Promise<WorkflowRun> {
    const { workflowRunId, error } = params;
    const { initialIntervalMs, backoffCoefficient, maximumIntervalMs } = DEFAULT_RETRY_POLICY;

    // this beefy query updates a workflow's status, available_at, and
    // finished_at based on the workflow's deadline and retry policy
    //
    // if the next retry would exceed the deadline, the run is marked as
    // 'failed' and finalized, otherwise, the run is rescheduled with an updated
    // 'available_at' timestamp for the next retry
    //
    // Exponential backoff: initialInterval * backoff^(attempts - 1), capped
    // at maximumInterval. All three values are constants from
    // DEFAULT_RETRY_POLICY, so interpolating them into raw SQL is safe.
    const retryIntervalExpr = `LEAST(${initialIntervalMs} * POWER(${backoffCoefficient}, "attempts" - 1), ${maximumIntervalMs}) * INTERVAL '1 millisecond'`;
    const deadlineExceededCondition = `"deadline_at" IS NOT NULL AND NOW() + (${retryIntervalExpr}) >= "deadline_at"`;

    const [updated] = await this.knex
      .withSchema(DEFAULT_SCHEMA)
      .table("workflow_runs")
      .where("namespace_id", this.namespaceId)
      .where("id", workflowRunId)
      .where("status", "running")
      .where("worker_id", params.workerId)
      .update({
        status: this.knex.raw(
          `CASE WHEN ${deadlineExceededCondition} THEN 'failed' ELSE 'pending' END`,
        ),
        available_at: this.knex.raw(
          `CASE WHEN ${deadlineExceededCondition} THEN NULL ELSE NOW() + (${retryIntervalExpr}) END`,
        ),
        finished_at: this.knex.raw(
          `CASE WHEN ${deadlineExceededCondition} THEN NOW() ELSE NULL END`,
        ),
        error: JSON.stringify(error),
        worker_id: null,
        // started_at is cleared on both branches; on a retry,
        // claimWorkflowRun re-populates it via COALESCE(started_at, NOW()).
        // NOTE(review): confirm clearing it is intended for the terminal
        // 'failed' branch as well.
        started_at: null,
        updated_at: this.knex.fn.now(),
      })
      .returning("*");

    if (!updated) {
      throw new Error("Failed to mark workflow run failed");
    }

    return updated;
  }
|
|
404
|
+
|
|
405
|
+
async cancelWorkflowRun(params: CancelWorkflowRunParams): Promise<WorkflowRun> {
|
|
406
|
+
const [updated] = await this.knex
|
|
407
|
+
.withSchema(DEFAULT_SCHEMA)
|
|
408
|
+
.table("workflow_runs")
|
|
409
|
+
.where("namespace_id", this.namespaceId)
|
|
410
|
+
.where("id", params.workflowRunId)
|
|
411
|
+
.whereIn("status", ["pending", "running", "sleeping"])
|
|
412
|
+
.update({
|
|
413
|
+
status: "canceled",
|
|
414
|
+
worker_id: null,
|
|
415
|
+
available_at: null,
|
|
416
|
+
finished_at: this.knex.fn.now(),
|
|
417
|
+
updated_at: this.knex.fn.now(),
|
|
418
|
+
})
|
|
419
|
+
.returning("*");
|
|
420
|
+
|
|
421
|
+
if (!updated) {
|
|
422
|
+
// workflow may already be in a terminal state
|
|
423
|
+
const existing = await this.getWorkflowRun({
|
|
424
|
+
workflowRunId: params.workflowRunId,
|
|
425
|
+
});
|
|
426
|
+
if (!existing) {
|
|
427
|
+
throw new Error(`Workflow run ${params.workflowRunId} does not exist`);
|
|
428
|
+
}
|
|
429
|
+
|
|
430
|
+
// if already canceled, just return it
|
|
431
|
+
if (existing.status === "canceled") {
|
|
432
|
+
return existing;
|
|
433
|
+
}
|
|
434
|
+
|
|
435
|
+
// throw error for completed/failed workflows
|
|
436
|
+
// 'succeeded' status is deprecated
|
|
437
|
+
if (["succeeded", "completed", "failed"].includes(existing.status)) {
|
|
438
|
+
throw new Error(
|
|
439
|
+
`Cannot cancel workflow run ${params.workflowRunId} with status ${existing.status}`,
|
|
440
|
+
);
|
|
441
|
+
}
|
|
442
|
+
|
|
443
|
+
throw new Error("Failed to cancel workflow run");
|
|
444
|
+
}
|
|
445
|
+
|
|
446
|
+
return updated;
|
|
447
|
+
}
|
|
448
|
+
|
|
449
|
+
async createStepAttempt(params: CreateStepAttemptParams): Promise<StepAttempt> {
|
|
450
|
+
const [stepAttempt] = await this.knex
|
|
451
|
+
.withSchema(DEFAULT_SCHEMA)
|
|
452
|
+
.table("step_attempts")
|
|
453
|
+
.insert({
|
|
454
|
+
namespace_id: this.namespaceId,
|
|
455
|
+
id: crypto.randomUUID(),
|
|
456
|
+
workflow_run_id: params.workflowRunId,
|
|
457
|
+
step_name: params.stepName,
|
|
458
|
+
kind: params.kind,
|
|
459
|
+
status: "running",
|
|
460
|
+
config: JSON.stringify(params.config),
|
|
461
|
+
context: JSON.stringify(params.context),
|
|
462
|
+
started_at: this.knex.fn.now(),
|
|
463
|
+
created_at: this.knex.raw("date_trunc('milliseconds', NOW())"),
|
|
464
|
+
updated_at: this.knex.fn.now(),
|
|
465
|
+
})
|
|
466
|
+
.returning("*");
|
|
467
|
+
|
|
468
|
+
if (!stepAttempt) {
|
|
469
|
+
throw new Error("Failed to create step attempt");
|
|
470
|
+
}
|
|
471
|
+
|
|
472
|
+
return stepAttempt;
|
|
473
|
+
}
|
|
474
|
+
|
|
475
|
+
async getStepAttempt(params: GetStepAttemptParams): Promise<StepAttempt | null> {
|
|
476
|
+
const stepAttempt = await this.knex
|
|
477
|
+
.withSchema(DEFAULT_SCHEMA)
|
|
478
|
+
.table("step_attempts")
|
|
479
|
+
.where("namespace_id", this.namespaceId)
|
|
480
|
+
.where("id", params.stepAttemptId)
|
|
481
|
+
.first();
|
|
482
|
+
|
|
483
|
+
return stepAttempt ?? null;
|
|
484
|
+
}
|
|
485
|
+
|
|
486
|
+
async listStepAttempts(params: ListStepAttemptsParams): Promise<PaginatedResponse<StepAttempt>> {
|
|
487
|
+
const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;
|
|
488
|
+
const { after, before } = params;
|
|
489
|
+
|
|
490
|
+
let cursor: Cursor | null = null;
|
|
491
|
+
if (after) {
|
|
492
|
+
cursor = decodeCursor(after);
|
|
493
|
+
} else if (before) {
|
|
494
|
+
cursor = decodeCursor(before);
|
|
495
|
+
}
|
|
496
|
+
|
|
497
|
+
const qb = this.buildListStepAttemptsWhere(params, cursor);
|
|
498
|
+
const rows = await qb
|
|
499
|
+
.orderBy("created_at", before ? "desc" : "asc")
|
|
500
|
+
.orderBy("id", before ? "desc" : "asc")
|
|
501
|
+
.limit(limit + 1);
|
|
502
|
+
|
|
503
|
+
return this.processPaginationResults(
|
|
504
|
+
rows,
|
|
505
|
+
limit,
|
|
506
|
+
typeof after === "string",
|
|
507
|
+
typeof before === "string",
|
|
508
|
+
);
|
|
509
|
+
}
|
|
510
|
+
|
|
511
|
+
private buildListStepAttemptsWhere(params: ListStepAttemptsParams, cursor: Cursor | null) {
|
|
512
|
+
const { after } = params;
|
|
513
|
+
const qb = this.knex
|
|
514
|
+
.withSchema(DEFAULT_SCHEMA)
|
|
515
|
+
.table("step_attempts")
|
|
516
|
+
.where("namespace_id", this.namespaceId)
|
|
517
|
+
.where("workflow_run_id", params.workflowRunId);
|
|
518
|
+
|
|
519
|
+
if (cursor) {
|
|
520
|
+
const operator = after ? ">" : "<";
|
|
521
|
+
return qb.whereRaw(`("created_at", "id") ${operator} (?, ?)`, [
|
|
522
|
+
cursor.createdAt.toISOString(),
|
|
523
|
+
cursor.id,
|
|
524
|
+
]);
|
|
525
|
+
}
|
|
526
|
+
|
|
527
|
+
return qb;
|
|
528
|
+
}
|
|
529
|
+
|
|
530
|
+
private processPaginationResults<T extends Cursor>(
|
|
531
|
+
rows: T[],
|
|
532
|
+
limit: number,
|
|
533
|
+
hasAfter: boolean,
|
|
534
|
+
hasBefore: boolean,
|
|
535
|
+
): PaginatedResponse<T> {
|
|
536
|
+
const data = rows;
|
|
537
|
+
let hasNext = false;
|
|
538
|
+
let hasPrev = false;
|
|
539
|
+
|
|
540
|
+
if (hasBefore) {
|
|
541
|
+
data.reverse();
|
|
542
|
+
if (data.length > limit) {
|
|
543
|
+
hasPrev = true;
|
|
544
|
+
data.shift();
|
|
545
|
+
}
|
|
546
|
+
hasNext = true;
|
|
547
|
+
} else {
|
|
548
|
+
if (data.length > limit) {
|
|
549
|
+
hasNext = true;
|
|
550
|
+
data.pop();
|
|
551
|
+
}
|
|
552
|
+
if (hasAfter) {
|
|
553
|
+
hasPrev = true;
|
|
554
|
+
}
|
|
555
|
+
}
|
|
556
|
+
|
|
557
|
+
const lastItem = data.at(-1);
|
|
558
|
+
const nextCursor = hasNext && lastItem ? encodeCursor(lastItem) : null;
|
|
559
|
+
const firstItem = data[0];
|
|
560
|
+
const prevCursor = hasPrev && firstItem ? encodeCursor(firstItem) : null;
|
|
561
|
+
|
|
562
|
+
return {
|
|
563
|
+
data,
|
|
564
|
+
pagination: {
|
|
565
|
+
next: nextCursor,
|
|
566
|
+
prev: prevCursor,
|
|
567
|
+
},
|
|
568
|
+
};
|
|
569
|
+
}
|
|
570
|
+
|
|
571
|
+
async completeStepAttempt(params: CompleteStepAttemptParams): Promise<StepAttempt> {
|
|
572
|
+
const [updated] = await this.knex
|
|
573
|
+
.withSchema(DEFAULT_SCHEMA)
|
|
574
|
+
.table("step_attempts as sa")
|
|
575
|
+
.update({
|
|
576
|
+
status: "completed",
|
|
577
|
+
output: JSON.stringify(params.output),
|
|
578
|
+
error: null,
|
|
579
|
+
finished_at: this.knex.fn.now(),
|
|
580
|
+
updated_at: this.knex.fn.now(),
|
|
581
|
+
})
|
|
582
|
+
.updateFrom(`${DEFAULT_SCHEMA}.workflow_runs as wr`)
|
|
583
|
+
.where("sa.namespace_id", this.namespaceId)
|
|
584
|
+
.where("sa.workflow_run_id", params.workflowRunId)
|
|
585
|
+
.where("sa.id", params.stepAttemptId)
|
|
586
|
+
.where("sa.status", "running")
|
|
587
|
+
.where("wr.namespace_id", this.knex.ref("sa.namespace_id"))
|
|
588
|
+
.where("wr.id", this.knex.ref("sa.workflow_run_id"))
|
|
589
|
+
.where("wr.status", "running")
|
|
590
|
+
.where("wr.worker_id", params.workerId)
|
|
591
|
+
.returning("sa.*");
|
|
592
|
+
|
|
593
|
+
if (!updated) {
|
|
594
|
+
throw new Error("Failed to mark step attempt completed");
|
|
595
|
+
}
|
|
596
|
+
|
|
597
|
+
return updated;
|
|
598
|
+
}
|
|
599
|
+
|
|
600
|
+
async failStepAttempt(params: FailStepAttemptParams): Promise<StepAttempt> {
|
|
601
|
+
const [updated] = await this.knex
|
|
602
|
+
.withSchema(DEFAULT_SCHEMA)
|
|
603
|
+
.table("step_attempts as sa")
|
|
604
|
+
.update({
|
|
605
|
+
status: "failed",
|
|
606
|
+
output: null,
|
|
607
|
+
error: JSON.stringify(params.error),
|
|
608
|
+
finished_at: this.knex.fn.now(),
|
|
609
|
+
updated_at: this.knex.fn.now(),
|
|
610
|
+
})
|
|
611
|
+
.updateFrom(`${DEFAULT_SCHEMA}.workflow_runs as wr`)
|
|
612
|
+
.where("sa.namespace_id", this.namespaceId)
|
|
613
|
+
.where("sa.workflow_run_id", params.workflowRunId)
|
|
614
|
+
.where("sa.id", params.stepAttemptId)
|
|
615
|
+
.where("sa.status", "running")
|
|
616
|
+
.where("wr.namespace_id", this.knex.ref("sa.namespace_id"))
|
|
617
|
+
.where("wr.id", this.knex.ref("sa.workflow_run_id"))
|
|
618
|
+
.where("wr.status", "running")
|
|
619
|
+
.where("wr.worker_id", params.workerId)
|
|
620
|
+
.returning("sa.*");
|
|
621
|
+
|
|
622
|
+
if (!updated) {
|
|
623
|
+
throw new Error("Failed to mark step attempt failed");
|
|
624
|
+
}
|
|
625
|
+
|
|
626
|
+
return updated;
|
|
627
|
+
}
|
|
628
|
+
}
|
|
629
|
+
|
|
630
|
+
/**
 * Keyset-pagination cursor; any paginated row type must expose created_at
 * and id. Because a JS Date cannot represent microsecond precision,
 * created_at should be stored with millisecond precision in paginated
 * tables — otherwise a round-tripped cursor may not compare consistently
 * against stored values.
 */
interface Cursor {
  // Creation timestamp of the row the cursor points at (millisecond precision).
  createdAt: Date;
  // Row id; used as the tie-breaker when created_at values collide.
  id: string;
}
|
|
640
|
+
|
|
641
|
+
function encodeCursor(item: Cursor): string {
|
|
642
|
+
const encoded = Buffer.from(
|
|
643
|
+
JSON.stringify({ createdAt: item.createdAt.toISOString(), id: item.id }),
|
|
644
|
+
).toString("base64");
|
|
645
|
+
return encoded;
|
|
646
|
+
}
|
|
647
|
+
|
|
648
|
+
export function decodeCursor(cursor: string): Cursor {
|
|
649
|
+
const decoded = Buffer.from(cursor, "base64").toString("utf8");
|
|
650
|
+
const parsed = JSON.parse(decoded) as { createdAt: string; id: string };
|
|
651
|
+
return {
|
|
652
|
+
createdAt: new Date(parsed.createdAt),
|
|
653
|
+
id: parsed.id,
|
|
654
|
+
};
|
|
655
|
+
}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import type { Knex } from "knex";
|
|
3
|
+
|
|
4
|
+
// Database schema that all sonamu-tasks tables and migrations live in.
export const DEFAULT_SCHEMA = "sonamu_tasks";
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* migrate applies pending migrations to the database. Does nothing if the
|
|
8
|
+
* database is already up to date.
|
|
9
|
+
*/
|
|
10
|
+
export async function migrate(knex: Knex, schema: string) {
|
|
11
|
+
await knex.schema.createSchemaIfNotExists(schema);
|
|
12
|
+
await knex.migrate.latest({
|
|
13
|
+
directory: path.join(import.meta.dirname, "migrations"),
|
|
14
|
+
schemaName: schema,
|
|
15
|
+
});
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
/**
 * dropSchema drops the specified schema from the database. Destructive:
 * everything stored under the schema is removed as well.
 */
export async function dropSchema(knex: Knex, schema: string) {
  // Second argument `true` => cascade, dropping contained objects too.
  await knex.schema.dropSchemaIfExists(schema, true);
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import type { Knex } from "knex";
|
|
2
|
+
import { DEFAULT_SCHEMA } from "../base";
|
|
3
|
+
|
|
4
|
+
// Initial migration step: create the package's schema if it is missing.
export async function up(knex: Knex): Promise<void> {
  await knex.schema.createSchemaIfNotExists(DEFAULT_SCHEMA);
}
|
|
7
|
+
|
|
8
|
+
// Rollback: drop the schema; `true` => cascade, removing contained objects.
export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropSchemaIfExists(DEFAULT_SCHEMA, true);
}
|