@gencow/core 0.1.22 → 0.1.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/crud.js +1 -1
- package/dist/index.d.ts +6 -1
- package/dist/index.js +3 -0
- package/dist/reactive.js +6 -0
- package/dist/rls-db.d.ts +43 -4
- package/dist/rls-db.js +212 -7
- package/dist/rls.d.ts +1 -1
- package/dist/rls.js +1 -1
- package/dist/scheduler.d.ts +35 -5
- package/dist/scheduler.js +83 -42
- package/dist/workflow-types.d.ts +81 -0
- package/dist/workflow-types.js +12 -0
- package/dist/workflow.d.ts +30 -0
- package/dist/workflow.js +157 -0
- package/dist/workflows-api.d.ts +13 -0
- package/dist/workflows-api.js +328 -0
- package/package.json +1 -1
- package/src/__tests__/crud-owner-rls.test.ts +6 -6
- package/src/__tests__/dist-exports.test.ts +6 -0
- package/src/__tests__/fixtures/basic/migrations/{0000_faithful_silver_sable.sql → 0000_last_warstar.sql} +9 -0
- package/src/__tests__/fixtures/basic/migrations/meta/0000_snapshot.json +60 -1
- package/src/__tests__/fixtures/basic/migrations/meta/_journal.json +2 -2
- package/src/__tests__/fixtures/basic/schema.ts +19 -3
- package/src/__tests__/helpers/basic-rls-fixture.ts +133 -0
- package/src/__tests__/helpers/test-gencow-ctx-rls.ts +1 -1
- package/src/__tests__/reactive.test.ts +161 -0
- package/src/__tests__/rls-crud-basic.test.ts +120 -161
- package/src/__tests__/rls-crud-no-owner-rls-pglite.test.ts +117 -0
- package/src/__tests__/rls-custom-mutation-handlers.test.ts +189 -0
- package/src/__tests__/rls-custom-query-handlers.test.ts +128 -0
- package/src/__tests__/rls-db-leased-connection.test.ts +122 -0
- package/src/__tests__/rls-session-and-policies.test.ts +246 -0
- package/src/__tests__/scheduler-durable-v2.test.ts +270 -0
- package/src/__tests__/scheduler-durable.test.ts +173 -0
- package/src/__tests__/workflow.test.ts +583 -0
- package/src/crud.ts +1 -1
- package/src/index.ts +6 -4
- package/src/reactive.ts +8 -0
- package/src/rls-db.ts +277 -10
- package/src/rls.ts +1 -1
- package/src/scheduler.ts +124 -46
- package/src/workflow-types.ts +111 -0
- package/src/workflow.ts +205 -0
- package/src/workflows-api.ts +425 -0
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/**
 * Maps a persisted workflow status plus the current step marker to the
 * client-facing status. Non-"pending" statuses pass through unchanged; a
 * pending workflow is refined to "sleeping" (step marker "sleep#…"),
 * "waiting" (step marker "wait:…"), or "queued" otherwise.
 */
export function deriveWorkflowStatus(status, currentStep) {
    if (status !== "pending") {
        return status;
    }
    const step = currentStep ?? "";
    if (step.startsWith("sleep#")) {
        return "sleeping";
    }
    return step.startsWith("wait:") ? "waiting" : "queued";
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import type { MutationDef } from "./reactive.js";
import type { WorkflowDef, WorkflowDuration, WorkflowOptions, WorkflowStartResult } from "./workflow-types.js";
declare global {
    // Process-wide workflow registry; kept on globalThis so multiple module
    // instances (e.g. dev-server reloads) share a single map.
    var __gencow_workflowRegistry: Map<string, WorkflowDef<any, any>>;
}
// Default wall-clock budget for a workflow run, in milliseconds.
export declare const DEFAULT_WORKFLOW_MAX_DURATION_MS: number;
// Default retry budget before a workflow is marked failed.
export declare const DEFAULT_WORKFLOW_MAX_RETRIES = 3;
// Scheduler action name prefix used to (re)enter a suspended workflow.
export declare const WORKFLOW_RESUME_ACTION_PREFIX = "__gencow.workflow.resume";
// Realtime channel key prefix for streaming workflow state to clients.
export declare const WORKFLOW_REALTIME_KEY_PREFIX = "__gencow.workflow.state";
// Persistence envelope for workflow values: `undefined` cannot survive JSON,
// so it is boxed behind a marker flag instead of a `value` slot.
type SerializedWorkflowValue = {
    __gencowUndefined: true;
} | {
    value: unknown;
};
export declare function serializeWorkflowValue(value: unknown): SerializedWorkflowValue;
export declare function deserializeWorkflowValue(value: unknown): unknown;
// Parses a millisecond count or a duration literal ("30m", "90s", "1h") to ms.
export declare function parseWorkflowDurationMs(raw: WorkflowDuration, label?: string): number;
export declare function getWorkflowResumeActionName(name: string): string;
export declare function createWorkflowRealtimeToken(): string;
export declare function getWorkflowRealtimeKey(workflowId: string, realtimeToken: string): string;
export declare function getWorkflowDef(name: string): WorkflowDef | undefined;
export declare function getRegisteredWorkflows(): WorkflowDef[];
/**
 * workflow() — durable multi-step execution with step memoization.
 *
 * The returned value is still a mutation definition, so existing API codegen and
 * frontend hooks keep working without extra workflow-specific tooling.
 */
export declare function workflow<TSchema = any, TReturn = any>(name: string, options: WorkflowOptions<TSchema, TReturn>): MutationDef<TSchema, WorkflowStartResult>;
export {};
|
package/dist/workflow.js
ADDED
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
import { sql } from "drizzle-orm";
|
|
2
|
+
import { mutation } from "./reactive.js";
|
|
3
|
+
import { registerWorkflowsApi } from "./workflows-api.js";
|
|
4
|
+
// Process-wide registry of workflow definitions; stored on globalThis so
// multiple module instances (e.g. dev-server reloads) share one map.
const workflowRegistry = globalThis.__gencow_workflowRegistry ??= new Map();
// Default wall-clock budget for a workflow run: 30 minutes.
export const DEFAULT_WORKFLOW_MAX_DURATION_MS = 30 * 60 * 1000;
// Default retry budget before a workflow is marked failed.
export const DEFAULT_WORKFLOW_MAX_RETRIES = 3;
// Scheduler action name prefix used to (re)enter a suspended workflow.
export const WORKFLOW_RESUME_ACTION_PREFIX = "__gencow.workflow.resume";
// Realtime channel key prefix for streaming workflow state to clients.
export const WORKFLOW_REALTIME_KEY_PREFIX = "__gencow.workflow.state";
|
|
9
|
+
// True when `value` has the envelope shape produced by serializeWorkflowValue:
// an object carrying either the `__gencowUndefined` marker or a `value` slot.
function isSerializedWorkflowValue(value) {
    if (value == null || typeof value !== "object") {
        return false;
    }
    return "__gencowUndefined" in value || "value" in value;
}
|
|
13
|
+
export function serializeWorkflowValue(value) {
|
|
14
|
+
const payload = value === undefined
|
|
15
|
+
? { __gencowUndefined: true }
|
|
16
|
+
: { value };
|
|
17
|
+
try {
|
|
18
|
+
return JSON.parse(JSON.stringify(payload));
|
|
19
|
+
}
|
|
20
|
+
catch (error) {
|
|
21
|
+
const reason = error instanceof Error ? error.message : String(error);
|
|
22
|
+
throw new Error(`workflow() only persists JSON-serializable values. Failed to serialize workflow payload: ${reason}`);
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
// Inverse of serializeWorkflowValue: unwraps a persistence envelope back to
// the original value. Anything that is not an envelope passes through as-is.
export function deserializeWorkflowValue(value) {
    if (isSerializedWorkflowValue(value)) {
        if ("__gencowUndefined" in value) {
            return undefined;
        }
        return value.value;
    }
    return value;
}
|
|
32
|
+
// Normalizes the user-supplied retry count: null/undefined falls back to the
// default, negative or non-finite values are rejected, fractions are floored.
function clampRetries(retries) {
    if (retries == null) {
        return DEFAULT_WORKFLOW_MAX_RETRIES;
    }
    const valid = Number.isFinite(retries) && retries >= 0;
    if (!valid) {
        throw new Error(`workflow() retries must be a non-negative finite number, got "${retries}"`);
    }
    return Math.floor(retries);
}
|
|
40
|
+
/**
 * Parses a duration literal such as "30m", "90s", "1h", "250ms", or "2d"
 * into milliseconds. Case-insensitive; surrounding whitespace is ignored.
 *
 * @param raw   the duration string to parse
 * @param label identifies the offending option in error messages
 * @returns duration in milliseconds (always > 0)
 * @throws Error when the string is malformed or the duration is zero
 */
function parseDurationString(raw, label) {
    const normalized = raw.trim().toLowerCase();
    const match = normalized.match(/^(\d+)(ms|s|m|h|d)$/);
    if (!match) {
        throw new Error(`${label} must be a number of ms or a string like "30m", "90s", "1h" — got "${raw}"`);
    }
    const value = Number(match[1]);
    const unit = match[2];
    const unitMs = unit === "ms" ? 1 :
        unit === "s" ? 1_000 :
            unit === "m" ? 60_000 :
                unit === "h" ? 3_600_000 :
                    86_400_000;
    const ms = value * unitMs;
    // Keep the string form consistent with the numeric form accepted by
    // parseWorkflowDurationMs, which rejects non-positive numbers: without
    // this check "0s"/"0m" would slip through as a zero-length duration.
    if (ms <= 0) {
        throw new Error(`${label} must be a positive duration, got "${raw}"`);
    }
    return ms;
}
|
|
55
|
+
/**
 * Resolves a workflow duration option to milliseconds.
 * Numbers are treated as a millisecond count (must be positive and finite,
 * fractions floored); strings are parsed as duration literals like "30m".
 *
 * @param raw   millisecond count or duration literal
 * @param label identifies the option in error messages
 */
export function parseWorkflowDurationMs(raw, label = "workflow duration") {
    if (typeof raw === "number") {
        if (Number.isFinite(raw) && raw > 0) {
            return Math.floor(raw);
        }
        throw new Error(`${label} must be a positive finite number, got "${raw}"`);
    }
    if (typeof raw === "string") {
        return parseDurationString(raw, label);
    }
    throw new Error(`${label} must be a positive finite number or a string like "30m", "90s", "1h" — got "${String(raw)}"`);
}
|
|
67
|
+
// Resolves the optional maxDuration option to milliseconds, falling back to
// the package default when the option is absent.
function normalizeMaxDurationMs(maxDuration) {
    return maxDuration == null
        ? DEFAULT_WORKFLOW_MAX_DURATION_MS
        : parseWorkflowDurationMs(maxDuration, "workflow() maxDuration");
}
|
|
72
|
+
// Builds the scheduler action name used to (re)enter a workflow by name.
export function getWorkflowResumeActionName(name) {
    return [WORKFLOW_RESUME_ACTION_PREFIX, name].join(".");
}
|
|
75
|
+
export function createWorkflowRealtimeToken() {
|
|
76
|
+
return crypto.randomUUID().replace(/-/g, "");
|
|
77
|
+
}
|
|
78
|
+
// Key under which a workflow publishes realtime state; binding the token into
// the key means only holders of the token can subscribe to updates.
export function getWorkflowRealtimeKey(workflowId, realtimeToken) {
    return [WORKFLOW_REALTIME_KEY_PREFIX, workflowId, realtimeToken].join(".");
}
|
|
81
|
+
// Looks up a registered workflow definition by name (undefined when absent).
export function getWorkflowDef(name) {
    return workflowRegistry.get(name);
}
|
|
84
|
+
// Snapshot of every registered workflow definition, in registration order.
export function getRegisteredWorkflows() {
    return [...workflowRegistry.values()];
}
|
|
87
|
+
/**
 * workflow() — durable multi-step execution with step memoization.
 *
 * The returned value is still a mutation definition, so existing API codegen and
 * frontend hooks keep working without extra workflow-specific tooling.
 */
export function workflow(name, options) {
    // Ensure the shared workflows.* endpoints exist (idempotent — guarded by
    // a global flag inside registerWorkflowsApi).
    registerWorkflowsApi();
    // Validate options eagerly so misconfiguration fails at definition time,
    // not on first invocation.
    const maxDurationMs = normalizeMaxDurationMs(options.maxDuration);
    const maxRetries = clampRetries(options.retries);
    const def = {
        name,
        argsSchema: options.args,
        isPublic: options.public === true,
        maxDurationMs,
        maxRetries,
        handler: options.handler,
    };
    workflowRegistry.set(name, def);
    // The workflow is exposed as a plain mutation: calling it persists a
    // 'pending' row and schedules the resume action that runs the steps.
    return mutation(name, {
        args: options.args,
        public: options.public,
        handler: async (ctx, args) => {
            const workflowId = crypto.randomUUID();
            const resumeAction = getWorkflowResumeActionName(name);
            // Owner (when authenticated) — used for viewer-scoped reads later.
            const ownerId = ctx.auth.getUserIdentity()?.id ?? null;
            // Args are persisted through the JSON envelope; `undefined` args
            // are normalized to an empty object first.
            const persistedArgs = serializeWorkflowValue(args ?? {});
            const realtimeToken = createWorkflowRealtimeToken();
            await ctx.unsafeDb.execute(sql `
        INSERT INTO _gencow_workflows (
          id,
          name,
          args,
          realtime_token,
          status,
          retry_count,
          max_retries,
          max_duration_ms,
          user_id
        )
        VALUES (
          ${workflowId},
          ${name},
          ${JSON.stringify(persistedArgs)}::jsonb,
          ${realtimeToken},
          'pending',
          0,
          ${maxRetries},
          ${maxDurationMs},
          ${ownerId}
        )
      `);
            try {
                // Kick off the first resume immediately.
                const scheduledJobId = ctx.scheduler.runAfter(0, resumeAction, { workflowId });
                return {
                    id: workflowId,
                    name,
                    status: "pending",
                    scheduledJobId,
                };
            }
            catch (error) {
                // Scheduling failed: roll back the orphan row so the workflow
                // does not sit in 'pending' with nothing driving it.
                await ctx.unsafeDb.execute(sql `
          DELETE FROM _gencow_workflows
          WHERE id = ${workflowId}
        `);
                throw error;
            }
        },
    });
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import type { WorkflowSnapshot } from "./workflow-types.js";
declare global {
    // One-shot guard so the workflows.* endpoints register only once per process.
    var __gencow_workflowsApiRegistered: boolean | undefined;
}
// Minimal database surface the snapshot loader needs.
type WorkflowDbLike = {
    execute: (query: unknown) => Promise<unknown>;
};
// Loads a workflow row plus its steps; resolves to null when the workflow is
// missing or, with requireViewerMatch, when it belongs to a different user.
export declare function loadWorkflowSnapshot(db: WorkflowDbLike, workflowId: string, options?: {
    viewerUserId?: string | null;
    requireViewerMatch?: boolean;
}): Promise<WorkflowSnapshot | null>;
export declare function registerWorkflowsApi(): void;
export {};
|
|
@@ -0,0 +1,328 @@
|
|
|
1
|
+
import { sql } from "drizzle-orm";
|
|
2
|
+
import { mutation, query } from "./reactive.js";
|
|
3
|
+
import { createWorkflowRealtimeToken, deserializeWorkflowValue, getWorkflowResumeActionName, getWorkflowRealtimeKey, serializeWorkflowValue, } from "./workflow.js";
|
|
4
|
+
import { GencowValidationError, v } from "./v.js";
|
|
5
|
+
import { deriveWorkflowStatus } from "./workflow-types.js";
|
|
6
|
+
// Statuses as persisted in _gencow_workflows.status.
const WORKFLOW_STATUSES = new Set([
    "pending",
    "running",
    "completed",
    "failed",
]);
// Client-facing refinements of "pending" produced by deriveWorkflowStatus().
const WORKFLOW_DERIVED_PENDING_STATUSES = new Set([
    "queued",
    "waiting",
    "sleeping",
]);
|
|
17
|
+
// Drivers differ in result shape: some return the row array directly, others
// wrap it in a { rows } object. Normalize both to a plain array; anything
// unrecognized yields an empty array.
function rowsFromResult(result) {
    if (Array.isArray(result)) {
        return result;
    }
    const rows = result && typeof result === "object" ? result.rows : undefined;
    return Array.isArray(rows) ? rows : [];
}
|
|
25
|
+
// jsonb columns may come back pre-parsed or as raw JSON text depending on the
// driver; parse strings, and pass anything unparseable through untouched.
function parseJsonField(value) {
    if (typeof value === "string") {
        try {
            return JSON.parse(value);
        }
        catch {
            // Not valid JSON — fall through and return the raw string.
        }
    }
    return value;
}
|
|
35
|
+
// Best-effort ISO-8601 conversion for timestamp columns. Date instances are
// converted directly; other values are parsed, and anything unparseable falls
// back to String(value).
function toIsoString(value) {
    if (value instanceof Date) {
        return value.toISOString();
    }
    const parsed = new Date(value);
    if (Number.isFinite(parsed.getTime())) {
        return parsed.toISOString();
    }
    return String(value);
}
|
|
41
|
+
// Like toIsoString but maps falsy inputs (null/undefined/'' …) to null.
function toOptionalIsoString(value) {
    if (!value) {
        return null;
    }
    return toIsoString(value);
}
|
|
44
|
+
// Projects a _gencow_workflows row to the public summary shape: camelCase
// keys, ISO timestamps, and the derived client-facing status.
function mapWorkflowSummary(row) {
    const summary = {
        id: row.id,
        name: row.name,
        status: row.status,
        derivedStatus: deriveWorkflowStatus(row.status, row.current_step),
        currentStep: row.current_step,
        error: row.error,
        retryCount: row.retry_count,
        maxRetries: row.max_retries,
        // bigint columns may arrive as strings — force a number.
        maxDurationMs: Number(row.max_duration_ms),
        startedAt: toIsoString(row.started_at),
        updatedAt: toIsoString(row.updated_at),
        completedAt: toOptionalIsoString(row.completed_at),
    };
    return summary;
}
|
|
60
|
+
// Projects a _gencow_workflow_steps row to the public step shape.
function mapWorkflowStep(row) {
    const step = {
        name: row.step_name,
        status: row.status,
        // Step output is persisted as a serialization envelope in jsonb.
        output: deserializeWorkflowValue(parseJsonField(row.output)),
        error: row.error,
        startedAt: toOptionalIsoString(row.started_at),
        updatedAt: toIsoString(row.updated_at),
        completedAt: toOptionalIsoString(row.completed_at),
    };
    return step;
}
|
|
71
|
+
// Defaults the page size to 20 and clamps user input into [1, 100];
// fractions are floored, non-finite input is rejected.
function normalizeListLimit(limit) {
    if (limit == null) {
        return 20;
    }
    if (!Number.isFinite(limit)) {
        throw new GencowValidationError(`Argument "limit": expected a finite number, got ${limit}`);
    }
    const floored = Math.floor(limit);
    return Math.min(100, Math.max(1, floored));
}
|
|
79
|
+
// Validates an optional status filter against the persisted status set.
function normalizeStatus(status) {
    if (status == null) {
        return undefined;
    }
    if (WORKFLOW_STATUSES.has(status)) {
        return status;
    }
    throw new GencowValidationError(`Argument "status": expected one of pending, running, completed, failed`);
}
|
|
87
|
+
// Accepts both persisted statuses and the derived pending refinements
// (queued/waiting/sleeping); anything else is validated as a raw status.
function normalizeDerivedStatus(status) {
    if (status == null) {
        return undefined;
    }
    return WORKFLOW_DERIVED_PENDING_STATUSES.has(status)
        ? status
        : normalizeStatus(status);
}
|
|
95
|
+
// Maps a (possibly derived) status back to the column value used in SQL
// filtering: every pending refinement is stored as 'pending'.
function toWorkflowStatusFilter(status) {
    if (status == null) {
        return undefined;
    }
    return WORKFLOW_DERIVED_PENDING_STATUSES.has(status) ? "pending" : status;
}
|
|
103
|
+
// Guarantees a workflow row carries a non-empty realtime token, returning it.
// Concurrency-safe: the conditional UPDATE only fills an empty/NULL token, so
// when two requests race, the loser re-reads whichever token won.
// Returns null only if no non-empty token can be established (e.g. row gone).
async function ensureWorkflowRealtimeToken(db, workflowId, currentToken) {
    if (currentToken && currentToken.trim() !== "")
        return currentToken;
    const nextToken = createWorkflowRealtimeToken();
    const updateResult = await db.execute(sql `
    UPDATE _gencow_workflows
    SET realtime_token = ${nextToken}
    WHERE id = ${workflowId}
      AND (realtime_token IS NULL OR realtime_token = '')
    RETURNING realtime_token
  `);
    const updatedToken = rowsFromResult(updateResult)[0]?.realtime_token ?? null;
    if (updatedToken && updatedToken.trim() !== "")
        return updatedToken;
    // UPDATE matched nothing — presumably a concurrent writer set the token
    // first; read the row again to pick up the winner's value.
    const rereadResult = await db.execute(sql `
    SELECT realtime_token
    FROM _gencow_workflows
    WHERE id = ${workflowId}
    LIMIT 1
  `);
    const rereadToken = rowsFromResult(rereadResult)[0]?.realtime_token ?? null;
    return rereadToken && rereadToken.trim() !== "" ? rereadToken : null;
}
|
|
126
|
+
// Fetches the minimal columns needed to authorize and route a signal:
// ownership (user_id), liveness (status), and resume routing (name,
// current_step). Resolves to null when the workflow does not exist.
async function loadWorkflowSignalTarget(db, workflowId) {
    const result = await db.execute(sql `
    SELECT
      id,
      name,
      status,
      current_step,
      user_id
    FROM _gencow_workflows
    WHERE id = ${workflowId}
    LIMIT 1
  `);
    return rowsFromResult(result)[0] ?? null;
}
|
|
140
|
+
// Loads a full workflow snapshot: the workflow row, its steps (in start
// order), and the realtime subscription key. Resolves to null when the
// workflow is missing, when requireViewerMatch is set and the row belongs to
// another user, or when no realtime token could be established.
export async function loadWorkflowSnapshot(db, workflowId, options) {
    const workflowResult = await db.execute(sql `
    SELECT
      id,
      name,
      args,
      status,
      current_step,
      result,
      error,
      retry_count,
      max_retries,
      max_duration_ms,
      started_at,
      updated_at,
      completed_at,
      realtime_token,
      user_id
    FROM _gencow_workflows
    WHERE id = ${workflowId}
    LIMIT 1
  `);
    const row = rowsFromResult(workflowResult)[0] ?? null;
    if (!row)
        return null;
    const viewerUserId = options?.viewerUserId ?? null;
    // Ownership gate — only enforced for owned rows (user_id set); ownerless
    // workflows remain visible to any viewer.
    if (options?.requireViewerMatch && row.user_id && row.user_id !== viewerUserId) {
        return null;
    }
    // NOTE(review): presumably backfills tokens for rows persisted without
    // one — confirm against the schema's realtime_token default.
    const realtimeToken = await ensureWorkflowRealtimeToken(db, workflowId, row.realtime_token);
    if (!realtimeToken)
        return null;
    const stepsResult = await db.execute(sql `
    SELECT
      step_name,
      status,
      output,
      error,
      started_at,
      updated_at,
      completed_at
    FROM _gencow_workflow_steps
    WHERE workflow_id = ${workflowId}
    ORDER BY COALESCE(started_at, updated_at) ASC, step_name ASC
  `);
    return {
        ...mapWorkflowSummary(row),
        args: deserializeWorkflowValue(parseJsonField(row.args)),
        result: deserializeWorkflowValue(parseJsonField(row.result)),
        steps: rowsFromResult(stepsResult).map(mapWorkflowStep),
        realtimeKey: getWorkflowRealtimeKey(row.id, realtimeToken),
    };
}
|
|
193
|
+
// Registers the shared workflows.* query/mutation endpoints. Idempotent:
// a global flag prevents duplicate registration when multiple workflow()
// definitions call this.
export function registerWorkflowsApi() {
    if (globalThis.__gencow_workflowsApiRegistered)
        return;
    globalThis.__gencow_workflowsApiRegistered = true;
    // workflows.get — snapshot of one workflow, scoped to the viewer.
    query("workflows.get", {
        args: { id: v.string() },
        public: true,
        handler: async (ctx, args) => {
            return loadWorkflowSnapshot(ctx.unsafeDb, args.id, {
                viewerUserId: ctx.auth.getUserIdentity()?.id ?? null,
                requireViewerMatch: true,
            });
        },
    });
    // workflows.signal — persists a named event for a live workflow and, when
    // the workflow is parked on a "wait:" step, schedules an immediate resume.
    mutation("workflows.signal", {
        args: {
            id: v.string(),
            event: v.string(),
            payload: v.optional(v.any()),
        },
        public: true,
        handler: async (ctx, args) => {
            const normalizedEvent = args.event.trim();
            if (!normalizedEvent) {
                throw new GencowValidationError(`Argument "event": expected a non-empty string`);
            }
            const workflow = await loadWorkflowSignalTarget(ctx.unsafeDb, args.id);
            const viewerUserId = ctx.auth.getUserIdentity()?.id ?? null;
            // Unknown workflow or owned by someone else: report failure
            // without revealing whether the id exists.
            if (!workflow || (workflow.user_id && workflow.user_id !== viewerUserId)) {
                return {
                    ok: false,
                    workflowId: args.id,
                    event: normalizedEvent,
                    scheduledJobId: null,
                };
            }
            // Terminal workflows cannot receive further events.
            if (workflow.status === "completed" || workflow.status === "failed") {
                return {
                    ok: false,
                    workflowId: workflow.id,
                    event: normalizedEvent,
                    scheduledJobId: null,
                };
            }
            const persistedPayload = serializeWorkflowValue(args.payload);
            await ctx.unsafeDb.execute(sql `
        INSERT INTO _gencow_workflow_events (
          id,
          workflow_id,
          event_name,
          payload
        )
        VALUES (
          ${crypto.randomUUID()},
          ${workflow.id},
          ${normalizedEvent},
          ${JSON.stringify(persistedPayload)}::jsonb
        )
      `);
            let scheduledJobId = null;
            // Only a workflow parked on a "wait:" step needs waking. Scheduler
            // failures are deliberately swallowed: the event row is already
            // persisted, so the signal is not lost.
            if (workflow.status === "pending" && workflow.current_step?.startsWith("wait:")) {
                try {
                    scheduledJobId = ctx.scheduler.runAfter(0, getWorkflowResumeActionName(workflow.name), { workflowId: workflow.id });
                }
                catch {
                    scheduledJobId = null;
                }
            }
            return {
                ok: true,
                workflowId: workflow.id,
                event: normalizedEvent,
                scheduledJobId,
            };
        },
    });
    // workflows.list — the authenticated viewer's workflows, newest first,
    // optionally filtered by status (including derived pending refinements).
    query("workflows.list", {
        args: {
            limit: v.optional(v.number()),
            status: v.optional(v.string()),
        },
        handler: async (ctx, args) => {
            const userId = ctx.auth.requireAuth().id;
            const limit = normalizeListLimit(args.limit);
            const requestedStatus = normalizeDerivedStatus(args.status);
            // Derived pending refinements all map to the stored 'pending'.
            const status = toWorkflowStatusFilter(requestedStatus);
            const result = status == null
                ? await ctx.unsafeDb.execute(sql `
            SELECT
              id,
              name,
              args,
              status,
              current_step,
              result,
              error,
              retry_count,
              max_retries,
              max_duration_ms,
              started_at,
              updated_at,
              completed_at,
              user_id
            FROM _gencow_workflows
            WHERE user_id = ${userId}
            ORDER BY started_at DESC
            LIMIT ${limit}
          `)
                : await ctx.unsafeDb.execute(sql `
            SELECT
              id,
              name,
              args,
              status,
              current_step,
              result,
              error,
              retry_count,
              max_retries,
              max_duration_ms,
              started_at,
              updated_at,
              completed_at,
              user_id
            FROM _gencow_workflows
            WHERE user_id = ${userId}
              AND status = ${status}
            ORDER BY started_at DESC
            LIMIT ${limit}
          `);
            // Post-filter on the derived status (e.g. only 'sleeping' rows).
            // NOTE(review): filtering after LIMIT can return fewer than
            // `limit` rows — confirm that is acceptable to callers.
            return rowsFromResult(result)
                .map(mapWorkflowSummary)
                .filter((row) => requestedStatus == null || row.derivedStatus === requestedStatus);
        },
    });
}
|
package/package.json
CHANGED
|
@@ -236,17 +236,17 @@ describe("crud() + ownerRls — 데이터 격리", () => {
|
|
|
236
236
|
expect(values.userId).toBe("user-A");
|
|
237
237
|
});
|
|
238
238
|
|
|
239
|
-
it("create:
|
|
239
|
+
it("create: 타인 user_id 주입 시도는 거부되고 insert까지 가지 않음 (보안)", async () => {
|
|
240
240
|
const mutations = getRegisteredMutations();
|
|
241
241
|
const createDef = mutations.find((m: any) => m.name === "rls_tasks.create");
|
|
242
242
|
|
|
243
243
|
const { ctx, getCapturedValues } = createMockCtx("user-A");
|
|
244
|
-
// 해커가 user_id를 "hacker-id"로 조작 시도
|
|
245
|
-
await
|
|
244
|
+
// 해커가 user_id를 "hacker-id"로 조작 시도 — Layer 1은 즉시 Forbidden (덮어쓰기 전 차단)
|
|
245
|
+
await expect(
|
|
246
|
+
createDef!.handler(ctx, { title: "Spoofed", user_id: "hacker-id" }),
|
|
247
|
+
).rejects.toThrow("Forbidden: cannot create resource for another user");
|
|
246
248
|
|
|
247
|
-
|
|
248
|
-
// 인증된 사용자 ID로 강제 덮어씀 (JS 프로퍼티명)
|
|
249
|
-
expect(values.userId).toBe("user-A");
|
|
249
|
+
expect(getCapturedValues()).toBeNull();
|
|
250
250
|
});
|
|
251
251
|
|
|
252
252
|
// ── update 격리 ──
|
|
@@ -91,6 +91,12 @@ describe("dist/index.js 필수 export", () => {
|
|
|
91
91
|
expect(typeof distModule.mutation).toBe("function");
|
|
92
92
|
});
|
|
93
93
|
|
|
94
|
+
it("workflow export가 존재하고 함수이다", async () => {
|
|
95
|
+
if (!distModule) distModule = await import(DIST_INDEX);
|
|
96
|
+
expect(distModule.workflow).toBeDefined();
|
|
97
|
+
expect(typeof distModule.workflow).toBe("function");
|
|
98
|
+
});
|
|
99
|
+
|
|
94
100
|
it("v export가 존재하고 객체이다", async () => {
|
|
95
101
|
if (!distModule) distModule = await import(DIST_INDEX);
|
|
96
102
|
expect(distModule.v).toBeDefined();
|
|
@@ -1,3 +1,11 @@
|
|
|
1
|
+
CREATE TABLE "news" (
|
|
2
|
+
"id" text PRIMARY KEY NOT NULL,
|
|
3
|
+
"title" text NOT NULL,
|
|
4
|
+
"user_id" text NOT NULL,
|
|
5
|
+
"created_at" timestamp DEFAULT now() NOT NULL,
|
|
6
|
+
"updated_at" timestamp DEFAULT now() NOT NULL
|
|
7
|
+
);
|
|
8
|
+
--> statement-breakpoint
|
|
1
9
|
CREATE TABLE "tasks" (
|
|
2
10
|
"id" text PRIMARY KEY NOT NULL,
|
|
3
11
|
"title" text NOT NULL,
|
|
@@ -57,6 +65,7 @@ CREATE TABLE "verification" (
|
|
|
57
65
|
"updated_at" timestamp DEFAULT now()
|
|
58
66
|
);
|
|
59
67
|
--> statement-breakpoint
|
|
68
|
+
ALTER TABLE "news" ADD CONSTRAINT "news_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
|
60
69
|
ALTER TABLE "tasks" ADD CONSTRAINT "tasks_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
|
61
70
|
ALTER TABLE "account" ADD CONSTRAINT "account_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
|
62
71
|
ALTER TABLE "session" ADD CONSTRAINT "session_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
|
@@ -1,9 +1,68 @@
|
|
|
1
1
|
{
|
|
2
|
-
"id": "
|
|
2
|
+
"id": "d57dfdaa-8d90-493e-834f-580b33548adc",
|
|
3
3
|
"prevId": "00000000-0000-0000-0000-000000000000",
|
|
4
4
|
"version": "7",
|
|
5
5
|
"dialect": "postgresql",
|
|
6
6
|
"tables": {
|
|
7
|
+
"public.news": {
|
|
8
|
+
"name": "news",
|
|
9
|
+
"schema": "",
|
|
10
|
+
"columns": {
|
|
11
|
+
"id": {
|
|
12
|
+
"name": "id",
|
|
13
|
+
"type": "text",
|
|
14
|
+
"primaryKey": true,
|
|
15
|
+
"notNull": true
|
|
16
|
+
},
|
|
17
|
+
"title": {
|
|
18
|
+
"name": "title",
|
|
19
|
+
"type": "text",
|
|
20
|
+
"primaryKey": false,
|
|
21
|
+
"notNull": true
|
|
22
|
+
},
|
|
23
|
+
"user_id": {
|
|
24
|
+
"name": "user_id",
|
|
25
|
+
"type": "text",
|
|
26
|
+
"primaryKey": false,
|
|
27
|
+
"notNull": true
|
|
28
|
+
},
|
|
29
|
+
"created_at": {
|
|
30
|
+
"name": "created_at",
|
|
31
|
+
"type": "timestamp",
|
|
32
|
+
"primaryKey": false,
|
|
33
|
+
"notNull": true,
|
|
34
|
+
"default": "now()"
|
|
35
|
+
},
|
|
36
|
+
"updated_at": {
|
|
37
|
+
"name": "updated_at",
|
|
38
|
+
"type": "timestamp",
|
|
39
|
+
"primaryKey": false,
|
|
40
|
+
"notNull": true,
|
|
41
|
+
"default": "now()"
|
|
42
|
+
}
|
|
43
|
+
},
|
|
44
|
+
"indexes": {},
|
|
45
|
+
"foreignKeys": {
|
|
46
|
+
"news_user_id_user_id_fk": {
|
|
47
|
+
"name": "news_user_id_user_id_fk",
|
|
48
|
+
"tableFrom": "news",
|
|
49
|
+
"tableTo": "user",
|
|
50
|
+
"columnsFrom": [
|
|
51
|
+
"user_id"
|
|
52
|
+
],
|
|
53
|
+
"columnsTo": [
|
|
54
|
+
"id"
|
|
55
|
+
],
|
|
56
|
+
"onDelete": "cascade",
|
|
57
|
+
"onUpdate": "no action"
|
|
58
|
+
}
|
|
59
|
+
},
|
|
60
|
+
"compositePrimaryKeys": {},
|
|
61
|
+
"uniqueConstraints": {},
|
|
62
|
+
"policies": {},
|
|
63
|
+
"checkConstraints": {},
|
|
64
|
+
"isRLSEnabled": false
|
|
65
|
+
},
|
|
7
66
|
"public.tasks": {
|
|
8
67
|
"name": "tasks",
|
|
9
68
|
"schema": "",
|