@meetploy/cli 1.5.0 → 1.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dashboard-dist/assets/main-Bsiap3cJ.css +1 -0
- package/dist/dashboard-dist/assets/main-X-CWS305.js +176 -0
- package/dist/dashboard-dist/index.html +13 -0
- package/dist/dev.d.ts +1 -2
- package/dist/dev.js +1065 -2
- package/dist/index.d.ts +1 -1
- package/dist/index.js +2992 -55
- package/package.json +19 -5
- package/dist/commands/build.d.ts +0 -7
- package/dist/commands/build.js +0 -94
- package/dist/commands/build.js.map +0 -1
- package/dist/commands/types.d.ts +0 -7
- package/dist/commands/types.js +0 -145
- package/dist/commands/types.js.map +0 -1
- package/dist/dev.js.map +0 -1
- package/dist/index.js.map +0 -1
package/dist/index.js
CHANGED
@@ -1,72 +1,3009 @@
 #!/usr/bin/env node
-import {
-import {
-
-
-
-
-
-
-
-
-
+import { createRequire } from 'module';
+import { mkdir, writeFile, readFile, access } from 'fs/promises';
+import { join, dirname } from 'path';
+import { existsSync, readFileSync, readFile as readFile$1, mkdirSync, writeFileSync } from 'fs';
+import { promisify } from 'util';
+import { parse } from 'yaml';
+import { build } from 'esbuild';
+import { watch } from 'chokidar';
+import { fileURLToPath } from 'url';
+import { randomUUID, createHash } from 'crypto';
+import { serve } from '@hono/node-server';
+import { Hono } from 'hono';
+import { tmpdir } from 'os';
+import Database from 'better-sqlite3';
+import { spawn } from 'child_process';
+
+createRequire(import.meta.url);
+var __defProp = Object.defineProperty;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __esm = (fn, res) => function __init() {
+return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
+};
+var __export = (target, all) => {
+for (var name in all)
+__defProp(target, name, { get: all[name], enumerable: true });
+};
+async function isPnpmWorkspace(projectDir) {
+try {
+await access(join(projectDir, "pnpm-workspace.yaml"));
+return true;
+} catch {
+return false;
+}
+}
+function getAddDevDependencyCommand(packageManager, packages, isPnpmWorkspace2 = false) {
+const packageList = packages.join(" ");
+switch (packageManager) {
+case "npm":
+return `npm install --save-dev ${packageList}`;
+case "yarn":
+return `yarn add -D ${packageList}`;
+case "pnpm":
+return isPnpmWorkspace2 ? `pnpm add -w -D ${packageList}` : `pnpm add -D ${packageList}`;
+}
+}
+function getRunCommand(packageManager, command2) {
+switch (packageManager) {
+case "npm":
+return `npx ${command2}`;
+case "yarn":
+return `yarn ${command2}`;
+case "pnpm":
+return `pnpm ${command2}`;
+}
+}
+var init_package_manager = __esm({
+"../tools/dist/package-manager.js"() {
+}
+});
+function validateBindings(bindings, bindingType, configFile) {
+if (bindings === void 0) {
+return;
+}
+if (typeof bindings !== "object" || bindings === null) {
+throw new Error(`'${bindingType}' in ${configFile} must be an object`);
+}
+for (const [bindingName, resourceName] of Object.entries(bindings)) {
+if (!BINDING_NAME_REGEX.test(bindingName)) {
+throw new Error(`Invalid ${bindingType} binding name '${bindingName}' in ${configFile}. Binding names must be uppercase with underscores (e.g., DB, USERS_DB)`);
+}
+if (typeof resourceName !== "string") {
+throw new Error(`${bindingType} binding '${bindingName}' in ${configFile} must have a string value`);
+}
+if (!RESOURCE_NAME_REGEX.test(resourceName)) {
+throw new Error(`Invalid ${bindingType} resource name '${resourceName}' for binding '${bindingName}' in ${configFile}. Resource names must be lowercase with underscores (e.g., default, users_db)`);
+}
+}
+}
+function validateRelativePath(path, fieldName, configFile) {
+if (path === void 0) {
+return void 0;
+}
+if (typeof path !== "string") {
+throw new Error(`'${fieldName}' in ${configFile} must be a string`);
+}
+const normalized = path.replace(/\/+$/, "");
+if (normalized.startsWith("/")) {
+throw new Error(`'${fieldName}' in ${configFile} must be a relative path, not absolute`);
+}
+if (normalized.includes("..")) {
+throw new Error(`'${fieldName}' in ${configFile} cannot contain path traversal (..)`);
+}
+return normalized;
+}
+function validatePloyConfig(config, configFile = "ploy.yaml", options = {}) {
+const { requireKind = false, requireBuildForStatic = true, allowedKinds = ["nextjs", "static", "dynamic", "worker"] } = options;
+const validatedConfig = { ...config };
+if (requireKind && !config.kind) {
+throw new Error(`Missing required field 'kind' in ${configFile}`);
+}
+if (config.kind !== void 0) {
+if (!allowedKinds.includes(config.kind)) {
+throw new Error(`Invalid kind '${config.kind}' in ${configFile}. Must be one of: ${allowedKinds.join(", ")}`);
+}
+}
+if (requireBuildForStatic && config.kind === "static" && !config.build) {
+throw new Error(`Build command is required when kind is 'static' in ${configFile}`);
+}
+if (config.build !== void 0 && typeof config.build !== "string") {
+throw new Error(`'build' in ${configFile} must be a string`);
+}
+validatedConfig.out = validateRelativePath(config.out, "out", configFile);
+validatedConfig.base = validateRelativePath(config.base, "base", configFile);
+validatedConfig.main = validateRelativePath(config.main, "main", configFile);
+validateBindings(config.db, "db", configFile);
+validateBindings(config.queue, "queue", configFile);
+validateBindings(config.workflow, "workflow", configFile);
+if (config.ai !== void 0 && typeof config.ai !== "boolean") {
+throw new Error(`'ai' in ${configFile} must be a boolean`);
+}
+if (config.monorepo !== void 0 && typeof config.monorepo !== "boolean") {
+throw new Error(`'monorepo' in ${configFile} must be a boolean`);
+}
+return validatedConfig;
+}
+async function readPloyConfig(projectDir, configPath) {
+const configFile = configPath || "ploy.yaml";
+const fullPath = join(projectDir, configFile);
+try {
+const content = await readFileAsync(fullPath, "utf-8");
+return parse(content);
+} catch (error2) {
+if (error2 && typeof error2 === "object" && "code" in error2) {
+if (error2.code === "ENOENT") {
+return null;
+}
+}
+throw error2;
+}
+}
+function readPloyConfigSync(projectDir, configPath) {
+const configFile = configPath || "ploy.yaml";
+const fullPath = join(projectDir, configFile);
+if (!existsSync(fullPath)) {
+throw new Error(`Config file not found: ${fullPath}`);
+}
+const content = readFileSync(fullPath, "utf-8");
+return parse(content);
+}
+async function readAndValidatePloyConfig(projectDir, configPath, validationOptions) {
+const configFile = configPath || "ploy.yaml";
+const config = await readPloyConfig(projectDir, configPath);
+if (!config) {
+return null;
+}
+return validatePloyConfig(config, configFile, validationOptions);
+}
+function readAndValidatePloyConfigSync(projectDir, configPath, validationOptions) {
+const configFile = configPath || "ploy.yaml";
+const config = readPloyConfigSync(projectDir, configPath);
+return validatePloyConfig(config, configFile, validationOptions);
+}
+function hasBindings(config) {
+return !!(config.db || config.queue || config.workflow || config.ai);
+}
+function getWorkerEntryPoint(projectDir, config) {
+if (config.main) {
+return join(projectDir, config.main);
+}
+const defaultPaths = [
+join(projectDir, "index.ts"),
+join(projectDir, "index.js"),
+join(projectDir, "src", "index.ts"),
+join(projectDir, "src", "index.js")
+];
+for (const path of defaultPaths) {
+if (existsSync(path)) {
+return path;
+}
+}
+throw new Error("Could not find worker entry point. Specify 'main' in ploy.yaml");
+}
+var readFileAsync, BINDING_NAME_REGEX, RESOURCE_NAME_REGEX;
+var init_ploy_config = __esm({
+"../tools/dist/ploy-config.js"() {
+readFileAsync = promisify(readFile$1);
+BINDING_NAME_REGEX = /^[A-Z][A-Z0-9_]*$/;
+RESOURCE_NAME_REGEX = /^[a-z][a-z0-9_]*$/;
+}
+});
+
+// ../tools/dist/cli.js
+var cli_exports = {};
+__export(cli_exports, {
+getAddDevDependencyCommand: () => getAddDevDependencyCommand,
+getRunCommand: () => getRunCommand,
+getWorkerEntryPoint: () => getWorkerEntryPoint,
+hasBindings: () => hasBindings,
+isPnpmWorkspace: () => isPnpmWorkspace,
+readAndValidatePloyConfig: () => readAndValidatePloyConfig,
+readAndValidatePloyConfigSync: () => readAndValidatePloyConfigSync,
+readPloyConfig: () => readPloyConfig,
+readPloyConfigSync: () => readPloyConfigSync,
+validatePloyConfig: () => validatePloyConfig
+});
+var init_cli = __esm({
+"../tools/dist/cli.js"() {
+init_package_manager();
+init_ploy_config();
+}
+});
+
+// ../shared/dist/d1-runtime.js
+var DB_RUNTIME_CODE, DB_RUNTIME_CODE_PRODUCTION;
+var init_d1_runtime = __esm({
+"../shared/dist/d1-runtime.js"() {
+DB_RUNTIME_CODE = `
+interface DBResult {
+results: unknown[];
+success: boolean;
+meta: {
+duration: number;
+rows_read: number;
+rows_written: number;
+};
+}
+
+interface DBPreparedStatement {
+bind: (...values: unknown[]) => DBPreparedStatement;
+run: () => Promise<DBResult>;
+all: () => Promise<DBResult>;
+first: (colName?: string) => Promise<unknown | null>;
+raw: (options?: { columnNames?: boolean }) => Promise<unknown[][]>;
+}
+
+interface DBDatabase {
+prepare: (query: string) => DBPreparedStatement;
+dump: () => Promise<ArrayBuffer>;
+batch: (statements: DBPreparedStatement[]) => Promise<DBResult[]>;
+exec: (query: string) => Promise<DBResult>;
+}
+
+export function initializeDB(bindingName: string, serviceUrl: string): DBDatabase {
+return {
+prepare(query: string): DBPreparedStatement {
+let boundParams: unknown[] = [];
+
+const stmt: DBPreparedStatement = {
+bind(...values: unknown[]) {
+boundParams = values;
+return this;
+},
+
+async run(): Promise<DBResult> {
+const response = await fetch(serviceUrl, {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({
+bindingName,
+method: "prepare",
+query,
+params: boundParams,
+}),
+});
+
+if (!response.ok) {
+const errorText = await response.text();
+throw new Error("DB query failed: " + errorText);
+}
+
+return await response.json();
+},
+
+async all(): Promise<DBResult> {
+return await this.run();
+},
+
+async first(colName?: string): Promise<unknown | null> {
+const result = await this.run();
+if (result.results.length === 0) {
+return null;
+}
+
+const firstRow = result.results[0] as Record<string, unknown>;
+if (colName) {
+return firstRow[colName] ?? null;
+}
+return firstRow;
+},
+
+async raw(options?: { columnNames?: boolean }): Promise<unknown[][]> {
+const result = await this.run();
+const rows = result.results as Record<string, unknown>[];
+
+if (rows.length === 0) {
+return [];
+}
+
+const keys = Object.keys(rows[0]);
+const arrayRows = rows.map((row) => keys.map((key) => row[key]));
+
+if (options?.columnNames) {
+return [keys, ...arrayRows];
+}
+
+return arrayRows;
+},
+};
+
+return stmt;
+},
+
+async dump(): Promise<ArrayBuffer> {
+const response = await fetch(serviceUrl, {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({
+bindingName,
+method: "dump",
+}),
+});
+
+if (!response.ok) {
+throw new Error("DB dump failed: " + response.statusText);
+}
+
+return await response.arrayBuffer();
+},
+
+async batch(statements: DBPreparedStatement[]): Promise<DBResult[]> {
+const results: DBResult[] = [];
+for (const stmt of statements) {
+results.push(await stmt.run());
+}
+return results;
+},
+
+async exec(query: string): Promise<DBResult> {
+const response = await fetch(serviceUrl, {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({
+bindingName,
+method: "exec",
+query,
+}),
+});
+
+if (!response.ok) {
+throw new Error("DB exec failed: " + response.statusText);
+}
+
+return await response.json();
+},
+};
+}
+`;
+DB_RUNTIME_CODE_PRODUCTION = `export function initializeDB(databaseId, organizationId, serviceUrl) {
+return {
+prepare(query) {
+let boundParams = [];
+return {
+bind(...values) {
+boundParams = values;
+return this;
+},
+async run() {
+const response = await fetch(serviceUrl, {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({
+databaseId,
+organizationId,
+method: "prepare",
+query,
+params: boundParams,
+}),
+});
+if (!response.ok) {
+const errorText = await response.text();
+throw new Error(\`DB query failed: \${errorText}\`);
+}
+return await response.json();
+},
+async all() {
+return await this.run();
+},
+async first(colName) {
+const result = await this.run();
+if (result.results.length === 0) {
+return null;
+}
+const firstRow = result.results[0];
+if (colName) {
+return firstRow[colName] ?? null;
+}
+return firstRow;
+},
+async raw(options) {
+const result = await this.run();
+const rows = result.results;
+if (rows.length === 0) {
+return [];
+}
+const keys = Object.keys(rows[0]);
+const arrayRows = rows.map((row) => keys.map((key) => row[key]));
+if (options?.columnNames) {
+return [keys, ...arrayRows];
+}
+return arrayRows;
+},
+};
+},
+async dump() {
+const response = await fetch(serviceUrl, {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({
+databaseId,
+organizationId,
+method: "dump",
+}),
+});
+if (!response.ok) {
+throw new Error(\`DB dump failed: \${response.statusText}\`);
+}
+return await response.arrayBuffer();
+},
+async batch(statements) {
+const stmts = [];
+for (const stmt of statements) {
+const stmtData = stmt.__db_data;
+if (stmtData) {
+stmts.push(stmtData);
+}
+}
+const response = await fetch(serviceUrl, {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({
+databaseId,
+organizationId,
+method: "batch",
+statements: stmts,
+}),
+});
+if (!response.ok) {
+throw new Error(\`DB batch failed: \${response.statusText}\`);
+}
+return await response.json();
+},
+async exec(query) {
+const response = await fetch(serviceUrl, {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({
+databaseId,
+organizationId,
+method: "exec",
+query,
+}),
+});
+if (!response.ok) {
+throw new Error(\`DB exec failed: \${response.statusText}\`);
+}
+return await response.json();
+},
+};
+}`;
+}
+});
+
+// ../shared/dist/error.js
+var init_error = __esm({
+"../shared/dist/error.js"() {
+}
+});
+
+// ../shared/dist/health-check.js
+var init_health_check = __esm({
+"../shared/dist/health-check.js"() {
+}
+});
+
+// ../shared/dist/trace-event.js
+var init_trace_event = __esm({
+"../shared/dist/trace-event.js"() {
+}
+});
+
+// ../shared/dist/url-validation.js
+var init_url_validation = __esm({
+"../shared/dist/url-validation.js"() {
+}
+});
+
+// ../shared/dist/index.js
+var init_dist = __esm({
+"../shared/dist/index.js"() {
+init_d1_runtime();
+init_error();
+init_health_check();
+init_trace_event();
+init_url_validation();
+}
+});
+
+// ../emulator/dist/runtime/db-runtime.js
+var init_db_runtime = __esm({
+"../emulator/dist/runtime/db-runtime.js"() {
+init_dist();
+}
+});
+
+// ../emulator/dist/runtime/queue-runtime.js
+var QUEUE_RUNTIME_CODE;
+var init_queue_runtime = __esm({
+"../emulator/dist/runtime/queue-runtime.js"() {
+QUEUE_RUNTIME_CODE = `
+interface QueueSendResult {
+success: boolean;
+messageId?: string;
+error?: string;
+}
+
+interface QueueBatchSendResult {
+success: boolean;
+messageIds?: string[];
+error?: string;
+}
+
+interface Queue<T = unknown> {
+send: (payload: T, options?: { delaySeconds?: number }) => Promise<QueueSendResult>;
+sendBatch: (messages: Array<{ payload: T; delaySeconds?: number }>) => Promise<QueueBatchSendResult>;
+}
+
+export function initializeQueue<T = unknown>(queueName: string, serviceUrl: string): Queue<T> {
+return {
+async send(payload: T, options?: { delaySeconds?: number }): Promise<QueueSendResult> {
+const response = await fetch(serviceUrl + "/queue/send", {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({
+queueName,
+payload,
+delaySeconds: options?.delaySeconds,
+}),
+});
+
+if (!response.ok) {
+const errorText = await response.text();
+return { success: false, error: "Queue send failed: " + errorText };
+}
+
+return await response.json();
+},
+
+async sendBatch(messages: Array<{ payload: T; delaySeconds?: number }>): Promise<QueueBatchSendResult> {
+const response = await fetch(serviceUrl + "/queue/batch-send", {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({
+queueName,
+messages,
+}),
+});
+
+if (!response.ok) {
+const errorText = await response.text();
+return { success: false, error: "Queue batch send failed: " + errorText };
+}
+
+return await response.json();
+},
+};
+}
+`;
+}
+});
+
+// ../emulator/dist/runtime/workflow-runtime.js
+var WORKFLOW_RUNTIME_CODE;
+var init_workflow_runtime = __esm({
+"../emulator/dist/runtime/workflow-runtime.js"() {
+WORKFLOW_RUNTIME_CODE = `
+interface WorkflowExecution {
+id: string;
+status: "pending" | "running" | "completed" | "failed" | "cancelled";
+result?: unknown;
+error?: string;
+}
+
+interface Workflow<TInput = unknown> {
+trigger: (input: TInput) => Promise<{ executionId: string }>;
+getExecution: (executionId: string) => Promise<WorkflowExecution>;
+cancel: (executionId: string) => Promise<void>;
+}
+
+export function initializeWorkflow<TInput = unknown>(
+workflowName: string,
+serviceUrl: string
+): Workflow<TInput> {
+return {
+async trigger(input: TInput): Promise<{ executionId: string }> {
+const response = await fetch(serviceUrl + "/workflow/trigger", {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({ workflowName, input }),
+});
+
+if (!response.ok) {
+const errorText = await response.text();
+throw new Error("Workflow trigger failed: " + errorText);
+}
+
+const result = await response.json();
+if (!result.success) {
+throw new Error(result.error || "Workflow trigger failed");
+}
+
+return { executionId: result.executionId };
+},
+
+async getExecution(executionId: string): Promise<WorkflowExecution> {
+const response = await fetch(serviceUrl + "/workflow/status", {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({ executionId }),
+});
+
+if (!response.ok) {
+const errorText = await response.text();
+throw new Error("Workflow status failed: " + errorText);
+}
+
+const result = await response.json();
+if (!result.success) {
+throw new Error(result.error || "Workflow status failed");
+}
+
+const exec = result.execution;
+return {
+id: exec.id,
+status: exec.status,
+result: exec.output,
+error: exec.error,
+};
+},
+
+async cancel(executionId: string): Promise<void> {
+const response = await fetch(serviceUrl + "/workflow/cancel", {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({ executionId }),
+});
+
+if (!response.ok) {
+const errorText = await response.text();
+throw new Error("Workflow cancel failed: " + errorText);
+}
+
+const result = await response.json();
+if (!result.success) {
+throw new Error(result.error || "Workflow cancel failed");
+}
+},
+};
+}
+
+interface WorkflowStepContext {
+run: <T>(stepName: string, fn: () => T | Promise<T>, options?: { retries?: number }) => Promise<T>;
+sleep: (durationMs: number) => Promise<void>;
+}
+
+interface WorkflowContext<TInput = unknown, TEnv = unknown> {
+input: TInput;
+env: TEnv;
+step: WorkflowStepContext;
+}
+
+export function createStepContext(executionId: string, serviceUrl: string): WorkflowStepContext {
+let stepIndex = 0;
+
+return {
+async run<T>(stepName: string, fn: () => T | Promise<T>, options?: { retries?: number }): Promise<T> {
+const currentStepIndex = stepIndex++;
+const maxRetries = options?.retries ?? 0;
+
+const startResponse = await fetch(serviceUrl + "/workflow/step/start", {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({ executionId, stepName, stepIndex: currentStepIndex }),
+});
+
+if (!startResponse.ok) {
+const errorText = await startResponse.text();
+throw new Error("Failed to start step: " + errorText);
+}
+
+const startResult = await startResponse.json();
+
+if (startResult.alreadyCompleted) {
+return startResult.output as T;
+}
+
+const startTime = Date.now();
+let lastError: Error | null = null;
+
+for (let attempt = 0; attempt <= maxRetries; attempt++) {
+try {
+const result = await fn();
+const durationMs = Date.now() - startTime;
+
+await fetch(serviceUrl + "/workflow/step/complete", {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({ executionId, stepName, output: result, durationMs }),
+});
+
+return result;
+} catch (error) {
+lastError = error instanceof Error ? error : new Error(String(error));
+if (attempt < maxRetries) {
+await new Promise((resolve) => setTimeout(resolve, Math.min(1000 * Math.pow(2, attempt), 30000)));
+}
+}
+}
+
+const durationMs = Date.now() - startTime;
+await fetch(serviceUrl + "/workflow/step/fail", {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({ executionId, stepName, error: lastError?.message || "Step execution failed", durationMs }),
+});
+
+throw lastError;
+},
+
+async sleep(durationMs: number): Promise<void> {
+const stepName = "__sleep_" + stepIndex++;
+
+await fetch(serviceUrl + "/workflow/step/start", {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({ executionId, stepName, stepIndex: stepIndex - 1 }),
+});
+
+await new Promise((resolve) => setTimeout(resolve, durationMs));
+
+await fetch(serviceUrl + "/workflow/step/complete", {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({ executionId, stepName, output: { sleptMs: durationMs }, durationMs }),
+});
+},
+};
+}
+
+export async function executeWorkflow<TInput, TOutput, TEnv>(
+executionId: string,
+workflowFn: (context: WorkflowContext<TInput, TEnv>) => Promise<TOutput>,
+input: TInput,
+env: TEnv,
+serviceUrl: string
+): Promise<TOutput> {
+const step = createStepContext(executionId, serviceUrl);
+const context: WorkflowContext<TInput, TEnv> = { input, env, step };
+
+try {
+const output = await workflowFn(context);
+
+await fetch(serviceUrl + "/workflow/complete", {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({ executionId, output }),
+});
+
+return output;
+} catch (error) {
+await fetch(serviceUrl + "/workflow/fail", {
+method: "POST",
+headers: { "Content-Type": "application/json" },
+body: JSON.stringify({ executionId, error: error instanceof Error ? error.message : String(error) }),
+});
+
+throw error;
+}
+}
+`;
+}
+});
+function generateWrapperCode(config, mockServiceUrl) {
+const imports = [];
+const bindings = [];
+if (config.db) {
+imports.push('import { initializeDB } from "__ploy_db_runtime__";');
+for (const [bindingName, dbName] of Object.entries(config.db)) {
+bindings.push(` ${bindingName}: initializeDB("${dbName}", "${mockServiceUrl}/db"),`);
+}
+}
+if (config.queue) {
+imports.push('import { initializeQueue } from "__ploy_queue_runtime__";');
+for (const [bindingName, queueName] of Object.entries(config.queue)) {
+bindings.push(` ${bindingName}: initializeQueue("${queueName}", "${mockServiceUrl}"),`);
+}
+}
+if (config.workflow) {
+imports.push('import { initializeWorkflow, createStepContext, executeWorkflow } from "__ploy_workflow_runtime__";');
+for (const [bindingName, workflowName] of Object.entries(config.workflow)) {
+bindings.push(` ${bindingName}: initializeWorkflow("${workflowName}", "${mockServiceUrl}"),`);
+}
+}
+imports.push('import userWorker from "__ploy_user_worker__";');
+const workflowHandlerCode = config.workflow ? `
+// Handle workflow execution requests
+if (request.headers.get("X-Ploy-Workflow-Execution") === "true") {
+const workflowName = request.headers.get("X-Ploy-Workflow-Name");
+const executionId = request.headers.get("X-Ploy-Execution-Id");
+
+if (workflowName && executionId && userWorker.workflows && userWorker.workflows[workflowName]) {
+const input = await request.json();
+try {
+await executeWorkflow(
+executionId,
+userWorker.workflows[workflowName],
+input,
+injectedEnv,
+"${mockServiceUrl}"
+);
+return new Response(JSON.stringify({ success: true }), {
+headers: { "Content-Type": "application/json" }
+});
+} catch (error) {
+return new Response(JSON.stringify({ success: false, error: String(error) }), {
+status: 500,
+headers: { "Content-Type": "application/json" }
+});
+}
+}
+}` : "";
+const queueHandlerCode = config.queue ? `
+// Handle queue message delivery
+if (request.headers.get("X-Ploy-Queue-Delivery") === "true") {
+const queueName = request.headers.get("X-Ploy-Queue-Name");
+const messageId = request.headers.get("X-Ploy-Message-Id");
+const deliveryId = request.headers.get("X-Ploy-Delivery-Id");
+const attempt = parseInt(request.headers.get("X-Ploy-Message-Attempt") || "1", 10);
+
+if (queueName && messageId && userWorker.message) {
+const payload = await request.json();
+try {
+await userWorker.message({
+id: messageId,
+queueName,
+payload,
+attempt,
+timestamp: Date.now()
+}, injectedEnv, ctx);
+return new Response(JSON.stringify({ success: true }), {
+headers: { "Content-Type": "application/json" }
+});
+} catch (error) {
+return new Response(JSON.stringify({ success: false, error: String(error) }), {
+status: 500,
+headers: { "Content-Type": "application/json" }
+});
+}
+}
+}` : "";
+return `${imports.join("\n")}
+
+const ployBindings = {
+${bindings.join("\n")}
+};
+
+export default {
+async fetch(request, env, ctx) {
+const injectedEnv = { ...env, ...ployBindings };
+${workflowHandlerCode}
+${queueHandlerCode}
+
+if (userWorker.fetch) {
+return userWorker.fetch(request, injectedEnv, ctx);
+}
+
+return new Response("Worker has no fetch handler", { status: 500 });
+},
+
+async scheduled(event, env, ctx) {
+if (userWorker.scheduled) {
+const injectedEnv = { ...env, ...ployBindings };
+return userWorker.scheduled(event, injectedEnv, ctx);
+}
+}
+};
+`;
+}
+function createRuntimePlugin(_config) {
+return {
+name: "ploy-runtime",
+setup(build2) {
+build2.onResolve({ filter: /^__ploy_db_runtime__$/ }, () => ({
+path: "__ploy_db_runtime__",
+namespace: "ploy-runtime"
+}));
+build2.onResolve({ filter: /^__ploy_queue_runtime__$/ }, () => ({
+path: "__ploy_queue_runtime__",
+namespace: "ploy-runtime"
+}));
+build2.onResolve({ filter: /^__ploy_workflow_runtime__$/ }, () => ({
+path: "__ploy_workflow_runtime__",
+namespace: "ploy-runtime"
+}));
+build2.onLoad({ filter: /^__ploy_db_runtime__$/, namespace: "ploy-runtime" }, () => ({
+contents: DB_RUNTIME_CODE,
+loader: "ts"
+}));
+build2.onLoad({ filter: /^__ploy_queue_runtime__$/, namespace: "ploy-runtime" }, () => ({
+contents: QUEUE_RUNTIME_CODE,
+loader: "ts"
+}));
+build2.onLoad({ filter: /^__ploy_workflow_runtime__$/, namespace: "ploy-runtime" }, () => ({
+contents: WORKFLOW_RUNTIME_CODE,
+loader: "ts"
+}));
+}
+};
 }
-function
-
-
-
-
-
-
+async function bundleWorker(options) {
+const { projectDir, tempDir, entryPoint, config, mockServiceUrl } = options;
+const wrapperCode = generateWrapperCode(config, mockServiceUrl);
+const wrapperPath = join(tempDir, "wrapper.ts");
+writeFileSync(wrapperPath, wrapperCode);
+const bundlePath = join(tempDir, "worker.bundle.js");
+const buildOptions = {
+entryPoints: [wrapperPath],
+bundle: true,
+format: "esm",
+platform: "neutral",
+target: "es2022",
+outfile: bundlePath,
+minify: false,
+sourcemap: false,
+external: ["cloudflare:*"],
+alias: {
+__ploy_user_worker__: entryPoint
+},
+plugins: [createRuntimePlugin()],
+absWorkingDir: projectDir,
+logLevel: "warning"
+};
+await build(buildOptions);
+return bundlePath;
+}
+var init_bundler = __esm({
+"../emulator/dist/bundler/bundler.js"() {
+init_db_runtime();
+init_queue_runtime();
+init_workflow_runtime();
+}
+});
+function createFileWatcher(srcDir, onRebuild) {
+let watcher = null;
+let debounceTimer = null;
+let isRebuilding = false;
+function shouldRebuild(filePath) {
+const extensions = [".ts", ".tsx", ".js", ".jsx", ".mts", ".mjs"];
+return extensions.some((ext) => filePath.endsWith(ext));
+}
+function scheduleRebuild() {
+if (debounceTimer) {
+clearTimeout(debounceTimer);
+}
+debounceTimer = setTimeout(async () => {
+if (isRebuilding) {
+return;
+}
+isRebuilding = true;
+try {
+await onRebuild();
+} finally {
+isRebuilding = false;
+}
+}, 100);
+}
+return {
+start() {
+if (watcher) {
+return;
+}
+watcher = watch(srcDir, {
+persistent: true,
+ignoreInitial: true,
+ignored: ["**/node_modules/**", "**/dist/**", "**/.ploy/**", "**/.*"]
+});
+watcher.on("change", (filePath) => {
+if (shouldRebuild(filePath)) {
+scheduleRebuild();
+}
+});
+watcher.on("add", (filePath) => {
+if (shouldRebuild(filePath)) {
+scheduleRebuild();
+}
+});
+watcher.on("unlink", (filePath) => {
+if (shouldRebuild(filePath)) {
+scheduleRebuild();
+}
+});
+},
+stop() {
+if (debounceTimer) {
+clearTimeout(debounceTimer);
+debounceTimer = null;
+}
+if (watcher) {
+watcher.close();
+watcher = null;
+}
+}
+};
+}
+var init_watcher = __esm({
+"../emulator/dist/bundler/watcher.js"() {
+}
+});
+
+// ../emulator/dist/config/ploy-config.js
+function readPloyConfig2(projectDir, configPath) {
+const config = readPloyConfigSync(projectDir, configPath);
+if (!config.kind) {
+throw new Error(`Missing required field 'kind' in ${configPath || "ploy.yaml"}`);
+}
+if (config.kind !== "dynamic" && config.kind !== "worker") {
+throw new Error(`Invalid kind '${config.kind}' in ${configPath || "ploy.yaml"}. Must be 'dynamic' or 'worker'`);
+}
+return config;
+}
+function getWorkerEntryPoint2(projectDir, config) {
+return getWorkerEntryPoint(projectDir, config);
+}
+var init_ploy_config2 = __esm({
+"../emulator/dist/config/ploy-config.js"() {
+init_cli();
+}
+});
+function generateWorkerdConfig(options) {
+const { port, mockServicePort } = options;
+const services = [
+'(name = "main", worker = .worker)',
+`(name = "mock", external = (address = "localhost:${mockServicePort}", http = ()))`,
+'(name = "internet", network = (allow = ["public", "private", "local"], tlsOptions = (trustBrowserCas = true)))'
+];
+const bindings = [
+'(name = "mock", service = "mock")',
+'(name = "internet", service = "internet")'
+];
+const configContent = `using Workerd = import "/workerd/workerd.capnp";
+
+const config :Workerd.Config = (
+services = [
+${services.join(",\n ")}
+],
+sockets = [
+(name = "http", address = "*:${port}", http = (), service = "main")
+]
+);
+
+const worker :Workerd.Worker = (
+modules = [
+(name = "worker.js", esModule = embed "worker.bundle.js")
+],
+compatibilityDate = "2025-09-15",
+compatibilityFlags = [
+"experimental",
+"nodejs_compat",
+"nodejs_als"
+],
+globalOutbound = "internet",
+bindings = [
+${bindings.join(",\n ")}
+]
+);
+`;
+return configContent;
+}
+function writeWorkerdConfig(options) {
+const configContent = generateWorkerdConfig(options);
+const configPath = join(options.tempDir, "workerd.capnp");
+writeFileSync(configPath, configContent);
+return configPath;
+}
+var init_workerd_config = __esm({
+"../emulator/dist/config/workerd-config.js"() {
+}
+});
+function findDashboardDistPath() {
+const possiblePaths = [
+join(__dirname, "..", "dashboard-dist"),
+join(__dirname, "..", "..", "src", "dashboard-dist")
+];
+for (const p of possiblePaths) {
+if (existsSync(p)) {
+return p;
+}
+}
+return null;
+}
+function getMimeType(filePath) {
+const ext = filePath.substring(filePath.lastIndexOf("."));
+return MIME_TYPES[ext] || "application/octet-stream";
+}
+function createDashboardRoutes(app, dbManager2, config) {
+const dashboardDistPath = findDashboardDistPath();
+const hasDashboard = dashboardDistPath !== null;
+function getDbResourceName(bindingName) {
+return config.db?.[bindingName] ?? null;
+}
+app.get("/api/config", (c) => {
+return c.json({
+db: config.db,
+queue: config.queue,
+workflow: config.workflow
+});
+});
+app.post("/api/db/:binding/query", async (c) => {
+const binding = c.req.param("binding");
+const resourceName = getDbResourceName(binding);
+if (!resourceName) {
+return c.json({ error: `Database binding '${binding}' not found` }, 404);
+}
+const body = await c.req.json();
+const { query } = body;
+if (!query) {
+return c.json({ error: "Query is required" }, 400);
+}
+try {
+const db = dbManager2.getD1Database(resourceName);
+const startTime = Date.now();
+const stmt = db.prepare(query);
+const isSelect = query.trim().toUpperCase().startsWith("SELECT");
+let results = [];
+let changes = 0;
+if (isSelect) {
+results = stmt.all();
+} else {
+const info = stmt.run();
+changes = info.changes;
+}
+const duration = Date.now() - startTime;
+return c.json({
+results,
+success: true,
+meta: {
+duration,
+rows_read: results.length,
+rows_written: changes
+}
+});
+} catch (err) {
+return c.json({
+results: [],
+success: false,
+error: err instanceof Error ? err.message : String(err),
+meta: { duration: 0, rows_read: 0, rows_written: 0 }
+}, 400);
+}
+});
+app.get("/api/db/:binding/tables", (c) => {
+const binding = c.req.param("binding");
+const resourceName = getDbResourceName(binding);
+if (!resourceName) {
+return c.json({ error: `Database binding '${binding}' not found` }, 404);
+}
+try {
+const db = dbManager2.getD1Database(resourceName);
+const tables = db.prepare(`SELECT name FROM sqlite_master
+WHERE type='table' AND name NOT LIKE 'sqlite_%' AND name NOT LIKE '_litestream_%'
+ORDER BY name`).all();
+return c.json({ tables });
+} catch (err) {
+return c.json({ error: err instanceof Error ? err.message : String(err) }, 500);
+}
+});
+app.get("/api/db/:binding/tables/:tableName", (c) => {
+const binding = c.req.param("binding");
+const resourceName = getDbResourceName(binding);
+if (!resourceName) {
+return c.json({ error: `Database binding '${binding}' not found` }, 404);
+}
+const tableName = c.req.param("tableName");
+const limit = parseInt(c.req.query("limit") || "50", 10);
+const offset = parseInt(c.req.query("offset") || "0", 10);
+try {
+const db = dbManager2.getD1Database(resourceName);
+const columnsResult = db.prepare(`PRAGMA table_info("${tableName}")`).all();
+const columns = columnsResult.map((col) => col.name);
+const countResult = db.prepare(`SELECT COUNT(*) as count FROM "${tableName}"`).get();
+const total = countResult.count;
+const data = db.prepare(`SELECT * FROM "${tableName}" LIMIT ? OFFSET ?`).all(limit, offset);
+return c.json({ data, columns, total });
+} catch (err) {
+return c.json({ error: err instanceof Error ? err.message : String(err) }, 500);
+}
+});
+app.get("/api/db/:binding/schema", (c) => {
+const binding = c.req.param("binding");
+const resourceName = getDbResourceName(binding);
+if (!resourceName) {
+return c.json({ error: `Database binding '${binding}' not found` }, 404);
+}
+try {
+const db = dbManager2.getD1Database(resourceName);
+const tablesResult = db.prepare(`SELECT name FROM sqlite_master
+WHERE type='table' AND name NOT LIKE 'sqlite_%'
+ORDER BY name`).all();
+const tables = tablesResult.map((table) => {
+const columnsResult = db.prepare(`PRAGMA table_info("${table.name}")`).all();
+return {
+name: table.name,
+columns: columnsResult.map((col) => ({
+name: col.name,
+type: col.type,
+notNull: col.notnull === 1,
+primaryKey: col.pk === 1
+}))
+};
+});
+return c.json({ tables });
+} catch (err) {
+return c.json({ error: err instanceof Error ? err.message : String(err) }, 500);
+}
+});
app.get("/api/queue/:binding/metrics", (c) => {
|
|
1237
|
+
const binding = c.req.param("binding");
|
|
1238
|
+
const queueName = config.queue?.[binding];
|
|
1239
|
+
if (!queueName) {
|
|
1240
|
+
return c.json({ error: "Queue not found" }, 404);
|
|
1241
|
+
}
|
|
1242
|
+
try {
|
|
1243
|
+
const db = dbManager2.emulatorDb;
|
|
1244
|
+
const metrics = {
|
|
1245
|
+
pending: 0,
|
|
1246
|
+
processing: 0,
|
|
1247
|
+
acknowledged: 0,
|
|
1248
|
+
failed: 0,
|
|
1249
|
+
deadLetter: 0
|
|
1250
|
+
};
|
|
1251
|
+
const statusCounts = db.prepare(`SELECT status, COUNT(*) as count
|
|
1252
|
+
FROM queue_messages
|
|
1253
|
+
WHERE queue_name = ?
|
|
1254
|
+
GROUP BY status`).all(queueName);
|
|
1255
|
+
for (const row of statusCounts) {
|
|
1256
|
+
if (row.status === "pending") {
|
|
1257
|
+
metrics.pending = row.count;
|
|
1258
|
+
} else if (row.status === "processing") {
|
|
1259
|
+
metrics.processing = row.count;
|
|
1260
|
+
} else if (row.status === "acknowledged") {
|
|
1261
|
+
metrics.acknowledged = row.count;
|
|
1262
|
+
} else if (row.status === "failed") {
|
|
1263
|
+
metrics.failed = row.count;
|
|
1264
|
+
} else if (row.status === "dead_letter") {
|
|
1265
|
+
metrics.deadLetter = row.count;
|
|
1266
|
+
}
|
|
1267
|
+
}
|
|
1268
|
+
return c.json({ metrics });
|
|
1269
|
+
} catch (err) {
|
|
1270
|
+
return c.json({ error: err instanceof Error ? err.message : String(err) }, 500);
|
|
1271
|
+
}
|
|
1272
|
+
});
|
|
1273
|
+
app.get("/api/queue/:binding/messages", (c) => {
|
|
1274
|
+
const binding = c.req.param("binding");
|
|
1275
|
+
const queueName = config.queue?.[binding];
|
|
1276
|
+
const limit = parseInt(c.req.query("limit") || "10", 10);
|
|
1277
|
+
if (!queueName) {
|
|
1278
|
+
return c.json({ error: "Queue not found" }, 404);
|
|
1279
|
+
}
|
|
1280
|
+
try {
|
|
1281
|
+
const db = dbManager2.emulatorDb;
|
|
1282
|
+
const messages = db.prepare(`SELECT id, status, payload, attempt, created_at
|
|
1283
|
+
FROM queue_messages
|
|
1284
|
+
WHERE queue_name = ?
|
|
1285
|
+
ORDER BY created_at DESC
|
|
1286
|
+
LIMIT ?`).all(queueName, limit);
|
|
1287
|
+
return c.json({
|
|
1288
|
+
messages: messages.map((m) => ({
|
|
1289
|
+
id: m.id,
|
|
1290
|
+
status: m.status.toUpperCase(),
|
|
1291
|
+
payload: JSON.parse(m.payload),
|
|
1292
|
+
attempt: m.attempt,
|
|
1293
|
+
createdAt: new Date(m.created_at * 1e3).toISOString()
|
|
1294
|
+
}))
|
|
1295
|
+
});
|
|
1296
|
+
} catch (err) {
|
|
1297
|
+
return c.json({ error: err instanceof Error ? err.message : String(err) }, 500);
|
|
1298
|
+
}
|
|
1299
|
+
});
|
|
1300
|
+
app.get("/api/workflow/:binding/executions", (c) => {
|
|
1301
|
+
const binding = c.req.param("binding");
|
|
1302
|
+
const workflowConfig = config.workflow?.[binding];
|
|
1303
|
+
const limit = parseInt(c.req.query("limit") || "20", 10);
|
|
1304
|
+
if (!workflowConfig) {
|
|
1305
|
+
return c.json({ error: "Workflow not found" }, 404);
|
|
1306
|
+
}
|
|
1307
|
+
try {
|
|
1308
|
+
const db = dbManager2.emulatorDb;
|
|
1309
|
+
const workflowName = workflowConfig;
|
|
1310
|
+
const executions = db.prepare(`SELECT
|
|
1311
|
+
e.id,
|
|
1312
|
+
e.workflow_name,
|
|
1313
|
+
e.status,
|
|
1314
|
+
e.error,
|
|
1315
|
+
e.started_at,
|
|
1316
|
+
e.completed_at,
|
|
1317
|
+
e.created_at,
|
|
1318
|
+
(SELECT COUNT(*) FROM workflow_steps WHERE execution_id = e.id) as steps_count,
|
|
1319
|
+
(SELECT COUNT(*) FROM workflow_steps WHERE execution_id = e.id AND status = 'completed') as steps_completed
|
|
1320
|
+
FROM workflow_executions e
|
|
1321
|
+
WHERE e.workflow_name = ?
|
|
1322
|
+
ORDER BY e.created_at DESC
|
|
1323
|
+
LIMIT ?`).all(workflowName, limit);
|
|
1324
|
+
return c.json({
|
|
1325
|
+
executions: executions.map((e) => ({
|
|
1326
|
+
id: e.id,
|
|
1327
|
+
status: e.status.toUpperCase(),
|
|
1328
|
+
startedAt: e.started_at ? new Date(e.started_at * 1e3).toISOString() : null,
|
|
1329
|
+
completedAt: e.completed_at ? new Date(e.completed_at * 1e3).toISOString() : null,
|
|
1330
|
+
durationMs: e.started_at && e.completed_at ? (e.completed_at - e.started_at) * 1e3 : null,
|
|
1331
|
+
stepsCount: e.steps_count,
|
|
1332
|
+
stepsCompleted: e.steps_completed,
|
|
1333
|
+
errorMessage: e.error,
|
|
1334
|
+
createdAt: new Date(e.created_at * 1e3).toISOString()
|
|
1335
|
+
}))
|
|
1336
|
+
});
|
|
1337
|
+
} catch (err) {
|
|
1338
|
+
return c.json({ error: err instanceof Error ? err.message : String(err) }, 500);
|
|
1339
|
+
}
|
|
1340
|
+
});
|
|
1341
|
+
app.get("/api/workflow/:binding/executions/:executionId", (c) => {
|
|
1342
|
+
const binding = c.req.param("binding");
|
|
1343
|
+
const executionId = c.req.param("executionId");
|
|
1344
|
+
const workflowConfig = config.workflow?.[binding];
|
|
1345
|
+
if (!workflowConfig) {
|
|
1346
|
+
return c.json({ error: "Workflow not found" }, 404);
|
|
1347
|
+
}
|
|
1348
|
+
try {
|
|
1349
|
+
const db = dbManager2.emulatorDb;
|
|
1350
|
+
const execution = db.prepare(`SELECT id, workflow_name, status, error, started_at, completed_at, created_at
|
|
1351
|
+
FROM workflow_executions
|
|
1352
|
+
WHERE id = ?`).get(executionId);
|
|
1353
|
+
if (!execution) {
|
|
1354
|
+
return c.json({ error: "Execution not found" }, 404);
|
|
1355
|
+
}
|
|
1356
|
+
const steps = db.prepare(`SELECT id, step_name, step_index, status, output, error, duration_ms, created_at
|
|
1357
|
+
FROM workflow_steps
|
|
1358
|
+
WHERE execution_id = ?
|
|
1359
|
+
ORDER BY step_index`).all(executionId);
|
|
1360
|
+
return c.json({
|
|
1361
|
+
execution: {
|
|
1362
|
+
id: execution.id,
|
|
1363
|
+
status: execution.status.toUpperCase(),
|
|
1364
|
+
startedAt: execution.started_at ? new Date(execution.started_at * 1e3).toISOString() : null,
|
|
1365
|
+
completedAt: execution.completed_at ? new Date(execution.completed_at * 1e3).toISOString() : null,
|
|
1366
|
+
durationMs: execution.started_at && execution.completed_at ? (execution.completed_at - execution.started_at) * 1e3 : null,
|
|
1367
|
+
stepsCount: steps.length,
|
|
1368
|
+
stepsCompleted: steps.filter((s) => s.status === "completed").length,
|
|
1369
|
+
errorMessage: execution.error,
|
|
1370
|
+
createdAt: new Date(execution.created_at * 1e3).toISOString()
|
|
1371
|
+
},
|
|
1372
|
+
steps: steps.map((s) => ({
|
|
1373
|
+
id: String(s.id),
|
|
1374
|
+
stepName: s.step_name,
|
|
1375
|
+
stepIndex: s.step_index,
|
|
1376
|
+
status: s.status.toUpperCase(),
|
|
1377
|
+
output: s.output ? JSON.parse(s.output) : null,
|
|
1378
|
+
errorMessage: s.error,
|
|
1379
|
+
durationMs: s.duration_ms,
|
|
1380
|
+
createdAt: new Date(s.created_at * 1e3).toISOString()
|
|
1381
|
+
}))
|
|
1382
|
+
});
|
|
1383
|
+
} catch (err) {
|
|
1384
|
+
return c.json({ error: err instanceof Error ? err.message : String(err) }, 500);
|
|
1385
|
+
}
|
|
1386
|
+
});
|
|
1387
|
+
if (hasDashboard) {
|
|
1388
|
+
app.get("/assets/*", (c) => {
|
|
1389
|
+
const path = c.req.path;
|
|
1390
|
+
const filePath = join(dashboardDistPath, path);
|
|
1391
|
+
if (!existsSync(filePath)) {
|
|
1392
|
+
return c.notFound();
|
|
1393
|
+
}
|
|
1394
|
+
const content = readFileSync(filePath);
|
|
1395
|
+
const mimeType = getMimeType(filePath);
|
|
1396
|
+
return new Response(content, {
|
|
1397
|
+
headers: {
|
|
1398
|
+
"Content-Type": mimeType,
|
|
1399
|
+
"Cache-Control": "public, max-age=31536000"
|
|
1400
|
+
}
|
|
1401
|
+
});
|
|
1402
|
+
});
|
|
1403
|
+
app.get("*", (c) => {
|
|
1404
|
+
if (c.req.path.startsWith("/api/") || c.req.path.startsWith("/db") || c.req.path.startsWith("/queue") || c.req.path.startsWith("/workflow")) {
|
|
1405
|
+
return c.notFound();
|
|
1406
|
+
}
|
|
1407
|
+
const indexPath = join(dashboardDistPath, "index.html");
|
|
1408
|
+
if (!existsSync(indexPath)) {
|
|
1409
|
+
return c.text("Dashboard not found. Run 'pnpm build' in packages/dev-dashboard.", 404);
|
|
1410
|
+
}
|
|
1411
|
+
const content = readFileSync(indexPath, "utf-8");
|
|
1412
|
+
return c.html(content);
|
|
1413
|
+
});
|
|
1414
|
+
}
|
|
1415
|
+
}
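// Example (not part of the package): a minimal sketch of calling the execution-detail
// endpoint that createDashboardRoutes registers above. The dashboard port (4003), the
// workflow binding name ("MY_WORKFLOW"), and the execution id are assumptions for illustration.
const executionId = "00000000-0000-0000-0000-000000000000"; // placeholder id
const detailRes = await fetch(
  `http://localhost:4003/api/workflow/MY_WORKFLOW/executions/${executionId}`
);
if (detailRes.ok) {
  const { execution, steps } = await detailRes.json();
  console.log(execution.status, steps.map((s) => s.stepName)); // e.g. "COMPLETED", ["step-1"]
}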
|
|
1416
|
+
var __filename, __dirname, MIME_TYPES;
|
|
1417
|
+
var init_dashboard_routes = __esm({
|
|
1418
|
+
"../emulator/dist/services/dashboard-routes.js"() {
|
|
1419
|
+
__filename = fileURLToPath(import.meta.url);
|
|
1420
|
+
__dirname = dirname(__filename);
|
|
1421
|
+
MIME_TYPES = {
|
|
1422
|
+
".html": "text/html",
|
|
1423
|
+
".js": "application/javascript",
|
|
1424
|
+
".css": "text/css",
|
|
1425
|
+
".json": "application/json",
|
|
1426
|
+
".png": "image/png",
|
|
1427
|
+
".jpg": "image/jpeg",
|
|
1428
|
+
".svg": "image/svg+xml",
|
|
1429
|
+
".ico": "image/x-icon",
|
|
1430
|
+
".woff": "font/woff",
|
|
1431
|
+
".woff2": "font/woff2"
|
|
21
1432
|
};
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
1433
|
+
}
|
|
1434
|
+
});
|
|
1435
|
+
|
|
1436
|
+
// ../emulator/dist/services/db-service.js
|
|
1437
|
+
function createDbHandler(getDatabase) {
|
|
1438
|
+
return async (c) => {
|
|
1439
|
+
const startTime = Date.now();
|
|
1440
|
+
try {
|
|
1441
|
+
const body = await c.req.json();
|
|
1442
|
+
const { bindingName, method, query, params, statements } = body;
|
|
1443
|
+
const db = getDatabase(bindingName);
|
|
1444
|
+
if (method === "prepare" && query) {
|
|
1445
|
+
const stmt = db.prepare(query);
|
|
1446
|
+
const isSelect = query.trim().toUpperCase().startsWith("SELECT");
|
|
1447
|
+
let results;
|
|
1448
|
+
let changes = 0;
|
|
1449
|
+
if (isSelect) {
|
|
1450
|
+
results = params?.length ? stmt.all(...params) : stmt.all();
|
|
1451
|
+
} else {
|
|
1452
|
+
const info = params?.length ? stmt.run(...params) : stmt.run();
|
|
1453
|
+
changes = info.changes;
|
|
1454
|
+
results = [];
|
|
1455
|
+
}
|
|
1456
|
+
const duration = Date.now() - startTime;
|
|
1457
|
+
return c.json({
|
|
1458
|
+
results,
|
|
1459
|
+
success: true,
|
|
1460
|
+
meta: {
|
|
1461
|
+
duration,
|
|
1462
|
+
rows_read: results.length,
|
|
1463
|
+
rows_written: changes
|
|
1464
|
+
}
|
|
1465
|
+
});
|
|
1466
|
+
}
|
|
1467
|
+
if (method === "exec" && query) {
|
|
1468
|
+
db.exec(query);
|
|
1469
|
+
const duration = Date.now() - startTime;
|
|
1470
|
+
return c.json({
|
|
1471
|
+
results: [],
|
|
1472
|
+
success: true,
|
|
1473
|
+
meta: {
|
|
1474
|
+
duration,
|
|
1475
|
+
rows_read: 0,
|
|
1476
|
+
rows_written: 0
|
|
1477
|
+
}
|
|
1478
|
+
});
|
|
1479
|
+
}
|
|
1480
|
+
if (method === "batch" && statements) {
|
|
1481
|
+
const results = [];
|
|
1482
|
+
const transaction = db.transaction(() => {
|
|
1483
|
+
for (const stmt of statements) {
|
|
1484
|
+
const prepared = db.prepare(stmt.query);
|
|
1485
|
+
const isSelect = stmt.query.trim().toUpperCase().startsWith("SELECT");
|
|
1486
|
+
if (isSelect) {
|
|
1487
|
+
const rows = stmt.params?.length ? prepared.all(...stmt.params) : prepared.all();
|
|
1488
|
+
results.push({
|
|
1489
|
+
results: rows,
|
|
1490
|
+
success: true,
|
|
1491
|
+
meta: {
|
|
1492
|
+
duration: 0,
|
|
1493
|
+
rows_read: rows.length,
|
|
1494
|
+
rows_written: 0
|
|
1495
|
+
}
|
|
1496
|
+
});
|
|
1497
|
+
} else {
|
|
1498
|
+
const info = stmt.params?.length ? prepared.run(...stmt.params) : prepared.run();
|
|
1499
|
+
results.push({
|
|
1500
|
+
results: [],
|
|
1501
|
+
success: true,
|
|
1502
|
+
meta: {
|
|
1503
|
+
duration: 0,
|
|
1504
|
+
rows_read: 0,
|
|
1505
|
+
rows_written: info.changes
|
|
1506
|
+
}
|
|
1507
|
+
});
|
|
28
1508
|
}
|
|
1509
|
+
}
|
|
1510
|
+
});
|
|
1511
|
+
transaction();
|
|
1512
|
+
return c.json(results);
|
|
1513
|
+
}
|
|
1514
|
+
if (method === "dump") {
|
|
1515
|
+
const buffer = db.serialize();
|
|
1516
|
+
return new Response(new Uint8Array(buffer), {
|
|
1517
|
+
headers: {
|
|
1518
|
+
"Content-Type": "application/octet-stream"
|
|
1519
|
+
}
|
|
1520
|
+
});
|
|
1521
|
+
}
|
|
1522
|
+
return c.json({ error: "Invalid method" }, 400);
|
|
1523
|
+
} catch (err) {
|
|
1524
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1525
|
+
return c.json({ error: message }, 500);
|
|
1526
|
+
}
|
|
1527
|
+
};
|
|
1528
|
+
}
|
|
1529
|
+
var init_db_service = __esm({
|
|
1530
|
+
"../emulator/dist/services/db-service.js"() {
|
|
1531
|
+
}
|
|
1532
|
+
});
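// Example (not part of the package): a sketch of what a client POST to the /db handler
// above could look like. The mock-server port (4003) and binding name ("DB") are assumptions.
const dbRes = await fetch("http://localhost:4003/db", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    bindingName: "DB",
    method: "prepare",
    query: "SELECT 1 AS ok",
    params: []
  })
});
const { results, meta } = await dbRes.json();
console.log(results, meta.rows_read); // [{ ok: 1 }] 1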
|
|
1533
|
+
function createQueueHandlers(db) {
|
|
1534
|
+
const sendHandler = async (c) => {
|
|
1535
|
+
try {
|
|
1536
|
+
const body = await c.req.json();
|
|
1537
|
+
const { queueName, payload, delaySeconds = 0 } = body;
|
|
1538
|
+
const id = randomUUID();
|
|
1539
|
+
const now = Math.floor(Date.now() / 1e3);
|
|
1540
|
+
const visibleAt = now + delaySeconds;
|
|
1541
|
+
db.prepare(`INSERT INTO queue_messages (id, queue_name, payload, visible_at)
|
|
1542
|
+
VALUES (?, ?, ?, ?)`).run(id, queueName, JSON.stringify(payload), visibleAt);
|
|
1543
|
+
return c.json({ success: true, messageId: id });
|
|
1544
|
+
} catch (err) {
|
|
1545
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1546
|
+
return c.json({ success: false, error: message }, 500);
|
|
1547
|
+
}
|
|
1548
|
+
};
|
|
1549
|
+
const batchSendHandler = async (c) => {
|
|
1550
|
+
try {
|
|
1551
|
+
const body = await c.req.json();
|
|
1552
|
+
const { queueName, messages } = body;
|
|
1553
|
+
const now = Math.floor(Date.now() / 1e3);
|
|
1554
|
+
const messageIds = [];
|
|
1555
|
+
const insert = db.prepare(`INSERT INTO queue_messages (id, queue_name, payload, visible_at)
|
|
1556
|
+
VALUES (?, ?, ?, ?)`);
|
|
1557
|
+
const transaction = db.transaction(() => {
|
|
1558
|
+
for (const msg of messages) {
|
|
1559
|
+
const id = randomUUID();
|
|
1560
|
+
const visibleAt = now + (msg.delaySeconds || 0);
|
|
1561
|
+
insert.run(id, queueName, JSON.stringify(msg.payload), visibleAt);
|
|
1562
|
+
messageIds.push(id);
|
|
29
1563
|
}
|
|
30
|
-
|
|
31
|
-
|
|
1564
|
+
});
|
|
1565
|
+
transaction();
|
|
1566
|
+
return c.json({ success: true, messageIds });
|
|
1567
|
+
} catch (err) {
|
|
1568
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1569
|
+
return c.json({ success: false, error: message }, 500);
|
|
1570
|
+
}
|
|
1571
|
+
};
|
|
1572
|
+
const receiveHandler = async (c) => {
|
|
1573
|
+
try {
|
|
1574
|
+
const body = await c.req.json();
|
|
1575
|
+
const { queueName, maxMessages = 10, visibilityTimeoutSeconds = 30 } = body;
|
|
1576
|
+
const now = Math.floor(Date.now() / 1e3);
|
|
1577
|
+
const deliveryId = randomUUID();
|
|
1578
|
+
const newVisibleAt = now + visibilityTimeoutSeconds;
|
|
1579
|
+
const rows = db.prepare(`SELECT id, queue_name, payload, attempt
|
|
1580
|
+
FROM queue_messages
|
|
1581
|
+
WHERE queue_name = ? AND status = 'pending' AND visible_at <= ?
|
|
1582
|
+
ORDER BY visible_at ASC
|
|
1583
|
+
LIMIT ?`).all(queueName, now, maxMessages);
|
|
1584
|
+
if (rows.length === 0) {
|
|
1585
|
+
return c.json({ success: true, messages: [] });
|
|
1586
|
+
}
|
|
1587
|
+
const ids = rows.map((r) => r.id);
|
|
1588
|
+
const placeholders = ids.map(() => "?").join(",");
|
|
1589
|
+
db.prepare(`UPDATE queue_messages
|
|
1590
|
+
SET status = 'processing', delivery_id = ?, visible_at = ?, attempt = attempt + 1, updated_at = ?
|
|
1591
|
+
WHERE id IN (${placeholders})`).run(deliveryId, newVisibleAt, now, ...ids);
|
|
1592
|
+
const messages = rows.map((row) => ({
|
|
1593
|
+
id: row.id,
|
|
1594
|
+
payload: JSON.parse(row.payload),
|
|
1595
|
+
attempt: row.attempt + 1,
|
|
1596
|
+
deliveryId
|
|
1597
|
+
}));
|
|
1598
|
+
return c.json({ success: true, messages });
|
|
1599
|
+
} catch (err) {
|
|
1600
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1601
|
+
return c.json({ success: false, messages: [], error: message }, 500);
|
|
1602
|
+
}
|
|
1603
|
+
};
|
|
1604
|
+
const ackHandler = async (c) => {
|
|
1605
|
+
try {
|
|
1606
|
+
const body = await c.req.json();
|
|
1607
|
+
const { messageId, deliveryId } = body;
|
|
1608
|
+
const result = db.prepare(`DELETE FROM queue_messages
|
|
1609
|
+
WHERE id = ? AND delivery_id = ?`).run(messageId, deliveryId);
|
|
1610
|
+
if (result.changes === 0) {
|
|
1611
|
+
return c.json({ success: false, error: "Message not found or already processed" }, 404);
|
|
1612
|
+
}
|
|
1613
|
+
return c.json({ success: true });
|
|
1614
|
+
} catch (err) {
|
|
1615
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1616
|
+
return c.json({ success: false, error: message }, 500);
|
|
1617
|
+
}
|
|
1618
|
+
};
|
|
1619
|
+
const retryHandler = async (c) => {
|
|
1620
|
+
try {
|
|
1621
|
+
const body = await c.req.json();
|
|
1622
|
+
const { messageId, deliveryId } = body;
|
|
1623
|
+
const now = Math.floor(Date.now() / 1e3);
|
|
1624
|
+
const backoffSeconds = 5;
|
|
1625
|
+
const result = db.prepare(`UPDATE queue_messages
|
|
1626
|
+
SET status = 'pending', delivery_id = NULL, visible_at = ?, updated_at = ?
|
|
1627
|
+
WHERE id = ? AND delivery_id = ?`).run(now + backoffSeconds, now, messageId, deliveryId);
|
|
1628
|
+
if (result.changes === 0) {
|
|
1629
|
+
return c.json({ success: false, error: "Message not found or already processed" }, 404);
|
|
1630
|
+
}
|
|
1631
|
+
return c.json({ success: true });
|
|
1632
|
+
} catch (err) {
|
|
1633
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1634
|
+
return c.json({ success: false, error: message }, 500);
|
|
1635
|
+
}
|
|
1636
|
+
};
|
|
1637
|
+
return {
|
|
1638
|
+
sendHandler,
|
|
1639
|
+
batchSendHandler,
|
|
1640
|
+
receiveHandler,
|
|
1641
|
+
ackHandler,
|
|
1642
|
+
retryHandler
|
|
1643
|
+
};
|
|
1644
|
+
}
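// Example (not part of the package): driving the queue handlers above over HTTP.
// The mock-server port (4003) and queue name ("jobs") are assumptions for this sketch.
const queueBase = "http://localhost:4003";
const postJson = (path, body) =>
  fetch(`${queueBase}${path}`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body)
  }).then((r) => r.json());

// enqueue a message
const sent = await postJson("/queue/send", {
  queueName: "jobs",
  payload: { kind: "email", to: "user@example.com" },
  delaySeconds: 0
});
console.log("enqueued", sent.messageId);

// pull it back out and acknowledge it
const { messages } = await postJson("/queue/receive", { queueName: "jobs", maxMessages: 1 });
if (messages.length > 0) {
  await postJson("/queue/ack", { messageId: messages[0].id, deliveryId: messages[0].deliveryId });
}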
|
|
1645
|
+
function createQueueProcessor(db, queueBindings, workerUrl) {
|
|
1646
|
+
let interval = null;
|
|
1647
|
+
async function processQueues() {
|
|
1648
|
+
for (const [bindingName, queueName] of Object.entries(queueBindings)) {
|
|
1649
|
+
await processQueue(bindingName, queueName);
|
|
1650
|
+
}
|
|
1651
|
+
}
|
|
1652
|
+
async function processQueue(bindingName, queueName) {
|
|
1653
|
+
const now = Math.floor(Date.now() / 1e3);
|
|
1654
|
+
const deliveryId = randomUUID();
|
|
1655
|
+
const visibilityTimeout = 30;
|
|
1656
|
+
const rows = db.prepare(`SELECT id, payload, attempt
|
|
1657
|
+
FROM queue_messages
|
|
1658
|
+
WHERE queue_name = ? AND status = 'pending' AND visible_at <= ?
|
|
1659
|
+
LIMIT 10`).all(queueName, now);
|
|
1660
|
+
if (rows.length === 0) {
|
|
1661
|
+
return;
|
|
1662
|
+
}
|
|
1663
|
+
const ids = rows.map((r) => r.id);
|
|
1664
|
+
const placeholders = ids.map(() => "?").join(",");
|
|
1665
|
+
db.prepare(`UPDATE queue_messages
|
|
1666
|
+
SET status = 'processing', delivery_id = ?, visible_at = ?, attempt = attempt + 1, updated_at = ?
|
|
1667
|
+
WHERE id IN (${placeholders})`).run(deliveryId, now + visibilityTimeout, now, ...ids);
|
|
1668
|
+
for (const row of rows) {
|
|
1669
|
+
try {
|
|
1670
|
+
const response = await fetch(workerUrl, {
|
|
1671
|
+
method: "POST",
|
|
1672
|
+
headers: {
|
|
1673
|
+
"Content-Type": "application/json",
|
|
1674
|
+
"X-Ploy-Queue-Delivery": "true",
|
|
1675
|
+
"X-Ploy-Queue-Name": queueName,
|
|
1676
|
+
"X-Ploy-Queue-Binding": bindingName,
|
|
1677
|
+
"X-Ploy-Message-Id": row.id,
|
|
1678
|
+
"X-Ploy-Delivery-Id": deliveryId,
|
|
1679
|
+
"X-Ploy-Message-Attempt": String(row.attempt + 1)
|
|
1680
|
+
},
|
|
1681
|
+
body: row.payload
|
|
1682
|
+
});
|
|
1683
|
+
if (response.ok) {
|
|
1684
|
+
db.prepare(`DELETE FROM queue_messages WHERE id = ?`).run(row.id);
|
|
1685
|
+
} else {
|
|
1686
|
+
db.prepare(`UPDATE queue_messages
|
|
1687
|
+
SET status = 'pending', delivery_id = NULL, visible_at = ?, updated_at = ?
|
|
1688
|
+
WHERE id = ?`).run(now + 5, now, row.id);
|
|
1689
|
+
}
|
|
1690
|
+
} catch {
|
|
1691
|
+
db.prepare(`UPDATE queue_messages
|
|
1692
|
+
SET status = 'pending', delivery_id = NULL, visible_at = ?, updated_at = ?
|
|
1693
|
+
WHERE id = ?`).run(now + 5, now, row.id);
|
|
1694
|
+
}
|
|
1695
|
+
}
|
|
1696
|
+
}
|
|
1697
|
+
return {
|
|
1698
|
+
start() {
|
|
1699
|
+
if (interval) {
|
|
1700
|
+
return;
|
|
1701
|
+
}
|
|
1702
|
+
interval = setInterval(() => {
|
|
1703
|
+
processQueues().catch(() => {
|
|
1704
|
+
});
|
|
1705
|
+
}, 1e3);
|
|
1706
|
+
},
|
|
1707
|
+
stop() {
|
|
1708
|
+
if (interval) {
|
|
1709
|
+
clearInterval(interval);
|
|
1710
|
+
interval = null;
|
|
1711
|
+
}
|
|
1712
|
+
}
|
|
1713
|
+
};
|
|
1714
|
+
}
|
|
1715
|
+
var init_queue_service = __esm({
|
|
1716
|
+
"../emulator/dist/services/queue-service.js"() {
|
|
1717
|
+
}
|
|
1718
|
+
});
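// Example (not part of the package): a sketch of how a worker's fetch handler might
// recognize deliveries from createQueueProcessor above. Only the X-Ploy-* headers and the
// retry-on-non-2xx behaviour come from the code above; the handler shape and
// handleQueueMessage are hypothetical application code.
async function handleQueueMessage(payload, attempt) {
  console.log("processing", payload, "attempt", attempt);
}

const exampleQueueWorker = {
  async fetch(request) {
    if (request.headers.get("X-Ploy-Queue-Delivery") === "true") {
      const payload = await request.json();
      const attempt = Number(request.headers.get("X-Ploy-Message-Attempt"));
      await handleQueueMessage(payload, attempt);
      // A 2xx response lets the processor delete the message; any other status
      // puts it back in 'pending' with a short backoff.
      return new Response("ok", { status: 200 });
    }
    return new Response("not a queue delivery", { status: 404 });
  }
};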
|
|
1719
|
+
function createWorkflowHandlers(db, workerUrl) {
|
|
1720
|
+
const triggerHandler = async (c) => {
|
|
1721
|
+
try {
|
|
1722
|
+
const body = await c.req.json();
|
|
1723
|
+
const { workflowName, input } = body;
|
|
1724
|
+
const id = randomUUID();
|
|
1725
|
+
const now = Math.floor(Date.now() / 1e3);
|
|
1726
|
+
db.prepare(`INSERT INTO workflow_executions (id, workflow_name, status, input, started_at, created_at)
|
|
1727
|
+
VALUES (?, ?, 'running', ?, ?, ?)`).run(id, workflowName, JSON.stringify(input), now, now);
|
|
1728
|
+
if (workerUrl) {
|
|
1729
|
+
fetch(workerUrl, {
|
|
1730
|
+
method: "POST",
|
|
1731
|
+
headers: {
|
|
1732
|
+
"Content-Type": "application/json",
|
|
1733
|
+
"X-Ploy-Workflow-Execution": "true",
|
|
1734
|
+
"X-Ploy-Workflow-Name": workflowName,
|
|
1735
|
+
"X-Ploy-Execution-Id": id
|
|
1736
|
+
},
|
|
1737
|
+
body: JSON.stringify(input)
|
|
1738
|
+
}).catch(() => {
|
|
1739
|
+
});
|
|
1740
|
+
}
|
|
1741
|
+
return c.json({ success: true, executionId: id });
|
|
1742
|
+
} catch (err) {
|
|
1743
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1744
|
+
return c.json({ success: false, error: message }, 500);
|
|
1745
|
+
}
|
|
1746
|
+
};
|
|
1747
|
+
const statusHandler = async (c) => {
|
|
1748
|
+
try {
|
|
1749
|
+
const body = await c.req.json();
|
|
1750
|
+
const { executionId } = body;
|
|
1751
|
+
const execution = db.prepare(`SELECT id, workflow_name, status, input, output, error, started_at, completed_at, created_at
|
|
1752
|
+
FROM workflow_executions WHERE id = ?`).get(executionId);
|
|
1753
|
+
if (!execution) {
|
|
1754
|
+
return c.json({ success: false, error: "Execution not found" }, 404);
|
|
1755
|
+
}
|
|
1756
|
+
const steps = db.prepare(`SELECT step_name, step_index, status, output, error, duration_ms
|
|
1757
|
+
FROM workflow_steps WHERE execution_id = ? ORDER BY step_index`).all(executionId);
|
|
1758
|
+
return c.json({
|
|
1759
|
+
success: true,
|
|
1760
|
+
execution: {
|
|
1761
|
+
id: execution.id,
|
|
1762
|
+
workflowName: execution.workflow_name,
|
|
1763
|
+
status: execution.status,
|
|
1764
|
+
input: execution.input ? JSON.parse(execution.input) : null,
|
|
1765
|
+
output: execution.output ? JSON.parse(execution.output) : null,
|
|
1766
|
+
error: execution.error,
|
|
1767
|
+
startedAt: execution.started_at ? new Date(execution.started_at * 1e3).toISOString() : null,
|
|
1768
|
+
completedAt: execution.completed_at ? new Date(execution.completed_at * 1e3).toISOString() : null,
|
|
1769
|
+
steps: steps.map((s) => ({
|
|
1770
|
+
stepName: s.step_name,
|
|
1771
|
+
stepIndex: s.step_index,
|
|
1772
|
+
status: s.status,
|
|
1773
|
+
output: s.output ? JSON.parse(s.output) : null,
|
|
1774
|
+
error: s.error,
|
|
1775
|
+
durationMs: s.duration_ms
|
|
1776
|
+
}))
|
|
1777
|
+
}
|
|
1778
|
+
});
|
|
1779
|
+
} catch (err) {
|
|
1780
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1781
|
+
return c.json({ success: false, error: message }, 500);
|
|
1782
|
+
}
|
|
1783
|
+
};
|
|
1784
|
+
const cancelHandler = async (c) => {
|
|
1785
|
+
try {
|
|
1786
|
+
const body = await c.req.json();
|
|
1787
|
+
const { executionId } = body;
|
|
1788
|
+
const now = Math.floor(Date.now() / 1e3);
|
|
1789
|
+
const result = db.prepare(`UPDATE workflow_executions
|
|
1790
|
+
SET status = 'cancelled', completed_at = ?
|
|
1791
|
+
WHERE id = ? AND status IN ('pending', 'running')`).run(now, executionId);
|
|
1792
|
+
if (result.changes === 0) {
|
|
1793
|
+
return c.json({ success: false, error: "Execution not found or already completed" }, 404);
|
|
1794
|
+
}
|
|
1795
|
+
return c.json({ success: true });
|
|
1796
|
+
} catch (err) {
|
|
1797
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1798
|
+
return c.json({ success: false, error: message }, 500);
|
|
1799
|
+
}
|
|
1800
|
+
};
|
|
1801
|
+
const stepStartHandler = async (c) => {
|
|
1802
|
+
try {
|
|
1803
|
+
const body = await c.req.json();
|
|
1804
|
+
const { executionId, stepName, stepIndex } = body;
|
|
1805
|
+
const existing = db.prepare(`SELECT status, output FROM workflow_steps
|
|
1806
|
+
WHERE execution_id = ? AND step_name = ? AND step_index = ?`).get(executionId, stepName, stepIndex);
|
|
1807
|
+
if (existing && existing.status === "completed") {
|
|
1808
|
+
return c.json({
|
|
1809
|
+
success: true,
|
|
1810
|
+
alreadyCompleted: true,
|
|
1811
|
+
output: existing.output ? JSON.parse(existing.output) : null
|
|
1812
|
+
});
|
|
1813
|
+
}
|
|
1814
|
+
const now = Math.floor(Date.now() / 1e3);
|
|
1815
|
+
if (existing) {
|
|
1816
|
+
db.prepare(`UPDATE workflow_steps SET status = 'running' WHERE execution_id = ? AND step_name = ?`).run(executionId, stepName);
|
|
1817
|
+
} else {
|
|
1818
|
+
db.prepare(`INSERT INTO workflow_steps (execution_id, step_name, step_index, status, created_at)
|
|
1819
|
+
VALUES (?, ?, ?, 'running', ?)`).run(executionId, stepName, stepIndex, now);
|
|
1820
|
+
}
|
|
1821
|
+
return c.json({ success: true, alreadyCompleted: false });
|
|
1822
|
+
} catch (err) {
|
|
1823
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1824
|
+
return c.json({ success: false, error: message }, 500);
|
|
1825
|
+
}
|
|
1826
|
+
};
|
|
1827
|
+
const stepCompleteHandler = async (c) => {
|
|
1828
|
+
try {
|
|
1829
|
+
const body = await c.req.json();
|
|
1830
|
+
const { executionId, stepName, output, durationMs } = body;
|
|
1831
|
+
db.prepare(`UPDATE workflow_steps
|
|
1832
|
+
SET status = 'completed', output = ?, duration_ms = ?
|
|
1833
|
+
WHERE execution_id = ? AND step_name = ?`).run(JSON.stringify(output), durationMs, executionId, stepName);
|
|
1834
|
+
return c.json({ success: true });
|
|
1835
|
+
} catch (err) {
|
|
1836
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1837
|
+
return c.json({ success: false, error: message }, 500);
|
|
1838
|
+
}
|
|
1839
|
+
};
|
|
1840
|
+
const stepFailHandler = async (c) => {
|
|
1841
|
+
try {
|
|
1842
|
+
const body = await c.req.json();
|
|
1843
|
+
const { executionId, stepName, error: error2, durationMs } = body;
|
|
1844
|
+
db.prepare(`UPDATE workflow_steps
|
|
1845
|
+
SET status = 'failed', error = ?, duration_ms = ?
|
|
1846
|
+
WHERE execution_id = ? AND step_name = ?`).run(error2, durationMs, executionId, stepName);
|
|
1847
|
+
return c.json({ success: true });
|
|
1848
|
+
} catch (err) {
|
|
1849
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1850
|
+
return c.json({ success: false, error: message }, 500);
|
|
1851
|
+
}
|
|
1852
|
+
};
|
|
1853
|
+
const completeHandler = async (c) => {
|
|
1854
|
+
try {
|
|
1855
|
+
const body = await c.req.json();
|
|
1856
|
+
const { executionId, output } = body;
|
|
1857
|
+
const now = Math.floor(Date.now() / 1e3);
|
|
1858
|
+
db.prepare(`UPDATE workflow_executions
|
|
1859
|
+
SET status = 'completed', output = ?, completed_at = ?
|
|
1860
|
+
WHERE id = ?`).run(JSON.stringify(output), now, executionId);
|
|
1861
|
+
return c.json({ success: true });
|
|
1862
|
+
} catch (err) {
|
|
1863
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1864
|
+
return c.json({ success: false, error: message }, 500);
|
|
1865
|
+
}
|
|
1866
|
+
};
|
|
1867
|
+
const failHandler = async (c) => {
|
|
1868
|
+
try {
|
|
1869
|
+
const body = await c.req.json();
|
|
1870
|
+
const { executionId, error: error2 } = body;
|
|
1871
|
+
const now = Math.floor(Date.now() / 1e3);
|
|
1872
|
+
db.prepare(`UPDATE workflow_executions
|
|
1873
|
+
SET status = 'failed', error = ?, completed_at = ?
|
|
1874
|
+
WHERE id = ?`).run(error2, now, executionId);
|
|
1875
|
+
return c.json({ success: true });
|
|
1876
|
+
} catch (err) {
|
|
1877
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1878
|
+
return c.json({ success: false, error: message }, 500);
|
|
1879
|
+
}
|
|
1880
|
+
};
|
|
1881
|
+
return {
|
|
1882
|
+
triggerHandler,
|
|
1883
|
+
statusHandler,
|
|
1884
|
+
cancelHandler,
|
|
1885
|
+
stepStartHandler,
|
|
1886
|
+
stepCompleteHandler,
|
|
1887
|
+
stepFailHandler,
|
|
1888
|
+
completeHandler,
|
|
1889
|
+
failHandler
|
|
1890
|
+
};
|
|
1891
|
+
}
|
|
1892
|
+
var init_workflow_service = __esm({
|
|
1893
|
+
"../emulator/dist/services/workflow-service.js"() {
|
|
1894
|
+
}
|
|
1895
|
+
});
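// Example (not part of the package): triggering a workflow and checking its status through
// the handlers above. The mock-server port (4003), workflow name, and input are assumptions.
const wfBase = "http://localhost:4003";
const triggered = await fetch(`${wfBase}/workflow/trigger`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ workflowName: "send-welcome-email", input: { userId: 42 } })
}).then((r) => r.json());

const wfStatus = await fetch(`${wfBase}/workflow/status`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ executionId: triggered.executionId })
}).then((r) => r.json());

console.log(wfStatus.execution.status, wfStatus.execution.steps.length);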
|
|
1896
|
+
async function startMockServer(dbManager2, config, options = {}) {
|
|
1897
|
+
const app = new Hono();
|
|
1898
|
+
if (config.db) {
|
|
1899
|
+
const dbHandler = createDbHandler(dbManager2.getD1Database);
|
|
1900
|
+
app.post("/db", dbHandler);
|
|
1901
|
+
}
|
|
1902
|
+
if (config.queue) {
|
|
1903
|
+
const queueHandlers = createQueueHandlers(dbManager2.emulatorDb);
|
|
1904
|
+
app.post("/queue/send", queueHandlers.sendHandler);
|
|
1905
|
+
app.post("/queue/batch-send", queueHandlers.batchSendHandler);
|
|
1906
|
+
app.post("/queue/receive", queueHandlers.receiveHandler);
|
|
1907
|
+
app.post("/queue/ack", queueHandlers.ackHandler);
|
|
1908
|
+
app.post("/queue/retry", queueHandlers.retryHandler);
|
|
1909
|
+
}
|
|
1910
|
+
if (config.workflow) {
|
|
1911
|
+
const workflowHandlers = createWorkflowHandlers(dbManager2.emulatorDb, options.workerUrl);
|
|
1912
|
+
app.post("/workflow/trigger", workflowHandlers.triggerHandler);
|
|
1913
|
+
app.post("/workflow/status", workflowHandlers.statusHandler);
|
|
1914
|
+
app.post("/workflow/cancel", workflowHandlers.cancelHandler);
|
|
1915
|
+
app.post("/workflow/step/start", workflowHandlers.stepStartHandler);
|
|
1916
|
+
app.post("/workflow/step/complete", workflowHandlers.stepCompleteHandler);
|
|
1917
|
+
app.post("/workflow/step/fail", workflowHandlers.stepFailHandler);
|
|
1918
|
+
app.post("/workflow/complete", workflowHandlers.completeHandler);
|
|
1919
|
+
app.post("/workflow/fail", workflowHandlers.failHandler);
|
|
1920
|
+
}
|
|
1921
|
+
app.get("/health", (c) => c.json({ status: "ok" }));
|
|
1922
|
+
if (options.dashboardEnabled !== false) {
|
|
1923
|
+
createDashboardRoutes(app, dbManager2, config);
|
|
1924
|
+
}
|
|
1925
|
+
const serverPort = options.port ?? DEFAULT_MOCK_SERVER_PORT;
|
|
1926
|
+
return await new Promise((resolve) => {
|
|
1927
|
+
const server = serve({
|
|
1928
|
+
fetch: app.fetch,
|
|
1929
|
+
port: serverPort
|
|
1930
|
+
}, (info) => {
|
|
1931
|
+
resolve({
|
|
1932
|
+
port: info.port,
|
|
1933
|
+
close: () => new Promise((res) => {
|
|
1934
|
+
server.close(() => res());
|
|
1935
|
+
})
|
|
1936
|
+
});
|
|
1937
|
+
});
|
|
1938
|
+
});
|
|
1939
|
+
}
|
|
1940
|
+
var DEFAULT_MOCK_SERVER_PORT;
|
|
1941
|
+
var init_mock_server = __esm({
|
|
1942
|
+
"../emulator/dist/services/mock-server.js"() {
|
|
1943
|
+
init_dashboard_routes();
|
|
1944
|
+
init_db_service();
|
|
1945
|
+
init_queue_service();
|
|
1946
|
+
init_workflow_service();
|
|
1947
|
+
DEFAULT_MOCK_SERVER_PORT = 4003;
|
|
1948
|
+
}
|
|
1949
|
+
});
|
|
1950
|
+
|
|
1951
|
+
// ../emulator/dist/utils/logger.js
|
|
1952
|
+
function timestamp() {
|
|
1953
|
+
return (/* @__PURE__ */ new Date()).toLocaleTimeString("en-US", { hour12: false });
|
|
1954
|
+
}
|
|
1955
|
+
function log(message) {
|
|
1956
|
+
console.log(`${COLORS.dim}[${timestamp()}]${COLORS.reset} ${COLORS.cyan}[ploy]${COLORS.reset} ${message}`);
|
|
1957
|
+
}
|
|
1958
|
+
function success(message) {
|
|
1959
|
+
console.log(`${COLORS.dim}[${timestamp()}]${COLORS.reset} ${COLORS.green}[ploy]${COLORS.reset} ${message}`);
|
|
1960
|
+
}
|
|
1961
|
+
function error(message) {
|
|
1962
|
+
console.error(`${COLORS.dim}[${timestamp()}]${COLORS.reset} ${COLORS.red}[ploy]${COLORS.reset} ${message}`);
|
|
1963
|
+
}
|
|
1964
|
+
function debug(message, verbose) {
|
|
1965
|
+
if (verbose) {
|
|
1966
|
+
console.log(`${COLORS.dim}[${timestamp()}]${COLORS.reset} ${COLORS.magenta}[ploy:debug]${COLORS.reset} ${message}`);
|
|
1967
|
+
}
|
|
1968
|
+
}
|
|
1969
|
+
var COLORS;
|
|
1970
|
+
var init_logger = __esm({
|
|
1971
|
+
"../emulator/dist/utils/logger.js"() {
|
|
1972
|
+
COLORS = {
|
|
1973
|
+
reset: "\x1B[0m",
|
|
1974
|
+
dim: "\x1B[2m",
|
|
1975
|
+
cyan: "\x1B[36m",
|
|
1976
|
+
green: "\x1B[32m",
|
|
1977
|
+
yellow: "\x1B[33m",
|
|
1978
|
+
red: "\x1B[31m",
|
|
1979
|
+
magenta: "\x1B[35m"
|
|
1980
|
+
};
|
|
1981
|
+
}
|
|
1982
|
+
});
|
|
1983
|
+
function getProjectHash(projectDir) {
|
|
1984
|
+
return createHash("sha256").update(projectDir).digest("hex").slice(0, 12);
|
|
1985
|
+
}
|
|
1986
|
+
function getTempDir(projectDir) {
|
|
1987
|
+
const hash = getProjectHash(projectDir);
|
|
1988
|
+
return join(tmpdir(), `ploy-emulator-${hash}`);
|
|
1989
|
+
}
|
|
1990
|
+
function getDataDir(projectDir) {
|
|
1991
|
+
return join(projectDir, ".ploy");
|
|
1992
|
+
}
|
|
1993
|
+
function ensureDir(dir) {
|
|
1994
|
+
mkdirSync(dir, { recursive: true });
|
|
1995
|
+
}
|
|
1996
|
+
function ensureTempDir(projectDir) {
|
|
1997
|
+
const tempDir = getTempDir(projectDir);
|
|
1998
|
+
ensureDir(tempDir);
|
|
1999
|
+
return tempDir;
|
|
2000
|
+
}
|
|
2001
|
+
function ensureDataDir(projectDir) {
|
|
2002
|
+
const dataDir = getDataDir(projectDir);
|
|
2003
|
+
ensureDir(dataDir);
|
|
2004
|
+
ensureDir(join(dataDir, "db"));
|
|
2005
|
+
return dataDir;
|
|
2006
|
+
}
|
|
2007
|
+
var init_paths = __esm({
|
|
2008
|
+
"../emulator/dist/utils/paths.js"() {
|
|
2009
|
+
}
|
|
2010
|
+
});
|
|
2011
|
+
function initializeDatabases(projectDir) {
|
|
2012
|
+
const dataDir = ensureDataDir(projectDir);
|
|
2013
|
+
const d1Databases = /* @__PURE__ */ new Map();
|
|
2014
|
+
const emulatorDb = new Database(join(dataDir, "emulator.db"));
|
|
2015
|
+
emulatorDb.pragma("journal_mode = WAL");
|
|
2016
|
+
emulatorDb.exec(EMULATOR_SCHEMA);
|
|
2017
|
+
function getD1Database(bindingName) {
|
|
2018
|
+
let db = d1Databases.get(bindingName);
|
|
2019
|
+
if (!db) {
|
|
2020
|
+
db = new Database(join(dataDir, "db", `${bindingName}.db`));
|
|
2021
|
+
db.pragma("journal_mode = WAL");
|
|
2022
|
+
d1Databases.set(bindingName, db);
|
|
2023
|
+
}
|
|
2024
|
+
return db;
|
|
2025
|
+
}
|
|
2026
|
+
function close() {
|
|
2027
|
+
emulatorDb.close();
|
|
2028
|
+
for (const db of d1Databases.values()) {
|
|
2029
|
+
db.close();
|
|
2030
|
+
}
|
|
2031
|
+
d1Databases.clear();
|
|
2032
|
+
}
|
|
2033
|
+
return {
|
|
2034
|
+
emulatorDb,
|
|
2035
|
+
getD1Database,
|
|
2036
|
+
close
|
|
2037
|
+
};
|
|
2038
|
+
}
|
|
2039
|
+
var EMULATOR_SCHEMA;
|
|
2040
|
+
var init_sqlite = __esm({
|
|
2041
|
+
"../emulator/dist/utils/sqlite.js"() {
|
|
2042
|
+
init_paths();
|
|
2043
|
+
EMULATOR_SCHEMA = `
|
|
2044
|
+
-- Queue messages table
|
|
2045
|
+
CREATE TABLE IF NOT EXISTS queue_messages (
|
|
2046
|
+
id TEXT PRIMARY KEY,
|
|
2047
|
+
queue_name TEXT NOT NULL,
|
|
2048
|
+
payload TEXT NOT NULL,
|
|
2049
|
+
status TEXT DEFAULT 'pending',
|
|
2050
|
+
attempt INTEGER DEFAULT 0,
|
|
2051
|
+
delivery_id TEXT,
|
|
2052
|
+
visible_at INTEGER NOT NULL,
|
|
2053
|
+
created_at INTEGER DEFAULT (strftime('%s', 'now')),
|
|
2054
|
+
updated_at INTEGER DEFAULT (strftime('%s', 'now'))
|
|
2055
|
+
);
|
|
2056
|
+
|
|
2057
|
+
CREATE INDEX IF NOT EXISTS idx_queue_messages_status
|
|
2058
|
+
ON queue_messages(queue_name, status, visible_at);
|
|
2059
|
+
|
|
2060
|
+
-- Workflow executions table
|
|
2061
|
+
CREATE TABLE IF NOT EXISTS workflow_executions (
|
|
2062
|
+
id TEXT PRIMARY KEY,
|
|
2063
|
+
workflow_name TEXT NOT NULL,
|
|
2064
|
+
status TEXT DEFAULT 'pending',
|
|
2065
|
+
input TEXT,
|
|
2066
|
+
output TEXT,
|
|
2067
|
+
error TEXT,
|
|
2068
|
+
started_at INTEGER,
|
|
2069
|
+
completed_at INTEGER,
|
|
2070
|
+
created_at INTEGER DEFAULT (strftime('%s', 'now'))
|
|
2071
|
+
);
|
|
2072
|
+
|
|
2073
|
+
CREATE INDEX IF NOT EXISTS idx_workflow_executions_status
|
|
2074
|
+
ON workflow_executions(workflow_name, status);
|
|
2075
|
+
|
|
2076
|
+
-- Workflow steps table
|
|
2077
|
+
CREATE TABLE IF NOT EXISTS workflow_steps (
|
|
2078
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
2079
|
+
execution_id TEXT NOT NULL,
|
|
2080
|
+
step_name TEXT NOT NULL,
|
|
2081
|
+
step_index INTEGER NOT NULL,
|
|
2082
|
+
status TEXT DEFAULT 'pending',
|
|
2083
|
+
output TEXT,
|
|
2084
|
+
error TEXT,
|
|
2085
|
+
duration_ms INTEGER,
|
|
2086
|
+
created_at INTEGER DEFAULT (strftime('%s', 'now')),
|
|
2087
|
+
FOREIGN KEY (execution_id) REFERENCES workflow_executions(id)
|
|
2088
|
+
);
|
|
2089
|
+
|
|
2090
|
+
CREATE INDEX IF NOT EXISTS idx_workflow_steps_execution
|
|
2091
|
+
ON workflow_steps(execution_id, step_index);
|
|
2092
|
+
`;
|
|
2093
|
+
}
|
|
2094
|
+
});
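// Example (not part of the package): inspecting the emulator's state directly with
// better-sqlite3, using the EMULATOR_SCHEMA tables defined above. The .ploy/emulator.db
// location mirrors initializeDatabases; aliased imports avoid clashing with the bundle's own.
import Database2 from "better-sqlite3";
import { join as joinPath } from "path";

const inspectDb = new Database2(joinPath(process.cwd(), ".ploy", "emulator.db"), { readonly: true });
const pendingMessages = inspectDb
  .prepare("SELECT id, queue_name, attempt FROM queue_messages WHERE status = 'pending'")
  .all();
console.log(`${pendingMessages.length} pending queue message(s)`);
inspectDb.close();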
|
|
2095
|
+
async function startEmulator(options = {}) {
|
|
2096
|
+
const emulator = new EmulatorServer(options);
|
|
2097
|
+
await emulator.start();
|
|
2098
|
+
return emulator;
|
|
2099
|
+
}
|
|
2100
|
+
var EmulatorServer;
|
|
2101
|
+
var init_emulator = __esm({
|
|
2102
|
+
"../emulator/dist/emulator.js"() {
|
|
2103
|
+
init_bundler();
|
|
2104
|
+
init_watcher();
|
|
2105
|
+
init_ploy_config2();
|
|
2106
|
+
init_workerd_config();
|
|
2107
|
+
init_mock_server();
|
|
2108
|
+
init_queue_service();
|
|
2109
|
+
init_logger();
|
|
2110
|
+
init_paths();
|
|
2111
|
+
init_sqlite();
|
|
2112
|
+
EmulatorServer = class {
|
|
2113
|
+
options;
|
|
2114
|
+
projectDir;
|
|
2115
|
+
tempDir = "";
|
|
2116
|
+
config = null;
|
|
2117
|
+
dbManager = null;
|
|
2118
|
+
mockServer = null;
|
|
2119
|
+
workerdProcess = null;
|
|
2120
|
+
fileWatcher = null;
|
|
2121
|
+
queueProcessor = null;
|
|
2122
|
+
constructor(options = {}) {
|
|
2123
|
+
const port = options.port ?? 8787;
|
|
2124
|
+
this.options = {
|
|
2125
|
+
port,
|
|
2126
|
+
host: options.host ?? "localhost",
|
|
2127
|
+
configPath: options.configPath ?? "ploy.yaml",
|
|
2128
|
+
watch: options.watch ?? true,
|
|
2129
|
+
verbose: options.verbose ?? false,
|
|
2130
|
+
dashboardPort: options.dashboardPort ?? port + 1e3
|
|
2131
|
+
};
|
|
2132
|
+
this.projectDir = process.cwd();
|
|
2133
|
+
}
|
|
2134
|
+
async start() {
|
|
2135
|
+
log("Starting Ploy emulator...");
|
|
2136
|
+
try {
|
|
2137
|
+
this.config = readPloyConfig2(this.projectDir, this.options.configPath);
|
|
2138
|
+
debug(`Loaded config: ${JSON.stringify(this.config)}`, this.options.verbose);
|
|
2139
|
+
this.tempDir = ensureTempDir(this.projectDir);
|
|
2140
|
+
ensureDataDir(this.projectDir);
|
|
2141
|
+
debug(`Temp dir: ${this.tempDir}`, this.options.verbose);
|
|
2142
|
+
this.dbManager = initializeDatabases(this.projectDir);
|
|
2143
|
+
debug("Initialized databases", this.options.verbose);
|
|
2144
|
+
const workerUrl = `http://${this.options.host}:${this.options.port}`;
|
|
2145
|
+
const dashboardPort = this.options.dashboardPort;
|
|
2146
|
+
this.mockServer = await startMockServer(this.dbManager, this.config, {
|
|
2147
|
+
workerUrl,
|
|
2148
|
+
port: dashboardPort,
|
|
2149
|
+
dashboardEnabled: true
|
|
2150
|
+
});
|
|
2151
|
+
debug(`Mock server started on port ${this.mockServer.port}`, this.options.verbose);
|
|
2152
|
+
const mockServiceUrl = `http://localhost:${this.mockServer.port}`;
|
|
2153
|
+
const entryPoint = getWorkerEntryPoint2(this.projectDir, this.config);
|
|
2154
|
+
debug(`Entry point: ${entryPoint}`, this.options.verbose);
|
|
2155
|
+
await this.bundle(entryPoint, mockServiceUrl);
|
|
2156
|
+
const workerdConfigPath = writeWorkerdConfig({
|
|
2157
|
+
tempDir: this.tempDir,
|
|
2158
|
+
bundlePath: join(this.tempDir, "worker.bundle.js"),
|
|
2159
|
+
port: this.options.port,
|
|
2160
|
+
mockServicePort: this.mockServer.port,
|
|
2161
|
+
config: this.config
|
|
2162
|
+
});
|
|
2163
|
+
await this.startWorkerd(workerdConfigPath);
|
|
2164
|
+
if (this.options.watch) {
|
|
2165
|
+
const srcDir = this.getSrcDir();
|
|
2166
|
+
this.fileWatcher = createFileWatcher(srcDir, async () => {
|
|
2167
|
+
log("Changes detected, rebuilding...");
|
|
2168
|
+
try {
|
|
2169
|
+
await this.bundle(entryPoint, mockServiceUrl);
|
|
2170
|
+
success("Rebuild complete");
|
|
2171
|
+
} catch (err) {
|
|
2172
|
+
error(`Rebuild failed: ${err instanceof Error ? err.message : String(err)}`);
|
|
2173
|
+
}
|
|
2174
|
+
});
|
|
2175
|
+
this.fileWatcher.start();
|
|
2176
|
+
debug(`Watching ${srcDir} for changes`, this.options.verbose);
|
|
2177
|
+
}
|
|
2178
|
+
if (this.config.queue) {
|
|
2179
|
+
const workerUrl2 = `http://${this.options.host}:${this.options.port}`;
|
|
2180
|
+
this.queueProcessor = createQueueProcessor(this.dbManager.emulatorDb, this.config.queue, workerUrl2);
|
|
2181
|
+
this.queueProcessor.start();
|
|
2182
|
+
debug("Queue processor started", this.options.verbose);
|
|
2183
|
+
}
|
|
2184
|
+
success(`Emulator running at http://${this.options.host}:${this.options.port}`);
|
|
2185
|
+
log(` Dashboard: http://${this.options.host}:${this.mockServer.port}`);
|
|
2186
|
+
if (this.config.db) {
|
|
2187
|
+
log(` DB bindings: ${Object.keys(this.config.db).join(", ")}`);
|
|
2188
|
+
}
|
|
2189
|
+
if (this.config.queue) {
|
|
2190
|
+
log(` Queue bindings: ${Object.keys(this.config.queue).join(", ")}`);
|
|
2191
|
+
}
|
|
2192
|
+
if (this.config.workflow) {
|
|
2193
|
+
log(` Workflow bindings: ${Object.keys(this.config.workflow).join(", ")}`);
|
|
2194
|
+
}
|
|
2195
|
+
this.setupSignalHandlers();
|
|
2196
|
+
} catch (err) {
|
|
2197
|
+
error(`Failed to start emulator: ${err instanceof Error ? err.message : String(err)}`);
|
|
2198
|
+
await this.stop();
|
|
2199
|
+
throw err;
|
|
2200
|
+
}
|
|
2201
|
+
}
|
|
2202
|
+
async bundle(entryPoint, mockServiceUrl) {
|
|
2203
|
+
if (!this.config) {
|
|
2204
|
+
throw new Error("Config not loaded");
|
|
2205
|
+
}
|
|
2206
|
+
await bundleWorker({
|
|
2207
|
+
projectDir: this.projectDir,
|
|
2208
|
+
tempDir: this.tempDir,
|
|
2209
|
+
entryPoint,
|
|
2210
|
+
config: this.config,
|
|
2211
|
+
mockServiceUrl
|
|
2212
|
+
});
|
|
2213
|
+
}
|
|
2214
|
+
async startWorkerd(configPath) {
|
|
2215
|
+
const args2 = ["serve", "--experimental", "--verbose", configPath];
|
|
2216
|
+
if (this.options.watch) {
|
|
2217
|
+
args2.push("--watch");
|
|
2218
|
+
}
|
|
2219
|
+
const workerdBin = this.findWorkerdBinary();
|
|
2220
|
+
log(`[ploy] Using workerd binary: ${workerdBin}`);
|
|
2221
|
+
debug(`Starting workerd: ${workerdBin} ${args2.join(" ")}`, this.options.verbose);
|
|
2222
|
+
return await new Promise((resolve, reject) => {
|
|
2223
|
+
const workerdBinDir = dirname(workerdBin);
|
|
2224
|
+
this.workerdProcess = spawn(workerdBin, args2, {
|
|
2225
|
+
cwd: this.tempDir,
|
|
2226
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
2227
|
+
shell: true,
|
|
2228
|
+
env: {
|
|
2229
|
+
...process.env,
|
|
2230
|
+
PATH: `${workerdBinDir}:${process.env.PATH || ""}`
|
|
2231
|
+
}
|
|
2232
|
+
});
|
|
2233
|
+
let started = false;
|
|
2234
|
+
let stderrOutput = "";
|
|
2235
|
+
this.workerdProcess.stdout?.on("data", (data) => {
|
|
2236
|
+
const output = data.toString();
|
|
2237
|
+
if (this.options.verbose) {
|
|
2238
|
+
process.stdout.write(output);
|
|
2239
|
+
}
|
|
2240
|
+
if (!started && (output.includes("Listening") || output.includes("running"))) {
|
|
2241
|
+
started = true;
|
|
2242
|
+
resolve();
|
|
2243
|
+
}
|
|
2244
|
+
});
|
|
2245
|
+
this.workerdProcess.stderr?.on("data", (data) => {
|
|
2246
|
+
const output = data.toString();
|
|
2247
|
+
stderrOutput += output;
|
|
2248
|
+
log(`[workerd stderr] ${output.trim()}`);
|
|
2249
|
+
if (output.includes("error") || output.includes("Error")) {
|
|
2250
|
+
error(output.trim());
|
|
2251
|
+
} else if (this.options.verbose) {
|
|
2252
|
+
process.stderr.write(output);
|
|
2253
|
+
}
|
|
2254
|
+
if (!started && output.includes("Listening")) {
|
|
2255
|
+
started = true;
|
|
2256
|
+
resolve();
|
|
2257
|
+
}
|
|
2258
|
+
});
|
|
2259
|
+
this.workerdProcess.on("error", (err) => {
|
|
2260
|
+
error(`workerd error: ${err.message}`);
|
|
2261
|
+
if (!started) {
|
|
2262
|
+
reject(err);
|
|
2263
|
+
}
|
|
2264
|
+
});
|
|
2265
|
+
this.workerdProcess.on("exit", (code) => {
|
|
2266
|
+
if (code !== 0 && code !== null) {
|
|
2267
|
+
error(`workerd exited with code ${code}`);
|
|
2268
|
+
if (stderrOutput) {
|
|
2269
|
+
error(`workerd stderr: ${stderrOutput.trim()}`);
|
|
2270
|
+
}
|
|
2271
|
+
}
|
|
2272
|
+
if (!started) {
|
|
2273
|
+
reject(new Error(`workerd exited with code ${code}`));
|
|
2274
|
+
}
|
|
2275
|
+
});
|
|
2276
|
+
setTimeout(() => {
|
|
2277
|
+
if (!started) {
|
|
2278
|
+
started = true;
|
|
2279
|
+
resolve();
|
|
2280
|
+
}
|
|
2281
|
+
}, 2e3);
|
|
2282
|
+
});
|
|
2283
|
+
}
|
|
2284
|
+
findWorkerdBinary() {
|
|
2285
|
+
const __filename2 = fileURLToPath(import.meta.url);
|
|
2286
|
+
const __dirname2 = dirname(__filename2);
|
|
2287
|
+
const emulatorPkgDir = join(__dirname2, "..");
|
|
2288
|
+
const possiblePaths = [
|
|
2289
|
+
join(emulatorPkgDir, "node_modules", ".bin", "workerd"),
|
|
2290
|
+
join(this.projectDir, "node_modules", ".bin", "workerd"),
|
|
2291
|
+
"workerd"
|
|
2292
|
+
];
|
|
2293
|
+
for (const p of possiblePaths) {
|
|
2294
|
+
if (p === "workerd" || existsSync(p)) {
|
|
2295
|
+
return p;
|
|
2296
|
+
}
|
|
2297
|
+
}
|
|
2298
|
+
return "workerd";
|
|
2299
|
+
}
|
|
2300
|
+
getSrcDir() {
|
|
2301
|
+
const possibleDirs = [join(this.projectDir, "src"), this.projectDir];
|
|
2302
|
+
for (const dir of possibleDirs) {
|
|
2303
|
+
if (existsSync(dir)) {
|
|
2304
|
+
return dir;
|
|
2305
|
+
}
|
|
32
2306
|
}
|
|
33
|
-
|
|
34
|
-
|
|
2307
|
+
return this.projectDir;
|
|
2308
|
+
}
|
|
2309
|
+
setupSignalHandlers() {
|
|
2310
|
+
const handler = async () => {
|
|
2311
|
+
log("\nShutting down...");
|
|
2312
|
+
await this.stop();
|
|
2313
|
+
process.exit(0);
|
|
2314
|
+
};
|
|
2315
|
+
process.on("SIGINT", handler);
|
|
2316
|
+
process.on("SIGTERM", handler);
|
|
2317
|
+
}
|
|
2318
|
+
async stop() {
|
|
2319
|
+
if (this.queueProcessor) {
|
|
2320
|
+
this.queueProcessor.stop();
|
|
2321
|
+
this.queueProcessor = null;
|
|
35
2322
|
}
|
|
36
|
-
|
|
37
|
-
|
|
2323
|
+
if (this.fileWatcher) {
|
|
2324
|
+
this.fileWatcher.stop();
|
|
2325
|
+
this.fileWatcher = null;
|
|
38
2326
|
}
|
|
39
|
-
|
|
40
|
-
|
|
2327
|
+
if (this.workerdProcess) {
|
|
2328
|
+
this.workerdProcess.kill("SIGTERM");
|
|
2329
|
+
this.workerdProcess = null;
|
|
41
2330
|
}
|
|
2331
|
+
if (this.mockServer) {
|
|
2332
|
+
await this.mockServer.close();
|
|
2333
|
+
this.mockServer = null;
|
|
2334
|
+
}
|
|
2335
|
+
if (this.dbManager) {
|
|
2336
|
+
this.dbManager.close();
|
|
2337
|
+
this.dbManager = null;
|
|
2338
|
+
}
|
|
2339
|
+
log("Emulator stopped");
|
|
2340
|
+
}
|
|
2341
|
+
};
|
|
2342
|
+
}
|
|
2343
|
+
});
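// Example (not part of the package): starting the emulator programmatically with the
// startEmulator helper defined above. The option values are assumptions that mirror the
// defaults set in EmulatorServer's constructor.
const emulator = await startEmulator({
  port: 8787,
  host: "localhost",
  configPath: "ploy.yaml",
  watch: true,
  verbose: true
});
// ... later: stops the queue processor, file watcher, workerd process, mock server, and databases
await emulator.stop();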
|
|
2344
|
+
|
|
2345
|
+
// ../emulator/dist/nextjs-dev.js
|
|
2346
|
+
function createDevD1(databaseId, apiUrl) {
|
|
2347
|
+
return {
|
|
2348
|
+
async dump() {
|
|
2349
|
+
const response = await fetch(`${apiUrl}/db`, {
|
|
2350
|
+
method: "POST",
|
|
2351
|
+
headers: { "Content-Type": "application/json" },
|
|
2352
|
+
body: JSON.stringify({
|
|
2353
|
+
databaseId,
|
|
2354
|
+
method: "dump"
|
|
2355
|
+
})
|
|
2356
|
+
});
|
|
2357
|
+
if (!response.ok) {
|
|
2358
|
+
throw new Error(`DB dump failed: ${await response.text()}`);
|
|
2359
|
+
}
|
|
2360
|
+
return await response.arrayBuffer();
|
|
2361
|
+
},
|
|
2362
|
+
prepare(query) {
|
|
2363
|
+
let boundParams = [];
|
|
2364
|
+
const stmt = {
|
|
2365
|
+
bind(...values) {
|
|
2366
|
+
boundParams = values;
|
|
2367
|
+
return stmt;
|
|
2368
|
+
},
|
|
2369
|
+
async run() {
|
|
2370
|
+
const response = await fetch(`${apiUrl}/db`, {
|
|
2371
|
+
method: "POST",
|
|
2372
|
+
headers: { "Content-Type": "application/json" },
|
|
2373
|
+
body: JSON.stringify({
|
|
2374
|
+
databaseId,
|
|
2375
|
+
method: "prepare",
|
|
2376
|
+
query,
|
|
2377
|
+
params: boundParams
|
|
2378
|
+
})
|
|
2379
|
+
});
|
|
2380
|
+
if (!response.ok) {
|
|
2381
|
+
throw new Error(`DB query failed: ${await response.text()}`);
|
|
2382
|
+
}
|
|
2383
|
+
return await response.json();
|
|
2384
|
+
},
|
|
2385
|
+
async all() {
|
|
2386
|
+
return await stmt.run();
|
|
2387
|
+
},
|
|
2388
|
+
async first(colName) {
|
|
2389
|
+
const result = await stmt.run();
|
|
2390
|
+
if (result.results.length === 0) {
|
|
2391
|
+
return null;
|
|
2392
|
+
}
|
|
2393
|
+
const firstRow = result.results[0];
|
|
2394
|
+
if (colName) {
|
|
2395
|
+
return firstRow[colName] ?? null;
|
|
2396
|
+
}
|
|
2397
|
+
return firstRow;
|
|
2398
|
+
},
|
|
2399
|
+
async raw() {
|
|
2400
|
+
const result = await stmt.run();
|
|
2401
|
+
if (result.results.length === 0) {
|
|
2402
|
+
return [];
|
|
2403
|
+
}
|
|
2404
|
+
const keys = Object.keys(result.results[0]);
|
|
2405
|
+
return result.results.map((row) => keys.map((k) => row[k]));
|
|
2406
|
+
}
|
|
2407
|
+
};
|
|
2408
|
+
return stmt;
|
|
2409
|
+
},
|
|
2410
|
+
async exec(query) {
|
|
2411
|
+
const response = await fetch(`${apiUrl}/db`, {
|
|
2412
|
+
method: "POST",
|
|
2413
|
+
headers: { "Content-Type": "application/json" },
|
|
2414
|
+
body: JSON.stringify({
|
|
2415
|
+
databaseId,
|
|
2416
|
+
method: "exec",
|
|
2417
|
+
query
|
|
2418
|
+
})
|
|
2419
|
+
});
|
|
2420
|
+
if (!response.ok) {
|
|
2421
|
+
throw new Error(`DB exec failed: ${await response.text()}`);
|
|
2422
|
+
}
|
|
2423
|
+
return await response.json();
|
|
2424
|
+
},
|
|
2425
|
+
async batch(statements) {
|
|
2426
|
+
const stmts = statements.map((s) => {
|
|
2427
|
+
const data = s.__db_data;
|
|
2428
|
+
return data || s;
|
|
2429
|
+
});
|
|
2430
|
+
const response = await fetch(`${apiUrl}/db`, {
|
|
2431
|
+
method: "POST",
|
|
2432
|
+
headers: { "Content-Type": "application/json" },
|
|
2433
|
+
body: JSON.stringify({
|
|
2434
|
+
databaseId,
|
|
2435
|
+
method: "batch",
|
|
2436
|
+
statements: stmts
|
|
2437
|
+
})
|
|
2438
|
+
});
|
|
2439
|
+
if (!response.ok) {
|
|
2440
|
+
throw new Error(`DB batch failed: ${await response.text()}`);
|
|
2441
|
+
}
|
|
2442
|
+
return await response.json();
|
|
42
2443
|
}
|
|
43
|
-
|
|
2444
|
+
};
|
|
44
2445
|
}
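// Example (not part of the package): exercising the D1-style statement API returned by
// createDevD1 above against a locally running mock server. The database id and the
// mock-server URL are assumptions for this sketch.
const devDb = createDevD1("my-database", "http://localhost:4003");
await devDb.exec("CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY, name TEXT)");
await devDb.prepare("INSERT INTO users (name) VALUES (?)").bind("Ada").run();
const firstName = await devDb.prepare("SELECT name FROM users WHERE id = ?").bind(1).first("name");
console.log(firstName); // "Ada"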
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
2446
|
+
async function initPloyForDev(config) {
|
|
2447
|
+
if (process.env.NODE_ENV !== "development") {
|
|
2448
|
+
return;
|
|
2449
|
+
}
|
|
2450
|
+
if (globalThis.__PLOY_DEV_INITIALIZED__) {
|
|
2451
|
+
return;
|
|
2452
|
+
}
|
|
2453
|
+
globalThis.__PLOY_DEV_INITIALIZED__ = true;
|
|
2454
|
+
const configPath = config?.configPath || "./ploy.yaml";
|
|
2455
|
+
const projectDir = process.cwd();
|
|
2456
|
+
let ployConfig;
|
|
2457
|
+
try {
|
|
2458
|
+
ployConfig = readPloyConfig2(projectDir, configPath);
|
|
2459
|
+
} catch {
|
|
2460
|
+
if (config?.bindings?.db) {
|
|
2461
|
+
ployConfig = { db: config.bindings.db };
|
|
2462
|
+
} else {
|
|
2463
|
+
return;
|
|
2464
|
+
}
|
|
2465
|
+
}
|
|
2466
|
+
if (config?.bindings?.db) {
|
|
2467
|
+
ployConfig = { ...ployConfig, db: config.bindings.db };
|
|
2468
|
+
}
|
|
2469
|
+
if (!ployConfig.db || Object.keys(ployConfig.db).length === 0) {
|
|
2470
|
+
return;
|
|
2471
|
+
}
|
|
2472
|
+
ensureDataDir(projectDir);
|
|
2473
|
+
dbManager = initializeDatabases(projectDir);
|
|
2474
|
+
mockServer = await startMockServer(dbManager, ployConfig, {});
|
|
2475
|
+
const apiUrl = `http://localhost:${mockServer.port}`;
|
|
2476
|
+
const env = {};
|
|
2477
|
+
for (const [bindingName, databaseId] of Object.entries(ployConfig.db)) {
|
|
2478
|
+
env[bindingName] = createDevD1(databaseId, apiUrl);
|
|
2479
|
+
}
|
|
2480
|
+
const context = {
|
|
2481
|
+
env,
|
|
2482
|
+
cf: void 0,
|
|
2483
|
+
ctx: void 0
|
|
2484
|
+
};
|
|
2485
|
+
globalThis.__PLOY_DEV_CONTEXT__ = context;
|
|
2486
|
+
Object.defineProperty(globalThis, PLOY_CONTEXT_SYMBOL, {
|
|
2487
|
+
get() {
|
|
2488
|
+
return context;
|
|
2489
|
+
},
|
|
2490
|
+
configurable: true
|
|
2491
|
+
});
|
|
2492
|
+
const bindingNames = Object.keys(env);
|
|
2493
|
+
console.log(`[Ploy] Development context initialized with bindings: ${bindingNames.join(", ")}`);
|
|
2494
|
+
console.log(`[Ploy] Mock server running at ${apiUrl}`);
|
|
2495
|
+
const cleanup = async () => {
|
|
2496
|
+
if (mockServer) {
|
|
2497
|
+
await mockServer.close();
|
|
2498
|
+
mockServer = null;
|
|
2499
|
+
}
|
|
2500
|
+
if (dbManager) {
|
|
2501
|
+
dbManager.close();
|
|
2502
|
+
dbManager = null;
|
|
2503
|
+
}
|
|
2504
|
+
};
|
|
2505
|
+
process.on("exit", () => {
|
|
2506
|
+
if (dbManager) {
|
|
2507
|
+
dbManager.close();
|
|
2508
|
+
}
|
|
2509
|
+
});
|
|
2510
|
+
process.on("SIGINT", async () => {
|
|
2511
|
+
await cleanup();
|
|
2512
|
+
process.exit(0);
|
|
2513
|
+
});
|
|
2514
|
+
process.on("SIGTERM", async () => {
|
|
2515
|
+
await cleanup();
|
|
2516
|
+
process.exit(0);
|
|
2517
|
+
});
|
|
2518
|
+
}
|
|
2519
|
+
var PLOY_CONTEXT_SYMBOL, mockServer, dbManager;
|
|
2520
|
+
var init_nextjs_dev = __esm({
|
|
2521
|
+
"../emulator/dist/nextjs-dev.js"() {
|
|
2522
|
+
init_ploy_config2();
|
|
2523
|
+
init_mock_server();
|
|
2524
|
+
init_paths();
|
|
2525
|
+
init_sqlite();
|
|
2526
|
+
PLOY_CONTEXT_SYMBOL = /* @__PURE__ */ Symbol.for("__ploy-context__");
|
|
2527
|
+
mockServer = null;
|
|
2528
|
+
dbManager = null;
|
|
2529
|
+
}
|
|
2530
|
+
});
|
|
2531
|
+
|
|
2532
|
+
// ../emulator/dist/dev-dashboard.js
|
|
2533
|
+
async function startDevDashboard(options = {}) {
|
|
2534
|
+
const projectDir = process.cwd();
|
|
2535
|
+
const configPath = options.configPath ?? "ploy.yaml";
|
|
2536
|
+
const port = options.port ?? 4e3;
|
|
2537
|
+
const verbose = options.verbose ?? false;
|
|
2538
|
+
log("Starting Ploy development dashboard...");
|
|
2539
|
+
let config;
|
|
2540
|
+
try {
|
|
2541
|
+
config = readPloyConfig2(projectDir, configPath);
|
|
2542
|
+
} catch {
|
|
2543
|
+
config = {};
|
|
2544
|
+
}
|
|
2545
|
+
debug(`Loaded config: ${JSON.stringify(config)}`, verbose);
|
|
2546
|
+
ensureDataDir(projectDir);
|
|
2547
|
+
const dbManager2 = initializeDatabases(projectDir);
|
|
2548
|
+
debug("Initialized databases", verbose);
|
|
2549
|
+
const mockServer2 = await startMockServer(dbManager2, config, {
|
|
2550
|
+
port,
|
|
2551
|
+
dashboardEnabled: true
|
|
2552
|
+
});
|
|
2553
|
+
debug(`Mock server started on port ${mockServer2.port}`, verbose);
|
|
2554
|
+
if (config.db) {
|
|
2555
|
+
log(` DB bindings: ${Object.keys(config.db).join(", ")}`);
|
|
2556
|
+
}
|
|
2557
|
+
if (config.queue) {
|
|
2558
|
+
log(` Queue bindings: ${Object.keys(config.queue).join(", ")}`);
|
|
2559
|
+
}
|
|
2560
|
+
if (config.workflow) {
|
|
2561
|
+
log(` Workflow bindings: ${Object.keys(config.workflow).join(", ")}`);
|
|
2562
|
+
}
|
|
2563
|
+
return {
|
|
2564
|
+
port: mockServer2.port,
|
|
2565
|
+
close: async () => {
|
|
2566
|
+
await mockServer2.close();
|
|
2567
|
+
dbManager2.close();
|
|
2568
|
+
log("Dashboard stopped");
|
|
2569
|
+
}
|
|
2570
|
+
};
|
|
2571
|
+
}
|
|
2572
|
+
var init_dev_dashboard = __esm({
|
|
2573
|
+
"../emulator/dist/dev-dashboard.js"() {
|
|
2574
|
+
init_ploy_config2();
|
|
2575
|
+
init_mock_server();
|
|
2576
|
+
init_logger();
|
|
2577
|
+
init_paths();
|
|
2578
|
+
init_sqlite();
|
|
2579
|
+
}
|
|
2580
|
+
});
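// Example (not part of the package): running only the development dashboard via the
// startDevDashboard helper defined above. The option values are assumptions.
const dashboard = await startDevDashboard({ port: 4000, configPath: "ploy.yaml", verbose: false });
console.log(`Dashboard listening on http://localhost:${dashboard.port}`);
// ... later
await dashboard.close();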
|
|
2581
|
+
|
|
2582
|
+
// ../emulator/dist/index.js
|
|
2583
|
+
var dist_exports = {};
|
|
2584
|
+
__export(dist_exports, {
|
|
2585
|
+
DB_RUNTIME_CODE: () => DB_RUNTIME_CODE,
|
|
2586
|
+
DB_RUNTIME_CODE_PRODUCTION: () => DB_RUNTIME_CODE_PRODUCTION,
|
|
2587
|
+
EmulatorServer: () => EmulatorServer,
|
|
2588
|
+
initPloyForDev: () => initPloyForDev,
|
|
2589
|
+
startDevDashboard: () => startDevDashboard,
|
|
2590
|
+
startEmulator: () => startEmulator
|
|
2591
|
+
});
|
|
2592
|
+
var init_dist2 = __esm({
|
|
2593
|
+
"../emulator/dist/index.js"() {
|
|
2594
|
+
init_emulator();
|
|
2595
|
+
init_nextjs_dev();
|
|
2596
|
+
init_dev_dashboard();
|
|
2597
|
+
init_db_runtime();
|
|
2598
|
+
}
|
|
2599
|
+
});
|
|
2600
|
+
|
|
2601
|
+
// src/commands/build.ts
|
|
2602
|
+
init_cli();
|
|
2603
|
+
function detectPackageManager() {
|
|
2604
|
+
const cwd = process.cwd();
|
|
2605
|
+
if (existsSync(join(cwd, "pnpm-lock.yaml"))) {
|
|
2606
|
+
return "pnpm";
|
|
2607
|
+
}
|
|
2608
|
+
if (existsSync(join(cwd, "yarn.lock"))) {
|
|
2609
|
+
return "yarn";
|
|
2610
|
+
}
|
|
2611
|
+
return "npm";
|
|
2612
|
+
}
|
|
2613
|
+
function runWranglerBuild() {
|
|
2614
|
+
return new Promise((resolve, reject) => {
|
|
2615
|
+
const packageManager = detectPackageManager();
|
|
2616
|
+
let command2;
|
|
2617
|
+
let args2;
|
|
2618
|
+
if (packageManager === "npm") {
|
|
2619
|
+
command2 = "npx";
|
|
2620
|
+
args2 = ["wrangler", "build"];
|
|
2621
|
+
} else if (packageManager === "yarn") {
|
|
2622
|
+
command2 = "yarn";
|
|
2623
|
+
args2 = ["wrangler", "build"];
|
|
2624
|
+
} else {
|
|
2625
|
+
command2 = "pnpm";
|
|
2626
|
+
args2 = ["wrangler", "build"];
|
|
2627
|
+
}
|
|
2628
|
+
console.log(`Running: ${command2} ${args2.join(" ")}`);
|
|
2629
|
+
const child = spawn(command2, args2, {
|
|
2630
|
+
cwd: process.cwd(),
|
|
2631
|
+
stdio: "inherit",
|
|
2632
|
+
shell: process.platform === "win32"
|
|
2633
|
+
});
|
|
2634
|
+
child.on("error", (error2) => {
|
|
2635
|
+
reject(new Error(`Failed to run wrangler build: ${error2.message}`));
|
|
54
2636
|
});
|
|
2637
|
+
child.on("close", (code) => {
|
|
2638
|
+
if (code === 0) {
|
|
2639
|
+
resolve();
|
|
2640
|
+
} else {
|
|
2641
|
+
reject(new Error(`wrangler build exited with code ${code}`));
|
|
2642
|
+
}
|
|
2643
|
+
});
|
|
2644
|
+
});
|
|
2645
|
+
}
|
|
2646
|
+
async function buildCommand(options = {}) {
|
|
2647
|
+
const cwd = process.cwd();
|
|
2648
|
+
const configFile = options.config || "ploy.yaml";
|
|
2649
|
+
console.log("Validating ploy.yaml...");
|
|
2650
|
+
const config = readPloyConfigSync(cwd, options.config);
|
|
2651
|
+
validatePloyConfig(config, configFile);
|
|
2652
|
+
console.log("Configuration valid.");
|
|
2653
|
+
console.log("");
|
|
2654
|
+
await runWranglerBuild();
|
|
2655
|
+
}
|
|
2656
|
+
function parseBuildArgs(args2) {
|
|
2657
|
+
const options = {};
|
|
2658
|
+
for (let i = 0; i < args2.length; i++) {
|
|
2659
|
+
const arg = args2[i];
|
|
2660
|
+
if (arg === "-c" || arg === "--config") {
|
|
2661
|
+
const nextArg = args2[i + 1];
|
|
2662
|
+
if (!nextArg || nextArg.startsWith("-")) {
|
|
2663
|
+
console.error(`Error: ${arg} requires a path argument`);
|
|
2664
|
+
process.exit(1);
|
|
2665
|
+
}
|
|
2666
|
+
options.config = nextArg;
|
|
2667
|
+
i++;
|
|
2668
|
+
} else if (arg === "-h" || arg === "--help") {
|
|
2669
|
+
printBuildHelp();
|
|
2670
|
+
process.exit(0);
|
|
2671
|
+
}
|
|
2672
|
+
}
|
|
2673
|
+
return options;
|
|
2674
|
+
}
|
|
2675
|
+
function printBuildHelp() {
|
|
2676
|
+
console.log("Usage: ploy build [options]");
|
|
2677
|
+
console.log("");
|
|
2678
|
+
console.log("Build your Ploy project");
|
|
2679
|
+
console.log("");
|
|
2680
|
+
console.log("Options:");
|
|
2681
|
+
console.log(
|
|
2682
|
+
" -c, --config <path> Path to config file (default: ploy.yaml)"
|
|
2683
|
+
);
|
|
2684
|
+
console.log(" -h, --help Show this help message");
|
|
2685
|
+
console.log("");
|
|
2686
|
+
console.log("Examples:");
|
|
2687
|
+
console.log(
|
|
2688
|
+
" ploy build Build using ploy.yaml in current directory"
|
|
2689
|
+
);
|
|
2690
|
+
console.log(
|
|
2691
|
+
" ploy build -c custom.yaml Build using a custom config file"
|
|
2692
|
+
);
|
|
2693
|
+
}
|
|
2694
|
+
|
|
2695
|
+
// src/commands/types.ts
|
|
2696
|
+
init_cli();
|
|
2697
|
+
async function updateTsConfigInclude(cwd, outputPath) {
|
|
2698
|
+
const tsconfigPath = join(cwd, "tsconfig.json");
|
|
2699
|
+
try {
|
|
2700
|
+
const content = await readFile(tsconfigPath, "utf-8");
|
|
2701
|
+
const tsconfig = JSON.parse(content);
|
|
2702
|
+
if (!tsconfig.include) {
|
|
2703
|
+
tsconfig.include = [];
|
|
2704
|
+
}
|
|
2705
|
+
if (tsconfig.include.includes(outputPath)) {
|
|
2706
|
+
return false;
|
|
2707
|
+
}
|
|
2708
|
+
tsconfig.include.push(outputPath);
|
|
2709
|
+
await writeFile(tsconfigPath, JSON.stringify(tsconfig, null, " ") + "\n");
|
|
2710
|
+
return true;
|
|
2711
|
+
} catch (error2) {
|
|
2712
|
+
if (error2 && typeof error2 === "object" && "code" in error2) {
|
|
2713
|
+
if (error2.code === "ENOENT") {
|
|
2714
|
+
return false;
|
|
2715
|
+
}
|
|
2716
|
+
}
|
|
2717
|
+
console.warn(`Warning: Could not update tsconfig.json: ${error2}`);
|
|
2718
|
+
return false;
|
|
2719
|
+
}
|
|
55
2720
|
}
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
2721
|
+
function generateEnvType(config) {
|
|
2722
|
+
const imports = [];
|
|
2723
|
+
const properties = [];
|
|
2724
|
+
if (config.ai) {
|
|
2725
|
+
properties.push(" AI_URL: string;");
|
|
2726
|
+
properties.push(" AI_TOKEN: string;");
|
|
2727
|
+
}
|
|
2728
|
+
if (config.db) {
|
|
2729
|
+
imports.push("D1Database");
|
|
2730
|
+
for (const bindingName of Object.keys(config.db)) {
|
|
2731
|
+
properties.push(` ${bindingName}: D1Database;`);
|
|
2732
|
+
}
|
|
2733
|
+
}
|
|
2734
|
+
if (config.queue) {
|
|
2735
|
+
imports.push("QueueBinding");
|
|
2736
|
+
for (const bindingName of Object.keys(config.queue)) {
|
|
2737
|
+
properties.push(` ${bindingName}: QueueBinding;`);
|
|
2738
|
+
}
|
|
2739
|
+
}
|
|
2740
|
+
if (config.workflow) {
|
|
2741
|
+
imports.push("WorkflowBinding");
|
|
2742
|
+
for (const bindingName of Object.keys(config.workflow)) {
|
|
2743
|
+
properties.push(` ${bindingName}: WorkflowBinding;`);
|
|
2744
|
+
}
|
|
2745
|
+
}
|
|
2746
|
+
const lines = [
|
|
2747
|
+
"// This file is auto-generated by `ploy types`. Do not edit manually.",
|
|
2748
|
+
""
|
|
2749
|
+
];
|
|
2750
|
+
if (imports.length > 0) {
|
|
2751
|
+
lines.push(`import type { ${imports.join(", ")} } from "@meetploy/types";`);
|
|
2752
|
+
lines.push("");
|
|
2753
|
+
}
|
|
2754
|
+
lines.push('declare module "@meetploy/nextjs" {');
|
|
2755
|
+
lines.push(" interface PloyEnv {");
|
|
2756
|
+
for (const prop of properties) {
|
|
2757
|
+
lines.push(` ${prop}`);
|
|
2758
|
+
}
|
|
2759
|
+
lines.push(" }");
|
|
2760
|
+
lines.push("}");
|
|
2761
|
+
lines.push("");
|
|
2762
|
+
lines.push("declare global {");
|
|
2763
|
+
lines.push(" interface PloyEnv {");
|
|
2764
|
+
for (const prop of properties) {
|
|
2765
|
+
lines.push(` ${prop}`);
|
|
2766
|
+
}
|
|
2767
|
+
lines.push(" }");
|
|
2768
|
+
lines.push("}");
|
|
2769
|
+
lines.push("");
|
|
2770
|
+
lines.push("export {};");
|
|
2771
|
+
lines.push("");
|
|
2772
|
+
return lines.join("\n");
|
|
59
2773
|
}
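// Example (not part of the package): roughly what generateEnvType above produces for a
// hypothetical ploy.yaml declaring one db binding ("DB") and one queue binding ("JOBS").
const exampleGeneratedEnvDts = [
  "// This file is auto-generated by `ploy types`. Do not edit manually.",
  "",
  'import type { D1Database, QueueBinding } from "@meetploy/types";',
  "",
  'declare module "@meetploy/nextjs" {',
  "  interface PloyEnv {",
  "    DB: D1Database;",
  "    JOBS: QueueBinding;",
  "  }",
  "}",
  "",
  "declare global {",
  "  interface PloyEnv {",
  "    DB: D1Database;",
  "    JOBS: QueueBinding;",
  "  }",
  "}",
  "",
  "export {};",
  ""
].join("\n");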
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
2774
|
+
function getOutputPath(options) {
|
|
2775
|
+
if (options.output) {
|
|
2776
|
+
return options.output;
|
|
2777
|
+
}
|
|
2778
|
+
return "env.d.ts";
|
|
63
2779
|
}
|
|
-
-
-
-
-console.error("
-console.error(" types Generate TypeScript types from ploy.yaml");
+async function typesCommand(options = {}) {
+  const cwd = process.cwd();
+  const config = await readPloyConfig(cwd);
+  if (!config) {
+    console.error("Error: ploy.yaml not found in current directory");
     process.exit(1);
+  }
+  const hasBindings2 = config.ai || config.db || config.queue || config.workflow;
+  if (!hasBindings2) {
+    console.log("No bindings found in ploy.yaml. Generating empty Env.");
+  }
+  const content = generateEnvType(config);
+  const outputPath = getOutputPath(options);
+  const fullOutputPath = join(cwd, outputPath);
+  const outputDir = dirname(fullOutputPath);
+  await mkdir(outputDir, { recursive: true });
+  await writeFile(fullOutputPath, content, "utf-8");
+  console.log(`Generated ${outputPath}`);
+  const updated = await updateTsConfigInclude(cwd, outputPath);
+  if (updated) {
+    console.log(`Updated tsconfig.json to include ${outputPath}`);
+  }
+  process.exit(0);
+}
+function parseTypesArgs(args2) {
+  const options = {};
+  for (let i = 0; i < args2.length; i++) {
+    const arg = args2[i];
+    if (arg === "-o" || arg === "--output") {
+      const nextArg = args2[i + 1];
+      if (!nextArg || nextArg.startsWith("-")) {
+        console.error(`Error: ${arg} requires a path argument`);
+        process.exit(1);
+      }
+      options.output = nextArg;
+      i++;
+    } else if (arg === "-h" || arg === "--help") {
+      printTypesHelp();
+      process.exit(0);
+    }
+  }
+  return options;
+}
+function printTypesHelp() {
+  console.log("Usage: ploy types [options]");
+  console.log("");
+  console.log("Generate TypeScript types from ploy.yaml bindings");
+  console.log("");
+  console.log("Options:");
+  console.log(" -o, --output <path> Output file path (default: env.d.ts)");
+  console.log(" -h, --help Show this help message");
+  console.log("");
+  console.log("Examples:");
+  console.log(
+    " ploy types Generate env.d.ts in current directory"
+  );
+  console.log(" ploy types -o env.d.ts Generate types to specific path");
+}
+
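
For orientation (not part of the diff): a sketch of how the option parsing above behaves for a few argument lists, assuming parseTypesArgs is in scope.

  // Sketch only; assumes parseTypesArgs from the diff above is in scope.
  console.log(parseTypesArgs([]));                        // {}
  console.log(parseTypesArgs(["-o", "types/env.d.ts"]));  // { output: "types/env.d.ts" }
  console.log(parseTypesArgs(["--output", "env.d.ts"]));  // { output: "env.d.ts" }
  // "-o" with no following path (or a path starting with "-") prints an error
  // and exits with code 1; "-h" / "--help" prints the usage text and exits with 0.
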
+// src/index.ts
+var args = process.argv.slice(2);
+var command = args[0];
+if (!command) {
+  console.error("Usage: ploy <command>");
+  console.error("\nAvailable commands:");
+  console.error(" build Build your Ploy project");
+  console.error(" dev Start development server");
+  console.error(" types Generate TypeScript types from ploy.yaml");
+  process.exit(1);
+}
+function parseDevArgs(args2) {
+  const options = {
+    port: void 0,
+    host: void 0,
+    config: void 0,
+    watch: true,
+    verbose: false,
+    dashboardPort: void 0
+  };
+  for (let i = 0; i < args2.length; i++) {
+    const arg = args2[i];
+    if (arg === "-p" || arg === "--port") {
+      const value = args2[++i];
+      if (value) {
+        options.port = parseInt(value, 10);
+      }
+    } else if (arg === "-h" || arg === "--host") {
+      options.host = args2[++i];
+    } else if (arg === "-c" || arg === "--config") {
+      options.config = args2[++i];
+    } else if (arg === "--no-watch") {
+      options.watch = false;
+    } else if (arg === "-v" || arg === "--verbose") {
+      options.verbose = true;
+    } else if (arg === "--dashboard-port") {
+      const value = args2[++i];
+      if (value) {
+        options.dashboardPort = parseInt(value, 10);
+      }
+    }
+  }
+  return options;
+}
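
Similarly for the dev options (editor's sketch, not part of the diff), assuming parseDevArgs is in scope. Note that for `ploy dev`, -h maps to --host rather than --help.

  // Sketch only; assumes parseDevArgs from the diff above is in scope.
  console.log(parseDevArgs(["-p", "4000", "--no-watch", "--dashboard-port", "5050"]));
  // => { port: 4000, host: undefined, config: undefined,
  //      watch: false, verbose: false, dashboardPort: 5050 }
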
+async function isNextJsProject(projectDir, configPath) {
+  const { readPloyConfig: readPloyConfig3 } = await Promise.resolve().then(() => (init_cli(), cli_exports));
+  try {
+    const config = await readPloyConfig3(projectDir, configPath);
+    if (config?.kind === "nextjs") {
+      return true;
+    }
+  } catch {
+  }
+  const nextConfigFiles = [
+    "next.config.ts",
+    "next.config.js",
+    "next.config.mts",
+    "next.config.mjs"
+  ];
+  for (const file of nextConfigFiles) {
+    if (existsSync(join(projectDir, file))) {
+      return true;
+    }
+  }
+  return false;
+}
+function findNextBinary(projectDir) {
+  const possiblePaths = [
+    join(projectDir, "node_modules", ".bin", "next"),
+    "npx next"
+  ];
+  for (const p of possiblePaths) {
+    if (p.includes("npx") || existsSync(p)) {
+      return p;
+    }
+  }
+  return "npx next";
+}
+async function startNextJsDev(options) {
+  const projectDir = process.cwd();
+  const nextPort = options.port ?? 3e3;
+  const dashboardPort = options.dashboardPort ?? nextPort + 1e3;
+  const { startDevDashboard: startDevDashboard2 } = await Promise.resolve().then(() => (init_dist2(), dist_exports));
+  const dashboard = await startDevDashboard2({
+    configPath: options.config,
+    port: dashboardPort,
+    verbose: options.verbose
+  });
+  console.log(
+    `
+Ploy Dashboard: http://${options.host ?? "localhost"}:${dashboard.port}`
+  );
+  const nextBin = findNextBinary(projectDir);
+  const nextArgs = ["dev", "-p", String(nextPort)];
+  if (options.host) {
+    nextArgs.push("-H", options.host);
+  }
+  console.log(` Starting Next.js dev server...`);
+  let nextProcess;
+  if (nextBin.includes("npx")) {
+    nextProcess = spawn("npx", ["next", ...nextArgs], {
+      cwd: projectDir,
+      stdio: "inherit",
+      shell: true,
+      env: {
+        ...process.env,
+        // Set environment variable so initPloyForDev knows the mock server URL
+        PLOY_MOCK_SERVER_URL: `http://localhost:${dashboard.port}`
+      }
+    });
+  } else {
+    nextProcess = spawn(nextBin, nextArgs, {
+      cwd: projectDir,
+      stdio: "inherit",
+      shell: true,
+      env: {
+        ...process.env,
+        PLOY_MOCK_SERVER_URL: `http://localhost:${dashboard.port}`
+      }
+    });
+  }
+  const cleanup = async () => {
+    console.log("\nShutting down...");
+    nextProcess.kill("SIGTERM");
+    await dashboard.close();
+    process.exit(0);
+  };
+  process.on("SIGINT", cleanup);
+  process.on("SIGTERM", cleanup);
+  nextProcess.on("exit", async (code) => {
+    await dashboard.close();
+    process.exit(code ?? 0);
+  });
+}
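
One detail in startNextJsDev above worth noting: the port defaults are written as exponent literals (3e3 === 3000, 1e3 === 1000). A minimal restatement of that defaulting, for illustration only and not part of the published code:

  // Mirrors the defaulting in startNextJsDev above; illustration only.
  function resolvePorts(options) {
    const nextPort = options.port ?? 3000;                          // 3e3 in the bundle
    const dashboardPort = options.dashboardPort ?? nextPort + 1000; // 1e3 in the bundle
    return { nextPort, dashboardPort };
  }
  console.log(resolvePorts({}));             // { nextPort: 3000, dashboardPort: 4000 }
  console.log(resolvePorts({ port: 4123 })); // { nextPort: 4123, dashboardPort: 5123 }
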
+if (command === "dev") {
+  const options = parseDevArgs(args.slice(1));
+  const projectDir = process.cwd();
+  const isNextJs = await isNextJsProject(projectDir, options.config);
+  if (isNextJs) {
+    await startNextJsDev({
+      port: options.port,
+      host: options.host,
+      config: options.config,
+      verbose: options.verbose,
+      dashboardPort: options.dashboardPort
+    });
+  } else {
+    const { startEmulator: startEmulator2 } = await Promise.resolve().then(() => (init_dist2(), dist_exports));
+    await startEmulator2({
+      port: options.port,
+      host: options.host,
+      configPath: options.config,
+      watch: options.watch,
+      verbose: options.verbose,
+      dashboardPort: options.dashboardPort
+    });
+  }
+} else if (command === "types") {
+  const options = parseTypesArgs(args.slice(1));
+  await typesCommand(options);
+} else if (command === "build") {
+  const options = parseBuildArgs(args.slice(1));
+  await buildCommand(options);
+} else {
+  console.error(`Unknown command: ${command}`);
+  console.error("\nAvailable commands:");
+  console.error(" build Build your Ploy project");
+  console.error(" dev Start development server");
+  console.error(" types Generate TypeScript types from ploy.yaml");
+  process.exit(1);
+}
 }
-//# sourceMappingURL=index.js.map