vmlive 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,44 @@
1
+ # @vmlive/cli
2
+
3
+ Local emulation sandbox and deployment CLI for vm.live.
4
+
5
+ This CLI provides a local development environment for serverless edge functions running on vm.live, including local emulation for DB, KV, and Bucket storage.
6
+
7
+ ## Installation
8
+
9
+ You do not need to install the CLI globally. It can be run directly via `npx`:
10
+
11
+ ```bash
12
+ npx vmlive [command]
13
+ ```
14
+
15
+ ## Commands
16
+
17
+ ### `npx vmlive login`
18
+ Authenticates the CLI with your vm.live account via PKCE OAuth. Saves the access token to `~/.vm-config.json`.
19
+
20
+ ### `npx vmlive init`
21
+ Scaffolds a new project in the current directory.
22
+ - Prompts for Workspace and Project selection.
23
+ - Generates `vm.json` (configuration manifest) and `index.ts`.
24
+ - Installs `@vmlive/types` and configures TypeScript.
25
+
26
+ ### `npx vmlive dev`
27
+ Starts the local emulation environment.
28
+ - Runs functions locally via `http://<function-name>.localhost:8787`.
29
+ - Provides local proxy bindings for your SQL Database (`env.DB`), Global KV (`env.KV`), and Object Storage (`env.BUCKET`) using the `.vmlive/` directory for persistent storage.
30
+ - Supports hot-reloading for `.js` and `.ts` files.
31
+
32
+ ### `npx vmlive deploy`
33
+ Uploads the local code to the vm.live edge platform.
34
+ - Bundles the code using `esbuild`.
35
+ - Evaluates and injects environment variables from `.env` and `.env.production`.
36
+
37
+ ## AI / LLM Tooling Setup
38
+
39
+ If you are using an AI assistant (such as Cursor, Copilot, or Aider) to write code, provide it with the platform context to ensure it adheres to the V8 Isolate execution model and uses the correct infrastructure bindings.
40
+
41
+ **Instructions:**
42
+ 1. Download the `vmlive-ai-rules.md` file (or relevant rules document) provided by the platform.
43
+ 2. Place it in the root directory of your project.
44
+ 3. If using Cursor, rename the file to `.cursorrules` to force the AI to apply the constraints automatically.
package/package.json ADDED
@@ -0,0 +1,21 @@
1
+ {
2
+ "name": "vmlive",
3
+ "version": "1.0.0",
4
+ "description": "Local development VM for custom Serverless PaaS",
5
+ "type": "module",
6
+ "bin": {
7
+ "vmlive": "./src/cli.js"
8
+ },
9
+ "scripts": {
10
+ "test": "vitest run"
11
+ },
12
+ "dependencies": {
13
+ "@inquirer/prompts": "^8.3.2",
14
+ "dotenv": "^16.4.5",
15
+ "esbuild": "^0.20.2",
16
+ "miniflare": "^3.20231218.0"
17
+ },
18
+ "devDependencies": {
19
+ "vitest": "^3.2.4"
20
+ }
21
+ }
package/src/cli.js ADDED
@@ -0,0 +1,546 @@
1
+ #!/usr/bin/env node
2
+ import fs from 'fs';
3
+ import path from 'path';
4
+ import { pathToFileURL, fileURLToPath } from 'url';
5
+ import esbuild from 'esbuild';
6
+ import dotenv from 'dotenv';
7
+ import http from 'http';
8
+ import os from 'os';
9
+ import crypto from 'crypto';
10
+ import { exec } from 'child_process';
11
+ import { Miniflare } from 'miniflare';
12
+ import { select, input } from '@inquirer/prompts';
13
+ import { generateShim } from './string-shim.js';
14
+
15
// Resolve this module's own path (ESM provides no __filename/__dirname builtins).
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Build artifacts (esbuild bundles, generated shims, shadow-worker copy) go in
// the npm cache directory so they stay out of the user's working tree.
const WORK_DIR = path.resolve('node_modules', '.cache', 'vmlive');
// Persistent local emulation state (D1/KV/R2 data); survives dev restarts.
const DATA_DIR = path.resolve('.vmlive');
// Project manifest and default function entry point, resolved against the CWD.
const CONFIG_PATH = path.resolve('vm.json');
const API_FILE = path.resolve('index.ts');
23
+
24
/**
 * Resource bindings attached to every user worker in the local sandbox:
 * a D1 database (`env.DB`), a shared raw KV namespace (`__RAW_KV`, which the
 * generated shim wraps into a project-prefixed `env.KV`), and an R2 bucket.
 *
 * @returns {{d1Databases: string[], kvNamespaces: Object, r2Buckets: string[]}}
 */
const buildResourcesConfig = () => ({
  d1Databases: ["DB"],
  kvNamespaces: { "__RAW_KV": "local_store" },
  r2Buckets: ["R2"]
});
31
+
32
/**
 * Builds the Miniflare worker definition for the gateway that fronts every
 * local function. Routing is by subdomain: `http://<fn>.localhost:<port>`
 * dispatches to the service binding named `<fn>`; bare `localhost` or
 * `127.0.0.1` falls back to the default function named "api".
 *
 * @param {{name: string}[]} functions - Functions declared in vm.json.
 * @returns {object} Miniflare worker config with one service binding per function.
 */
const buildProxyDispatcher = (functions) => ({
  name: "vm-gateway-proxy",
  modules: true,
  script: `
    export default {
      async fetch(request, env) {
        const url = new URL(request.url);
        const hostHeader = request.headers.get("Host") || url.hostname;
        // BUG FIX: the Host header usually carries ":port" (e.g. "localhost:8787"),
        // so splitting on "." alone made the bare-localhost fallback unreachable.
        // Strip the port before extracting the subdomain.
        const hostname = hostHeader.split(':')[0];
        let targetName = hostname.split('.')[0];

        if (targetName === '127' || targetName === 'localhost') {
          targetName = 'api';
        }

        if (targetName && env[targetName]) {
          return env[targetName].fetch(request);
        }

        return new Response(
          \`Function Not Found: \${targetName}.\\n\\nUse format: http://<function-name>.localhost:8787\`,
          { status: 404 }
        );
      }
    }
  `,
  // Each declared function becomes a same-named service binding on the gateway.
  serviceBindings: functions.reduce((acc, fn) => {
    acc[fn.name] = fn.name;
    return acc;
  }, {})
});
62
+
63
/**
 * `vmlive init` — interactively scaffolds a new project in the current directory.
 *
 * Flow: verify an auth token exists → fetch workspaces and projects from the
 * gatekeeper API → prompt for a workspace, project, and function name → write
 * index.ts, vm.json, .gitignore, package.json and tsconfig.json (creating or
 * merging each as needed) → run `npm install`.
 * Exits the process with code 1 on any unrecoverable condition.
 */
const runInit = async () => {
  // Refuse to clobber an existing project manifest.
  if (fs.existsSync(CONFIG_PATH)) {
    console.error('\x1b[31m❌ Initialization Aborted:\x1b[0m vm.json already exists in this directory.');
    process.exit(1);
  }

  // 1. Enforce Authentication Guard
  // Token precedence: VM_API_TOKEN env var, then ~/.vm-config.json saved by `login`.
  const configPath = path.join(os.homedir(), '.vm-config.json');
  let jwtToken = process.env.VM_API_TOKEN;
  if (!jwtToken && fs.existsSync(configPath)) {
    jwtToken = JSON.parse(fs.readFileSync(configPath, 'utf-8')).token;
  }

  if (!jwtToken) {
    console.error('\x1b[31m❌ Unauthorized.\x1b[0m Please run \x1b[36mnpx vmlive login\x1b[0m first to securely pair this CLI.');
    process.exit(1);
  }

  const GATEKEEPER_URL = process.env.GATEKEEPER_URL || 'http://localhost:8787';

  // 2. Extract Workspace Architecture
  console.log('\x1b[36m⏳ Orchestrating live Platform boundaries...\x1b[0m');
  const wsRes = await fetch(`${GATEKEEPER_URL}/api/workspaces`, {
    headers: { 'Authorization': `Bearer ${jwtToken}` }
  });

  if (!wsRes.ok) {
    if (wsRes.status === 401) {
      console.error('\x1b[31m❌ Session Expired:\x1b[0m Your authentication token has expired. Please run \x1b[36mnpx vmlive login\x1b[0m to securely re-authenticate.');
    } else {
      console.error('\x1b[31m❌ Sandbox Error:\x1b[0m Failed to securely retrieve workspace architecture from Production. HTTP ' + wsRes.status);
    }
    process.exit(1);
  }

  const workspaces = await wsRes.json();
  if (!Array.isArray(workspaces) || workspaces.length === 0) {
    console.error('\x1b[31m❌ Sandbox Error:\x1b[0m You do not possess any authorized Workspaces. Please map one on the dashboard first!');
    process.exit(1);
  }

  // 3. Interactive Target Lock
  const workspaceSlug = await select({
    message: 'Select exactly which Platform Workspace you want this Sandbox to dynamically bind to:',
    choices: workspaces.map(w => ({ name: w.name, value: w.slug }))
  });

  // 4. Extract Project Containers
  const projRes = await fetch(`${GATEKEEPER_URL}/api/${workspaceSlug}/projects`, {
    headers: { 'Authorization': `Bearer ${jwtToken}` }
  });

  if (!projRes.ok) {
    console.error('\x1b[31m❌ Sandbox Error:\x1b[0m Failed to logically retrieve project constraints from Production.');
    process.exit(1);
  }

  const projects = await projRes.json();
  if (!Array.isArray(projects) || projects.length === 0) {
    console.error('\x1b[31m❌ Sandbox Error:\x1b[0m No projects mathematically exist in this Workspace constraint.');
    process.exit(1);
  }

  const projectSlug = await select({
    message: 'Select exactly which Edge Project you want to natively emulate offline:',
    choices: projects.map(p => ({ name: p.name, value: p.slug }))
  });

  const functionNameRaw = await input({
    message: 'What do you want to conceptually name this initial serverless function?',
    default: 'api'
  });
  // Sanitize to lowercase DNS-safe characters — the name is used as a
  // *.localhost subdomain by the dev gateway.
  const functionName = functionNameRaw.trim().toLowerCase().replace(/[^a-z0-9-]/g, '') || 'api';

  // 5. Instantiation
  // Default entry point: a minimal typed fetch handler.
  if (!fs.existsSync(API_FILE)) {
    const defaultApiTs = `import type { Env, Context } from '@vmlive/types';
export default {
  async fetch(request: Request, env: Env, ctx: Context) {
    return new Response("Hello from your Local PaaS Edge!", { status: 200 });
  }
};
`;
    fs.writeFileSync(API_FILE, defaultApiTs);
  }

  // Project manifest consumed by `dev` and `deploy`.
  if (!fs.existsSync(CONFIG_PATH)) {
    const defaultVmJson = `{
  "workspaceId": "${workspaceSlug}",
  "projectId": "${projectSlug}",
  "functions": [
    { "name": "${functionName}", "entry": "index.ts" }
  ]
}
`;
    fs.writeFileSync(CONFIG_PATH, defaultVmJson);
  }

  // Merge (never overwrite) the standard ignore entries into .gitignore.
  const gitignorePath = path.resolve('.gitignore');
  const ignoreEntries = ['node_modules', '.vmlive', '.env'];
  if (fs.existsSync(gitignorePath)) {
    let currentIgnore = fs.readFileSync(gitignorePath, 'utf8');
    let appended = false;
    ignoreEntries.forEach(entry => {
      if (!currentIgnore.includes(entry)) {
        currentIgnore += `\n${entry}`;
        appended = true;
      }
    });
    if (appended) fs.writeFileSync(gitignorePath, currentIgnore.trim() + '\n');
  } else {
    fs.writeFileSync(gitignorePath, ignoreEntries.join('\n') + '\n');
  }

  // Create package.json, or add @vmlive/types to an existing one.
  const pkgPath = path.resolve('package.json');
  if (!fs.existsSync(pkgPath)) {
    const defaultPackageJson = `{
  "name": "vmlive-project",
  "version": "1.0.0",
  "type": "module",
  "devDependencies": {
    "@vmlive/types": "*",
    "typescript": "^5.0.0"
  }
}
`;
    fs.writeFileSync(pkgPath, defaultPackageJson);
  } else {
    try {
      const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
      pkg.devDependencies = pkg.devDependencies || {};
      if (!pkg.devDependencies['@vmlive/types']) {
        pkg.devDependencies['@vmlive/types'] = '*';
        fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2));
      }
    } catch(e) {
      // gracefully ignore malformed package.json
    }
  }

  // Create tsconfig.json, or inject the @vmlive/types ambient types entry.
  const tsConfigPath = path.resolve('tsconfig.json');
  if (!fs.existsSync(tsConfigPath)) {
    const defaultTsConfig = `{
  "compilerOptions": {
    "target": "ES2022",
    "module": "ESNext",
    "moduleResolution": "bundler",
    "types": ["@vmlive/types"],
    "strict": true,
    "skipLibCheck": true,
    "noEmit": true
  },
  "include": ["**/*.ts"]
}
`;
    fs.writeFileSync(tsConfigPath, defaultTsConfig);
  } else {
    try {
      const tsconfig = JSON.parse(fs.readFileSync(tsConfigPath, 'utf8'));
      tsconfig.compilerOptions = tsconfig.compilerOptions || {};
      tsconfig.compilerOptions.types = tsconfig.compilerOptions.types || [];
      if (!tsconfig.compilerOptions.types.includes('@vmlive/types')) {
        tsconfig.compilerOptions.types.push('@vmlive/types');
        fs.writeFileSync(tsConfigPath, JSON.stringify(tsconfig, null, 2));
      }
    } catch(e) {}
  }

  // Best-effort install: a failure is warned about, not fatal.
  console.log('\x1b[36m📦 Installing platform types and dependencies...\x1b[0m');
  await new Promise((resolve) => {
    exec('npm install', (err) => {
      if (err) console.error('\x1b[33m⚠️ Warning: npm install failed. You may need to run it manually to resolve TS errors.\x1b[0m');
      resolve();
    });
  });

  console.log('\x1b[32m✨ Extensible Project Scaffolded Successfully!\x1b[0m');
  console.log('You can now natively run \x1b[36mnpx vmlive dev\x1b[0m to initiate the offline sandbox proxy.');
};
242
+
243
/**
 * `vmlive dev` — boots the local emulation environment.
 *
 * Bundles each function from vm.json with esbuild, wraps every bundle in the
 * generated platform shim, and runs them under Miniflare behind a
 * subdomain-routing gateway worker. D1/KV/R2 state persists under `.vmlive/`;
 * `.ts`/`.js` changes trigger a debounced rebuild + worker reload.
 *
 * @throws {Error} when vm.json is missing in the project root.
 */
const runDev = async () => {
  if (!fs.existsSync(CONFIG_PATH)) {
    throw new Error("Missing vm.json in project root. Run 'vm init' to scaffold a new project workspace.");
  }

  const config = JSON.parse(fs.readFileSync(CONFIG_PATH, 'utf-8'));

  if (!fs.existsSync(WORK_DIR)) fs.mkdirSync(WORK_DIR, { recursive: true });
  if (!fs.existsSync(DATA_DIR)) {
    fs.mkdirSync(DATA_DIR, { recursive: true });
  }

  // .env values are exposed to every local function as plain worker bindings.
  const sharedBindings = fs.existsSync(path.resolve('.env'))
    ? dotenv.parse(fs.readFileSync(path.resolve('.env')))
    : {};

  const resourcesConfig = buildResourcesConfig();

  // One esbuild entry per function; outputs are named `<fn>-out.mjs`.
  const entryPoints = config.functions.reduce((acc, fn) => {
    acc[`${fn.name}-out`] = path.resolve(fn.entry);
    return acc;
  }, {});

  const workspaceId = config.workspaceId || "ws_local";
  const projectId = config.projectId || "prj_local";

  // Each function gets a generated shim module that wires the platform
  // bindings (KV prefixing, Tasks, Channels, …) around the user bundle.
  config.functions.forEach(fn => {
    const relativeTarget = `./${fn.name}-out.mjs`;
    fs.writeFileSync(path.join(WORK_DIR, `${fn.name}-shim.mjs`), generateShim(relativeTarget, workspaceId, projectId, fn.name));
  });

  // Incremental build context so hot reloads are cheap.
  const builder = await esbuild.context({
    entryPoints,
    bundle: true,
    format: 'esm',
    outdir: WORK_DIR,
    outExtension: { '.js': '.mjs' },
    external: ['cloudflare:*'],
    logLevel: 'warning'
  });

  await builder.rebuild();
  console.log('\x1b[32m✨ Platform Engine Initialized.\x1b[0m');

  // Copy the shadow Durable Object worker next to the build artifacts so
  // Miniflare can load it by scriptPath.
  const shadowSource = fs.readFileSync(path.join(__dirname, 'shadow-dos.js'), 'utf-8');
  fs.writeFileSync(path.join(WORK_DIR, 'shadow-dos.mjs'), shadowSource);

  // PORT env var wins over vm.json's "port"; default 8787. (Radix added —
  // original called parseInt without one.)
  const mfPort = process.env.PORT ? parseInt(process.env.PORT, 10) : (config.port || 8787);

  const miniflareWorkers = [
    buildProxyDispatcher(config.functions),
    {
      name: "vm-shadow-worker",
      modules: true,
      scriptPath: path.join(WORK_DIR, 'shadow-dos.mjs'),
      bindings: { PORT: mfPort },
      durableObjects: {
        LocalTaskManagerDO: "LocalTaskManagerDO",
        LocalChannelRoomDO: "LocalChannelRoomDO"
      }
    },
    ...config.functions.map(fn => ({
      name: fn.name,
      modules: true,
      scriptPath: path.join(WORK_DIR, `${fn.name}-shim.mjs`),
      bindings: sharedBindings,
      ...resourcesConfig,
      // DO bindings point at classes hosted by the shadow worker above.
      durableObjects: {
        TASK_DO: { className: "LocalTaskManagerDO", scriptName: "vm-shadow-worker" },
        CHANNEL_DO: { className: "LocalChannelRoomDO", scriptName: "vm-shadow-worker" }
      }
    }))
  ];
  const mf = new Miniflare({
    workers: miniflareWorkers,
    port: mfPort,
    cachePersist: path.join(WORK_DIR, 'cache'),
    d1Persist: path.join(DATA_DIR, 'db'),
    kvPersist: path.join(DATA_DIR, 'kv'),
    r2Persist: path.join(DATA_DIR, 'bucket')
  });
  await mf.ready;
  console.log(`🚀 Extensible Serverless VM running natively on http://*.localhost:${mfPort}`);

  // Periodic heap check: warn when local usage nears the 128MB production cap.
  setInterval(() => {
    const memoryData = process.memoryUsage();
    const heapMb = Math.round(memoryData.heapUsed / 1024 / 1024);
    if (heapMb > 110) {
      console.warn(`\n\x1b[33m⚠️ [VM-CLI] CAUTION: HIGH MEMORY USAGE (${heapMb}MB)\x1b[0m`);
      console.warn(`\x1b[33mYour worker environment is approaching the 128MB production limit. Local memory profiling is bloated due to dev-tooling overhead, but ensure your production build avoids massive synchronous allocations.\x1b[0m`);
    }
  }, 10000);

  // Debounced hot reload: rebuild bundles, regenerate shims, then push the
  // same worker config back into Miniflare (the scriptPath contents changed).
  // NOTE(review): the recursive watch also fires for node_modules/*.js —
  // harmless (it just re-triggers a rebuild) but worth filtering eventually.
  let debounceTimeout = null;
  fs.watch(process.cwd(), { recursive: true }, (eventType, filename) => {
    if (filename && (filename.endsWith('.ts') || filename.endsWith('.js'))) {
      if (debounceTimeout) clearTimeout(debounceTimeout);
      debounceTimeout = setTimeout(async () => {
        try {
          // BUG FIX: original logged the literal text "$(unknown)" — report
          // the actual changed file instead.
          console.log(`\n🔄 ${filename} mutated. Syncing Engine...`);
          await builder.rebuild();

          config.functions.forEach(fn => {
            const relativeTarget = `./${fn.name}-out.mjs`;
            fs.writeFileSync(path.join(WORK_DIR, `${fn.name}-shim.mjs`), generateShim(relativeTarget, workspaceId, projectId, fn.name));
          });

          await mf.setOptions({
            workers: miniflareWorkers,
            port: mfPort,
            cachePersist: path.join(WORK_DIR, 'cache'),
            d1Persist: path.join(DATA_DIR, 'db'),
            kvPersist: path.join(DATA_DIR, 'kv'),
            r2Persist: path.join(DATA_DIR, 'bucket')
          });
          await mf.ready;
          console.log('\x1b[32m✨ Engine Hot-Reloaded.\x1b[0m');
        } catch (err) {
          console.error('\x1b[31m❌ Engine Sync Crash:\x1b[0m', err.message);
        }
      }, 150);
    }
  });

  // Graceful shutdown on Ctrl-C: dispose the esbuild context and Miniflare.
  process.on('SIGINT', async () => {
    console.log('\n🛑 Suspending Platform VM...');
    await builder.dispose();
    await mf.dispose();
    process.exit(0);
  });
};
375
+
376
/**
 * `vmlive deploy` — bundles each declared function with esbuild and uploads
 * the resulting ESM module, together with evaluated `.env` / `.env.production`
 * variables, to the gatekeeper deploy endpoint.
 * Exits with code 1 on missing auth, missing config, or a failed upload.
 */
const runDeploy = async () => {
  console.log('\x1b[36m🚀 Initiating Deployment pipeline...\x1b[0m');

  // Resolve the API token: env var first, then the config saved by `login`.
  const credentialsFile = path.join(os.homedir(), '.vm-config.json');
  let token = process.env.VM_API_TOKEN;
  if (!token && fs.existsSync(credentialsFile)) {
    token = JSON.parse(fs.readFileSync(credentialsFile, 'utf-8')).token;
  }

  if (!token) {
    console.error('\x1b[31m❌ Unauthorized. Please run `vm login` first.\x1b[0m');
    process.exit(1);
  }

  if (!fs.existsSync(CONFIG_PATH)) {
    console.error('\x1b[31m❌ Config vm.json missing. Run `vm init` first.\x1b[0m');
    process.exit(1);
  }

  const { workspaceId, projectId, functions } = JSON.parse(fs.readFileSync(CONFIG_PATH, 'utf-8'));

  // Cascade env files: .env is the base, .env.production overrides it.
  const envVars = {};
  if (fs.existsSync('.env')) {
    Object.assign(envVars, dotenv.parse(fs.readFileSync('.env')));
  }
  if (fs.existsSync('.env.production')) {
    Object.assign(envVars, dotenv.parse(fs.readFileSync('.env.production')));
  }

  console.log(`\x1b[35m[Config]\x1b[0m Successfully extracted ${Object.keys(envVars).length} Environment Variables.`);

  const gatekeeperUrl = process.env.GATEKEEPER_URL || 'http://localhost:8787';

  for (const fn of functions) {
    console.log(`\x1b[36m[esbuild]\x1b[0m Bundling ${fn.name}...`);
    const bundlePath = path.join(WORK_DIR, `${fn.name}-out.mjs`);

    // One-shot production bundle (no incremental context needed here).
    await esbuild.build({
      entryPoints: [path.resolve(fn.entry)],
      bundle: true,
      format: 'esm',
      outfile: bundlePath,
      external: ['cloudflare:*', 'node:*'],
    });

    const code = fs.readFileSync(bundlePath, 'utf8');
    const endpoint = `${gatekeeperUrl}/api/${workspaceId}/projects/${projectId}/functions/${fn.name}/deploy`;

    console.log(`\x1b[36m[POST]\x1b[0m Uploading to Gatekeeper Dispatch Edge...`);

    const res = await fetch(endpoint, {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${token}`,
        'Content-Type': 'application/json'
      },
      body: JSON.stringify({ code, envVars })
    });

    if (!res.ok) {
      console.error(`\x1b[31m❌ Deployment failed: ${res.status}\x1b[0m`);
      console.error(await res.text());
      process.exit(1);
    }

    const jsonRes = await res.json();
    console.log(`\x1b[32m✨ Extracted Function Deployed to: ${jsonRes.public_url || 'Success!'}\x1b[0m`);
  }
};
449
+
450
/**
 * `vmlive login` — authenticates the CLI via a PKCE-style browser flow.
 *
 * Generates a verifier/challenge pair, starts a loopback HTTP server on an
 * ephemeral port, and opens the editor URL carrying the challenge and the
 * callback port. The browser redirects back with an auth code, which is
 * exchanged (together with the verifier) at the gatekeeper for an access
 * token saved to ~/.vm-config.json. The process exits once the flow settles.
 */
const runLogin = async () => {
  console.log('\x1b[36m✨ Initiating vm.live PKCE Authentication...\x1b[0m');

  // RFC 7636 shape: random verifier; challenge = base64url(SHA-256(verifier)).
  const codeVerifier = crypto.randomBytes(32).toString('base64url');
  const codeChallenge = crypto.createHash('sha256').update(codeVerifier).digest('base64url');

  const GATEKEEPER_URL = process.env.GATEKEEPER_URL || 'http://localhost:8787';

  const server = http.createServer(async (req, res) => {
    const url = new URL(req.url, `http://127.0.0.1:${server.address().port}`);
    const authCode = url.searchParams.get('code');

    if (!authCode) {
      res.writeHead(400);
      res.end('Bad Request: Missing OAuth Code');
      return;
    }

    try {
      // Exchange the one-time code + verifier for an access token.
      console.log('⏳ Exchanging PKCE Authorization Code...');
      const tokenRes = await fetch(`${GATEKEEPER_URL}/api/auth/cli/exchange`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ auth_code: authCode, code_verifier: codeVerifier })
      });

      if (!tokenRes.ok) throw new Error(await tokenRes.text());
      const { access_token } = await tokenRes.json();

      const configPath = path.join(os.homedir(), '.vm-config.json');
      fs.writeFileSync(configPath, JSON.stringify({ token: access_token }, null, 2));

      res.writeHead(200, { 'Content-Type': 'text/html' });
      res.end(`
        <html><body style="background:#09090b;color:#fff;font-family:sans-serif;display:flex;align-items:center;justify-content:center;height:100vh;margin:0;">
          <div style="text-align:center;">
            <div style="width:64px;height:64px;background:rgba(16,185,129,0.1);color:#10b981;border-radius:50%;display:flex;align-items:center;justify-content:center;margin:0 auto 24px auto;">
              <svg width="32" height="32" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><path d="M20 6L9 17l-5-5"></path></svg>
            </div>
            <h1 style="color:#f4f4f5;font-size:24px;margin-bottom:8px;">Authentication Successful</h1>
            <p style="color:#a1a1aa;margin-bottom:24px;">Your vm.live CLI is securely authorized.</p>
            <p style="color:#52525b;font-size:14px;">You may now safely close this browser window and return to your terminal.</p>
          </div>
        </body></html>
      `);
      console.log('\x1b[32m✅ Successfully authenticated! Saved strictly to ~/.vm-config.json\x1b[0m');

      setTimeout(() => {
        server.close();
        process.exit(0);
      }, 1500); // 1.5s flush buffer TCP protection

    } catch (e) {
      console.error('\x1b[31m❌ Failed to exchange code:\x1b[0m', e.message);
      res.writeHead(500, { 'Content-Type': 'text/html' });
      res.end('<html><body><h1>Authentication Failed</h1></body></html>');

      setTimeout(() => {
        server.close();
        process.exit(1);
      }, 1500);
    }
  });

  // Bind to an ephemeral loopback port, then hand the browser the challenge
  // and the callback port.
  server.listen(0, '127.0.0.1', () => {
    const PORT = server.address().port;
    const EDITOR_URL = process.env.EDITOR_URL || 'http://localhost:5174';
    const authUrl = `${EDITOR_URL}/?challenge=${codeChallenge}&port=${PORT}`;
    console.log(`\x1b[34mOpening browser to: ${authUrl}\x1b[0m`);
    // BUG FIX: on Windows, `start "<url>"` treats the quoted URL as the new
    // window's TITLE and opens nothing. Pass an explicit empty title so the
    // URL argument is actually opened: start "" "<url>".
    const openCmd = process.platform === 'darwin' ? 'open' : process.platform === 'win32' ? 'start ""' : 'xdg-open';
    exec(`${openCmd} "${authUrl}"`);
  });
};
525
+
526
/**
 * CLI entry point: dispatches on the first positional argument.
 * No argument defaults to `dev`; unknown commands print usage and exit 1.
 */
const main = async () => {
  const command = process.argv[2];
  if (command === 'init') {
    // BUG FIX: runInit() was previously not awaited, so a failure inside it
    // surfaced as an unhandled rejection instead of hitting the fatal
    // handler below (and main() resolved before init finished).
    await runInit();
  } else if (command === 'dev' || !command) {
    await runDev();
  } else if (command === 'login') {
    await runLogin();
  } else if (command === 'deploy') {
    await runDeploy();
  } else {
    console.error(`\x1b[31m❌ Unknown command: ${command}\x1b[0m`);
    console.log('Usage: vm init | vm dev | vm login | vm deploy');
    process.exit(1);
  }
};

// Top-level launch with a single fatal-error sink.
main().catch(err => {
  console.error("Fatal Kernel Exception:", err);
  process.exit(1);
});
@@ -0,0 +1,137 @@
1
+ import { DurableObject } from "cloudflare:workers";
2
+
3
/**
 * Durable Object emulating the platform task scheduler locally.
 *
 * `POST /.internal/tasks/schedule` stores a task record and arms a storage
 * alarm. When the alarm fires, every due, non-completed task is dispatched
 * back through the Miniflare gateway (Host-header routing) to the owning
 * function's shim. Recurring tasks (`interval` set) are re-armed after each
 * successful dispatch; one-shots are marked completed.
 */
export class LocalTaskManagerDO extends DurableObject {
  constructor(ctx, env) {
    super(ctx, env);
  }

  async fetch(request) {
    const url = new URL(request.url);

    if (request.method === "POST" && url.pathname === "/.internal/tasks/schedule") {
      const body = await request.json();
      const { functionSlug, executeAt, name, payload, workspaceId, projectId, interval } = body;

      // Recurring tasks use a stable id so re-scheduling the same named cron
      // overwrites the previous record; one-shots get a fresh UUID.
      const taskId = interval ? `cron:${functionSlug}:${name}` : crypto.randomUUID();

      const taskRecord = {
        id: taskId,
        function_slug: functionSlug,
        type: "schedule",
        execute_at: executeAt,
        name: name,
        payload: JSON.stringify(payload || {}),
        workspace_id: workspaceId,
        project_id: projectId,
        interval: interval || null,
        status: "pending"
      };

      await this.ctx.storage.put(taskId, taskRecord);
      // We set the native Miniflare alarm
      await this.ctx.storage.setAlarm(executeAt);

      return Response.json({ success: true, id: taskId });
    }
    return new Response("Not Found", { status: 404 });
  }

  // Alarm handler: scan storage and dispatch every due pending task.
  async alarm() {
    const now = Date.now();
    const MapTasks = await this.ctx.storage.list();

    for (const [id, task] of MapTasks) {
      if (task.execute_at <= now && task.status !== 'completed') {
        try {
          // Structurally bypass network by resolving back through Miniflare host headers natively
          const invokeUrl = `http://127.0.0.1:${this.env.PORT || 8787}/.internal/tasks/dispatch`;

          const res = await fetch(invokeUrl, {
            method: 'POST',
            headers: {
              'Host': `${task.function_slug}.localhost`,
              'Content-Type': 'application/json',
              'x-vm-system-signature': 'local-bypass'
            },
            body: JSON.stringify({
              taskId: task.id,
              name: task.name,
              payload: JSON.parse(task.payload),
              executeAt: task.execute_at,
              dispatchedAt: now
            })
          });

          if (res.ok) {
            if (task.interval) {
              // CONSISTENCY FIX: accept the same units the generated shim
              // accepts (s/m/h/d) — previously a "Nd" interval matched
              // nothing here and the recurring task silently died.
              const match = task.interval.match(/^(\d+)([smhd])$/);
              if (match) {
                const val = parseInt(match[1], 10);
                const unit = match[2];
                let addMs = 0;
                if (unit === 's') addMs = val * 1000;
                else if (unit === 'm') addMs = val * 60000;
                else if (unit === 'h') addMs = val * 3600000;
                else if (unit === 'd') addMs = val * 86400000;

                task.execute_at = now + addMs;
                await this.ctx.storage.put(id, task);
                await this.ctx.storage.setAlarm(task.execute_at);
              }
            } else {
              task.status = 'completed';
              await this.ctx.storage.put(id, task);
            }
          }
        } catch (e) {
          console.error("[Shadow DO] Task execution failed locally:", e);
        }
      }
    }
  }
}
92
+
93
/**
 * Per-room Durable Object backing the local `Channels` binding.
 * One instance per room name; relays WebSocket messages between peers and
 * accepts server-initiated broadcasts over HTTP.
 */
export class LocalChannelRoomDO extends DurableObject {
  constructor(ctx, env) {
    super(ctx, env);
  }

  // Routes: POST /broadcast fans a JSON payload out to every connected
  // socket; a websocket Upgrade request joins the room; anything else is 404.
  async fetch(request) {
    const url = new URL(request.url);

    if (request.method === "POST" && url.pathname === "/broadcast") {
      const body = await request.json();
      const websockets = this.ctx.getWebSockets();
      for (const ws of websockets) {
        ws.send(JSON.stringify(body));
      }
      return new Response("OK");
    }

    if (request.headers.get("Upgrade") === "websocket") {
      const pair = new WebSocketPair();
      const [client, server] = Object.values(pair);
      // acceptWebSocket registers the server end with the runtime, which then
      // delivers events to the webSocketMessage/webSocketClose handlers below.
      this.ctx.acceptWebSocket(server);
      return new Response(null, { status: 101, webSocket: client });
    }

    return new Response("Not Found", { status: 404 });
  }

  // Relay a client message to every socket in the room except the sender.
  webSocketMessage(ws, message) {
    this.ctx.getWebSockets().forEach((sock) => {
      if (sock !== ws) {
        sock.send(message);
      }
    });
  }

  // Mirror the close handshake back to the closing socket.
  webSocketClose(ws, code, reason, wasClean) {
    ws.close(code, reason);
  }
}
132
+
133
// Default fetch handler for the shadow worker itself. Normal traffic reaches
// this worker only through its Durable Object bindings, so a direct HTTP
// request just gets a static 200 identifying the worker.
export default {
  async fetch() {
    return new Response("Shadow DO Platform Engine Internal Worker", { status: 200 });
  }
};
@@ -0,0 +1,151 @@
1
/**
 * Emits the source of the wrapper module ("shim") that Miniflare runs in
 * place of the user's function. The returned string is written to
 * `<fn>-shim.mjs` and imports the user's esbuild bundle via `outFileUrl`.
 *
 * The generated worker, per request:
 *  1. hides the shared raw KV binding behind a view whose keys are prefixed
 *     with "<workspaceId>:<projectId>:" so projects cannot see each other;
 *  2. exposes `Tasks` / `Channels` helpers backed by the shadow Durable
 *     Objects (TASK_DO / CHANNEL_DO bindings);
 *  3. stubs `Discord.verify` / `Slack.verify` to always succeed locally;
 *  4. intercepts internal task-dispatch POSTs (marked with the
 *     `x-vm-system-signature: local-bypass` header) and routes them to the
 *     user's exported `tasks()` handler;
 *  5. otherwise delegates to the user's `fetch` handler, converting thrown
 *     errors into a JSON 500 response.
 *
 * NOTE: `${...}` below is interpolated at generation time; `\${...}` and
 * escaped backticks survive into the emitted worker source.
 *
 * @param {string} outFileUrl   Relative import specifier of the user bundle.
 * @param {string} workspaceId  Workspace slug (KV prefix / DO instance name).
 * @param {string} projectId    Project slug (KV prefix / DO instance name).
 * @param {string} functionSlug Function name embedded in scheduled tasks.
 * @returns {string} JavaScript source of the shim module.
 */
export const generateShim = (outFileUrl, workspaceId, projectId, functionSlug) => `
import * as UserCode from '${outFileUrl}';

export default {
  async fetch(req, env, ctx) {
    // Isolate the global KV
    const { __RAW_KV, ...safeEnv } = env;
    const KV_PREFIX = \`${workspaceId}:${projectId}:\`;

    // 1. Air-Gapped Virtual KV Multiplexer
    const virtualKV = __RAW_KV ? {
      get: (key, options) => __RAW_KV.get(KV_PREFIX + key, options),
      getWithMetadata: (key, options) => __RAW_KV.getWithMetadata(KV_PREFIX + key, options),
      put: (key, value, options) => __RAW_KV.put(KV_PREFIX + key, value, options),
      delete: (key) => __RAW_KV.delete(KV_PREFIX + key),
      list: async (options = {}) => {
        const mergedOptions = { ...options, prefix: KV_PREFIX + (options.prefix || "") };
        const result = await __RAW_KV.list(mergedOptions);
        return {
          ...result,
          keys: result.keys.map(k => ({ ...k, name: k.name.substring(KV_PREFIX.length) }))
        };
      }
    } : undefined;

    // 2. Custom Platform Bindings internally mapped to Shadow DO instances
    const Tasks = {
      schedule: async (time, name, payload) => {
        let addMs = 0;
        const match = time.toString().match(/^(\\d+)([smhd])$/);
        if (match) {
          const val = parseInt(match[1]);
          const unit = match[2];
          if (unit === 's') addMs = val * 1000;
          else if (unit === 'm') addMs = val * 60000;
          else if (unit === 'h') addMs = val * 3600000;
          else if (unit === 'd') addMs = val * 86400000;
        } else {
          addMs = parseInt(time) || 0;
        }

        console.log(\`\\x1b[36m[\${env.PROJECT_ID || '${projectId}'}]\\x1b[0m 🕒 Tasks.schedule: [\${name}] at \${time}\`);

        const req = new Request("http://platform/.internal/tasks/schedule", {
          method: "POST", body: JSON.stringify({
            functionSlug: "${functionSlug}",
            executeAt: Date.now() + addMs,
            name,
            payload,
            workspaceId: "${workspaceId}",
            projectId: "${projectId}"
          })
        });
        const stub = env.TASK_DO.idFromName("${workspaceId}:${projectId}");
        await env.TASK_DO.get(stub).fetch(req);

        return { success: true };
      },
      every: async (interval, name, payload) => {
        console.log(\`\\x1b[36m[\${env.PROJECT_ID || '${projectId}'}]\\x1b[0m 🔄 Tasks.every: [\${name}] every \${interval}\`);

        const req = new Request("http://platform/.internal/tasks/schedule", {
          method: "POST", body: JSON.stringify({
            functionSlug: "${functionSlug}",
            executeAt: Date.now(),
            name,
            payload,
            workspaceId: "${workspaceId}",
            projectId: "${projectId}",
            interval
          })
        });
        const stub = env.TASK_DO.idFromName("${workspaceId}:${projectId}");
        await env.TASK_DO.get(stub).fetch(req);

        return { success: true };
      }
    };

    const Channels = {
      accept: async (roomName, metadata) => {
        console.log(\`\\x1b[36m[\${env.PROJECT_ID || '${projectId}'}]\\x1b[0m 🔌 Channels.accept: [\${roomName}]\`, metadata);
        const req = new Request("http://platform/.internal", { headers: { Upgrade: "websocket" }});
        const stub = env.CHANNEL_DO.idFromName(roomName);
        return await env.CHANNEL_DO.get(stub).fetch(req);
      },
      broadcast: async (roomName, payload) => {
        console.log(\`\\x1b[36m[\${env.PROJECT_ID || '${projectId}'}]\\x1b[0m 📡 Channels.broadcast: [\${roomName}]\`, payload);
        const req = new Request("http://platform/broadcast", { method: "POST", body: JSON.stringify(payload) });
        const stub = env.CHANNEL_DO.idFromName(roomName);
        await env.CHANNEL_DO.get(stub).fetch(req);
      }
    };

    const Discord = {
      verify: async (request, publicKeyHex) => {
        console.log(\`\\x1b[36m[\${env.PROJECT_ID || '${projectId}'}]\\x1b[0m 👾 Discord.verify: Payload spoofed locally as True\`);
        return {}; // Truthy mock return
      }
    };

    const Slack = {
      verify: async (request, signingSecret) => {
        console.log(\`\\x1b[36m[\${env.PROJECT_ID || '${projectId}'}]\\x1b[0m 💬 Slack.verify: Payload spoofed locally as True\`);
        return {}; // Truthy mock return
      }
    };

    // 3. Hydrate exact customEnv definition expected by the tenant code
    const customEnv = {
      ...safeEnv,
      KV: virtualKV,
      Tasks,
      Channels,
      Discord,
      Slack
    };

    // 4. Secure boundary execution
    try {
      const url = new URL(req.url);

      // Native Mock Webhook Interception for Shadow DO
      if (req.method === "POST" && url.pathname === "/.internal/tasks/dispatch" && req.headers.get("x-vm-system-signature") === "local-bypass") {
        const payloadBody = await req.json();
        const taskHandler = UserCode.default?.tasks || UserCode.tasks;
        if (taskHandler) {
          console.log(\`\\x1b[36m[\${env.PROJECT_ID || '${projectId}'}]\\x1b[0m ⚡ Executing Scheduled Task: [\${payloadBody.name}]\`);
          await taskHandler(payloadBody.payload, customEnv, ctx);
          return new Response("OK");
        } else {
          console.warn(\`\\x1b[33m[\${env.PROJECT_ID || '${projectId}'}]\\x1b[0m ⚠️ Warning: Task '[\${payloadBody.name}]' dispatched, but no exported 'tasks()' handler was found.\`);
          return new Response("No tasks handler", { status: 404 });
        }
      }

      const handler = UserCode.default?.fetch || UserCode.fetch;
      if (!handler) {
        throw new Error("Application does not export a 'fetch' handler.");
      }
      return await handler(req, customEnv, ctx);
    } catch (err) {
      console.error("\\x1b[31m[Runtime Crash]\\x1b[0m", err);
      return new Response(
        JSON.stringify({ error: "Runtime Error", message: err.message, stack: err.stack }),
        { status: 500, headers: { "Content-Type": "application/json" } }
      );
    }
  }
};
`;
@@ -0,0 +1,198 @@
1
+ import fs from 'fs';
2
+ import path from 'path';
3
+ import { spawn, execSync } from 'child_process';
4
+ import { describe, it, expect, beforeAll, afterAll } from 'vitest';
5
+ import { fileURLToPath } from 'url';
6
+
7
// Resolve this module's own location (ES modules provide no __filename/__dirname globals).
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// CLI entry point under test, plus a disposable workspace the suite scaffolds into.
const CLI_PATH = path.resolve(__dirname, '..', 'src', 'cli.js');
const FIXTURE_DIR = path.resolve(__dirname, 'fixtures', 'tmp-app');

// Promise-based delay, used to let the dev server's async output settle between steps.
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
14
+
15
describe('VM CLI Workspace Scaffold', () => {
  // Remove any stale fixture directory so the scaffold test always starts clean.
  const resetFixture = () => {
    if (fs.existsSync(FIXTURE_DIR)) {
      fs.rmSync(FIXTURE_DIR, { recursive: true, force: true });
    }
  };

  beforeAll(resetFixture);
  afterAll(resetFixture);

  it('1. should run "vm init" and dynamically scaffold the workspace', () => {
    fs.mkdirSync(FIXTURE_DIR, { recursive: true });

    // Run the scaffold command inside the empty fixture workspace.
    execSync(`node ${CLI_PATH} init`, { cwd: FIXTURE_DIR });

    // Both the manifest and the entry point must be generated.
    expect(fs.existsSync(path.join(FIXTURE_DIR, 'vm.json'))).toBe(true);
    expect(fs.existsSync(path.join(FIXTURE_DIR, 'index.ts'))).toBe(true);

    const manifest = JSON.parse(fs.readFileSync(path.join(FIXTURE_DIR, 'vm.json'), 'utf8'));
    expect(manifest.workspaceId).toBe('ws_123');
  });
});
42
+
43
describe('VM CLI Emulator Bindings', () => {
  let devProcess;
  // Accumulates everything the dev server prints to stdout; tests grep it for binding logs.
  let logBuffer = "";

  // Issues a request against the locally emulated function host on the test port.
  const callApp = (pathname) =>
    fetch(`http://127.0.0.1:8799${pathname}`, { headers: { "Host": "api.localhost" } });

  beforeAll(async () => {
    // Step 1: scaffold a fresh workspace through the CLI itself.
    if (!fs.existsSync(FIXTURE_DIR)) {
      fs.mkdirSync(FIXTURE_DIR, { recursive: true });
    }
    execSync(`node ${CLI_PATH} init`, { cwd: FIXTURE_DIR });

    // Pin the workspace/project identifiers so later assertions are deterministic.
    const manifestPath = path.join(FIXTURE_DIR, 'vm.json');
    const manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf8'));
    manifest.workspaceId = 'ws_123';
    manifest.projectId = 'demo-project';
    fs.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2));

    // Step 2: replace the boilerplate entry point with a router that exercises
    // every emulated binding (Tasks, Channels, Discord, Slack, KV).
    const customApiTs = `export default {
  async fetch(req, env) {
    const url = new URL(req.url);
    if (url.pathname === '/tasks/schedule') {
      await env.Tasks.schedule("5m", "test_sync", { data: 1 });
      return new Response("OK");
    }
    if (url.pathname === '/tasks/every') {
      await env.Tasks.every("1h", "test_cron", { data: 2 });
      return new Response("OK");
    }
    if (url.pathname === '/channels/accept') {
      return await env.Channels.accept("lobby", { pub: true });
    }
    if (url.pathname === '/channels/broadcast') {
      await env.Channels.broadcast("lobby", { msg: "hello" });
      return new Response("OK");
    }
    if (url.pathname === '/discord') {
      const res = await env.Discord.verify(req, "hex_key");
      return new Response(JSON.stringify({ verified: !!res }));
    }
    if (url.pathname === '/slack') {
      const res = await env.Slack.verify(req, "secret_key");
      return new Response(JSON.stringify({ verified: !!res }));
    }
    if (url.pathname === '/kv') {
      await env.KV.put("test_key", "test_val");
      const keys = await env.KV.list({});
      return new Response(JSON.stringify(keys), { headers: { "Content-Type": "application/json" } });
    }
    return new Response("Not Found", { status: 404 });
  }
};`;

    fs.writeFileSync(path.join(FIXTURE_DIR, 'index.ts'), customApiTs);

    // Step 3: boot the dev server and tap its stdout stream.
    devProcess = spawn('node', [CLI_PATH, 'dev'], {
      cwd: FIXTURE_DIR,
      env: { ...process.env, PORT: "8799" }
    });

    await new Promise((resolve, reject) => {
      let isReady = false;
      // Fail the whole suite if the emulator never announces readiness.
      const timeout = setTimeout(() => {
        if (!isReady) {
          devProcess.kill();
          reject(new Error('CLI Dev server boot timeout.'));
        }
      }, 25000);

      devProcess.stdout.on('data', (chunk) => {
        const text = chunk.toString();
        logBuffer += text;
        // The CLI prints this banner once the emulator is accepting traffic.
        if (text.includes('Extensible Serverless VM running')) {
          isReady = true;
          clearTimeout(timeout);
          resolve(true);
        }
      });

      devProcess.stderr.on('data', (chunk) => {
        console.error("CLI STDERR:", chunk.toString());
      });
    });

    // Give Miniflare a moment to finish binding its HTTP sockets.
    await sleep(500);
  }, 30000); // generous: first boot must compile the shim

  afterAll(() => {
    if (devProcess) {
      devProcess.kill('SIGINT');
    }
    if (fs.existsSync(FIXTURE_DIR)) {
      fs.rmSync(FIXTURE_DIR, { recursive: true, force: true });
    }
  });

  // =========================================================================
  // SEPARATED BINDING TESTS — each hits one route and greps the stdout log
  // =========================================================================

  it('should completely mock Tasks.schedule and trigger the STDOUT array', async () => {
    await callApp('/tasks/schedule');
    await sleep(100);
    expect(logBuffer).toContain("Tasks.schedule: [test_sync] at 5m");
  });

  it('should beautifully mock Tasks.every recursively with STDOUT matching', async () => {
    await callApp('/tasks/every');
    await sleep(100);
    expect(logBuffer).toContain("Tasks.every: [test_cron] every 1h");
  });

  it('should intercept Channels.accept and return an HTTP 101 WebSocket Upgrade packet', async () => {
    try {
      await callApp('/channels/accept');
    } catch (e) {
      // Node's native fetch (undici) explicitly throws when receiving HTTP 101;
      // the emitted log line is what this test actually verifies.
    }
    await sleep(100);
    expect(logBuffer).toContain("Channels.accept: [lobby]");
  });

  it('should fire Channels.broadcast silently mapping via STDOUT', async () => {
    await callApp('/channels/broadcast');
    await sleep(100);
    expect(logBuffer).toContain("Channels.broadcast: [lobby]");
  });

  it('should safely spoof Discord.verify logic via simple WebCrypto circumvention', async () => {
    const discordRes = await callApp('/discord');
    const json = JSON.parse(await discordRes.text());
    expect(json.verified).toBe(true);
    await sleep(100);
    expect(logBuffer).toContain("Discord.verify: Payload spoofed locally as True");
  });

  it('should strictly bypass Slack HMAC logic locally', async () => {
    const slackRes = await callApp('/slack');
    const json = JSON.parse(await slackRes.text());
    expect(json.verified).toBe(true);
    await sleep(100);
    expect(logBuffer).toContain("Slack.verify: Payload spoofed locally as True");
  });

  it('should strip boundary prefixes mathematically from Virtual KV arrays', async () => {
    const kvResponse = await callApp('/kv');
    const kvJson = JSON.parse(await kvResponse.text());

    expect(kvJson.keys).toBeDefined();
    expect(kvJson.keys.length).toBeGreaterThan(0);
    // Crucially asserts the emulator stripped the `workspaceId:projectId:` prefix.
    expect(kvJson.keys[0].name).toBe('test_key');
  });
});