devflare 1.0.0-next.20 → 1.0.0-next.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LLM.md +9 -4
- package/README.md +10 -2
- package/dist/bridge/gateway-runtime.d.ts +1 -1
- package/dist/bridge/gateway-runtime.d.ts.map +1 -1
- package/dist/bridge/proxy.d.ts +2 -0
- package/dist/bridge/proxy.d.ts.map +1 -1
- package/dist/bridge/server.d.ts.map +1 -1
- package/dist/browser.js +3 -3
- package/dist/build-b1z6wqet.js +54 -0
- package/dist/build-qsgnme4z.js +54 -0
- package/dist/build-x7maz3eb.js +54 -0
- package/dist/cli/commands/config.d.ts.map +1 -1
- package/dist/cli/commands/dev.d.ts +1 -0
- package/dist/cli/commands/dev.d.ts.map +1 -1
- package/dist/cli/commands/doctor.d.ts.map +1 -1
- package/dist/cli/help-pages/pages/core.d.ts.map +1 -1
- package/dist/cli/index.js +1 -1
- package/dist/config-qj5jw8km.js +93 -0
- package/dist/deploy-jf3yczsz.js +1055 -0
- package/dist/deploy-nh5tbv45.js +1055 -0
- package/dist/deploy-xqm869nf.js +1055 -0
- package/dist/dev-bgpxrwms.js +2551 -0
- package/dist/dev-cme5de75.js +2551 -0
- package/dist/dev-kzs65xcr.js +2551 -0
- package/dist/dev-server/dev-server-state.d.ts +2 -0
- package/dist/dev-server/dev-server-state.d.ts.map +1 -1
- package/dist/dev-server/miniflare-dev-config.d.ts +4 -0
- package/dist/dev-server/miniflare-dev-config.d.ts.map +1 -1
- package/dist/dev-server/server.d.ts.map +1 -1
- package/dist/dev-zgx7fhe9.js +2553 -0
- package/dist/doctor-0a2brpyz.js +259 -0
- package/dist/index-05pbj4hy.js +1193 -0
- package/dist/index-35bmgpfw.js +573 -0
- package/dist/index-3edvz3hs.js +124 -0
- package/dist/index-4se6krdj.js +574 -0
- package/dist/index-50em8s6c.js +898 -0
- package/dist/index-666tdx14.js +895 -0
- package/dist/index-8p7rxkbs.js +1426 -0
- package/dist/index-aqrwyy57.js +288 -0
- package/dist/index-bj5avaba.js +109 -0
- package/dist/index-c1cj9085.js +2250 -0
- package/dist/index-dgww0ewn.js +574 -0
- package/dist/index-f1yshy4s.js +412 -0
- package/dist/index-hbxkmb1q.js +1426 -0
- package/dist/index-hpwa6vsw.js +239 -0
- package/dist/index-jwd3fanx.js +412 -0
- package/dist/index-kxc4gtyt.js +574 -0
- package/dist/index-nxkesg55.js +68 -0
- package/dist/index-p7q23nce.js +1031 -0
- package/dist/index-pt49cgjv.js +1426 -0
- package/dist/index-rp0aye39.js +1426 -0
- package/dist/index-s9q605sq.js +1033 -0
- package/dist/index-tknbyxzn.js +2202 -0
- package/dist/index-w36q6819.js +895 -0
- package/dist/index-xp0qkkxf.js +68 -0
- package/dist/index-zawn5tte.js +109 -0
- package/dist/index-zpy9caxn.js +1193 -0
- package/dist/index.js +4 -4
- package/dist/runtime/index.js +4 -4
- package/dist/sveltekit/index.js +5 -4
- package/dist/sveltekit/local-bindings.d.ts.map +1 -1
- package/dist/test/index.js +62 -440
- package/dist/test/resolve-service-bindings.d.ts +63 -3
- package/dist/test/resolve-service-bindings.d.ts.map +1 -1
- package/dist/types-vhvt4hvm.js +693 -0
- package/dist/utils/send-email.d.ts.map +1 -1
- package/dist/utils/send-email.js +1 -1
- package/dist/vite/index.js +4 -3
- package/dist/vite/plugin-context.d.ts +3 -1
- package/dist/vite/plugin-context.d.ts.map +1 -1
- package/dist/vite/plugin-programmatic.d.ts.map +1 -1
- package/dist/vite/plugin-service-bindings.d.ts +13 -0
- package/dist/vite/plugin-service-bindings.d.ts.map +1 -0
- package/dist/vite/plugin.d.ts +4 -2
- package/dist/vite/plugin.d.ts.map +1 -1
- package/dist/worker-entrypoint-3rmzd4c1.js +15 -0
- package/package.json +1 -1
package/LLM.md
CHANGED
|
@@ -1278,12 +1278,12 @@ bunx --bun devflare productions rollback --help
|
|
|
1278
1278
|
| Command | Primary job | What the deeper help covers |
|
|
1279
1279
|
| --- | --- | --- |
|
|
1280
1280
|
| `init` | Scaffold a new package. | Template choice and generated starter scripts. |
|
|
1281
|
-
| `dev` | Start local development. | Worker-only defaults, Vite auto-detection, logging, and persistence. |
|
|
1281
|
+
| `dev` | Start local development. | Worker-only defaults, Vite auto-detection, `ref()` service workers, runtime-port selection, logging, and persistence. |
|
|
1282
1282
|
| `build` | Compile deploy-ready artifacts. | Environment resolution and Wrangler-facing output. |
|
|
1283
1283
|
| `deploy` | Ship explicitly to production or preview. | Target selection, dry runs, preview naming, messages, and tags. |
|
|
1284
1284
|
| `types` | Generate `env.d.ts` and typed bindings. | Custom output paths plus entrypoint and Durable Object discovery. |
|
|
1285
|
-
| `doctor` | Check local project health. | Config, package, TypeScript, Vite,
|
|
1286
|
-
| `config` | Print resolved config. | `print`, raw Devflare JSON,
|
|
1285
|
+
| `doctor` | Check local project health. | Config, package, TypeScript, Vite, scope-aware local/deploy artifact diagnostics, and optional plugin guidance. |
|
|
1286
|
+
| `config` | Print resolved config. | `print`, raw Devflare JSON, compiled Wrangler JSON, and build/local/deploy resolution phases. |
|
|
1287
1287
|
| `account` | Inspect Cloudflare account inventories and limits. | Resource lists, usage limits, and interactive global/workspace selection. |
|
|
1288
1288
|
| `login` | Authenticate with Cloudflare via Wrangler. | `--force` behavior and reuse of existing sessions. |
|
|
1289
1289
|
| `previews` | Operate on preview lifecycle state. | `list`, `bindings`, and `cleanup`. |
|
|
@@ -1352,7 +1352,11 @@ When the job changes from building to operating, switch command families instead
|
|
|
1352
1352
|
##### Key points
|
|
1353
1353
|
|
|
1354
1354
|
- Run `types` after binding or entrypoint changes so `env.d.ts` stays honest.
|
|
1355
|
+
- Use `dev --runtime-port <port>` or `DEVFLARE_RUNTIME_PORT` when another local project already owns the default 8787 runtime port.
|
|
1356
|
+
- Use `config --phase local --format wrangler` when you want local config inspection without Cloudflare account lookups.
|
|
1357
|
+
- Use `ref()` service bindings for local full-stack packages; Devflare starts those referenced workers in CLI dev and exposes them as Vite auxiliary workers for framework dev.
|
|
1355
1358
|
- Run `build` or `config print --format wrangler` when the compiled shape matters more than the dev server feeling healthy.
|
|
1359
|
+
- Use `doctor --scope local` when generated deploy artifacts are intentionally absent during a local-only loop.
|
|
1356
1360
|
- Keep preview and production intent explicit in the final deploy command instead of hiding it in a generic script name.
|
|
1357
1361
|
- Use the nested help pages when a lifecycle command reaches `--apply`, account selection, rollback, or cleanup territory.
|
|
1358
1362
|
|
|
@@ -1389,7 +1393,8 @@ bunx --bun devflare deploy --prod
|
|
|
1389
1393
|
|
|
1390
1394
|
```bash
|
|
1391
1395
|
bunx --bun devflare config print --format wrangler
|
|
1392
|
-
bunx --bun devflare
|
|
1396
|
+
bunx --bun devflare config --phase local --format wrangler
|
|
1397
|
+
bunx --bun devflare doctor --scope local
|
|
1393
1398
|
bunx --bun devflare previews bindings --scope next
|
|
1394
1399
|
bunx --bun devflare productions versions
|
|
1395
1400
|
```
|
package/README.md
CHANGED
|
@@ -17,10 +17,11 @@ For a worker-only project, install only Devflare:
|
|
|
17
17
|
bun add -d devflare
|
|
18
18
|
```
|
|
19
19
|
|
|
20
|
-
For Vite-backed apps, add Vite
|
|
20
|
+
For Vite-backed apps, add Vite. Add the Cloudflare Vite plugin only when your
|
|
21
|
+
own `vite.config.*` calls it directly:
|
|
21
22
|
|
|
22
23
|
```bash
|
|
23
|
-
bun add -d devflare vite
|
|
24
|
+
bun add -d devflare vite
|
|
24
25
|
```
|
|
25
26
|
|
|
26
27
|
Assumptions used by the examples: Wrangler 4, Miniflare 4,
|
|
@@ -182,6 +183,13 @@ for examples with file paths.
|
|
|
182
183
|
| `devflare version` | print the installed version |
|
|
183
184
|
| `devflare worker` | run Worker control-plane helpers |
|
|
184
185
|
|
|
186
|
+
Useful local-first switches:
|
|
187
|
+
|
|
188
|
+
- `devflare dev --runtime-port 8788` or `DEVFLARE_RUNTIME_PORT=8788` moves the local Miniflare runtime/bridge off the default `127.0.0.1:8787`. `--bridge-port` and `DEVFLARE_BRIDGE_PORT` are aliases for the same runtime port.
|
|
189
|
+
- `devflare doctor --scope local` skips deploy-readiness artifact warnings during a local-only loop.
|
|
190
|
+
- `devflare config --phase local --format wrangler` prints the local-runtime Wrangler shape without Cloudflare account resource lookups.
|
|
191
|
+
- `devflare dev` and `devflare/vite` start `ref()` service-binding workers inside the same local runtime, including their local KV, D1, R2, Queue producer/consumer, vars, Durable Objects, and other Miniflare-backed bindings.
|
|
192
|
+
|
|
185
193
|
## Support Stance Index
|
|
186
194
|
|
|
187
195
|
The full support matrix is in `/docs/feature-index`. The short version is:
|
|
@@ -4,5 +4,5 @@
|
|
|
4
4
|
* module. All symbols are declared with `function`/`const` so they are
|
|
5
5
|
* hoisted in both embedding sites.
|
|
6
6
|
*/
|
|
7
|
-
export declare const GATEWAY_RUNTIME_JS = "\nconst RAW_EMAIL = 'EmailMessage::raw'\n\nfunction arrayBufferToBase64(buffer) {\n\tconst bytes = new Uint8Array(buffer)\n\tlet binary = ''\n\tfor (let i = 0; i < bytes.byteLength; i++) binary += String.fromCharCode(bytes[i])\n\treturn btoa(binary)\n}\n\nfunction base64ToArrayBuffer(base64) {\n\tconst binary = atob(base64)\n\tconst bytes = new Uint8Array(binary.length)\n\tfor (let i = 0; i < binary.length; i++) bytes[i] = binary.charCodeAt(i)\n\treturn bytes.buffer\n}\n\nfunction serializeR2Object(obj) {\n\tif (!obj) return null\n\treturn {\n\t\t__type: 'R2Object',\n\t\tkey: obj.key,\n\t\tversion: obj.version,\n\t\tsize: obj.size,\n\t\tetag: obj.etag,\n\t\thttpEtag: obj.httpEtag,\n\t\tchecksums: obj.checksums,\n\t\tuploaded: obj.uploaded?.toISOString(),\n\t\thttpMetadata: obj.httpMetadata,\n\t\tcustomMetadata: obj.customMetadata,\n\t\trange: obj.range,\n\t\tstorageClass: obj.storageClass\n\t}\n}\n\nfunction serializeR2ObjectBody(obj, bodyData) {\n\tif (!obj) return null\n\treturn {\n\t\t__type: 'R2ObjectBody',\n\t\tkey: obj.key,\n\t\tversion: obj.version,\n\t\tsize: obj.size,\n\t\tetag: obj.etag,\n\t\thttpEtag: obj.httpEtag,\n\t\tchecksums: obj.checksums,\n\t\tuploaded: obj.uploaded?.toISOString(),\n\t\thttpMetadata: obj.httpMetadata,\n\t\tcustomMetadata: obj.customMetadata,\n\t\trange: obj.range,\n\t\tstorageClass: obj.storageClass,\n\t\tbodyData\n\t}\n}\n\nfunction serializeR2Objects(result) {\n\tif (!result) return null\n\treturn {\n\t\tobjects: result.objects.map(serializeR2Object),\n\t\ttruncated: result.truncated,\n\t\tcursor: result.cursor,\n\t\tdelimitedPrefixes: result.delimitedPrefixes\n\t}\n}\n\nasync function serializeResponse(response) {\n\tlet body = null\n\tif (response.body) {\n\t\tconst bytes = await response.arrayBuffer()\n\t\tif (bytes.byteLength > 0) {\n\t\t\tbody = { type: 'bytes', data: arrayBufferToBase64(bytes) }\n\t\t}\n\t}\n\treturn {\n\t\tstatus: response.status,\n\t\tstatusText: 
response.statusText,\n\t\theaders: [...response.headers.entries()],\n\t\tbody\n\t}\n}\n\nfunction createEmailMessageRaw(raw) {\n\tif (typeof raw === 'string' || raw instanceof ReadableStream) {\n\t\treturn raw\n\t}\n\tif (raw instanceof ArrayBuffer || raw instanceof Uint8Array) {\n\t\treturn new Response(raw).body\n\t}\n\tthrow new Error('Unsupported EmailMessage raw payload')\n}\n\nfunction isDurableObjectNamespace(binding) {\n\treturn !!binding\n\t\t&& typeof binding.idFromName === 'function'\n\t\t&& typeof binding.idFromString === 'function'\n\t\t&& typeof binding.newUniqueId === 'function'\n}\n\n/**\n * Execute an RPC method against the gateway's bindings.\n *\n * Method format: \"binding.operation\". Operations must be namespaced by\n * binding kind (e.g. \"kv.get\", \"r2.head\", \"d1.stmt.first\", \"do.fetch\",\n * \"queue.send\", \"email.send\", \"ai.run\"). Bare verbs and the legacy\n * \"stmt.*\" / \"stub.*\" sub-prefixes were removed in B3-final and now throw.\n * Method vocabulary must stay in sync with the canonical server in\n * src/bridge/server.ts.\n */\nasync function executeRpcMethod(method, params, env, _ctx) {\n\tconst parts = method.split('.')\n\tif (parts.length < 2) throw new Error('Invalid method format: ' + method)\n\n\tconst bindingName = parts[0]\n\tconst operation = parts.slice(1).join('.')\n\tconst binding = env[bindingName]\n\n\tif (!binding) throw new Error('Binding not found: ' + bindingName)\n\n\tconst isNamespaced =\n\t\toperation.indexOf('kv.') === 0 ||\n\t\toperation.indexOf('r2.') === 0 ||\n\t\toperation.indexOf('d1.') === 0 ||\n\t\toperation.indexOf('do.') === 0 ||\n\t\toperation.indexOf('queue.') === 0 ||\n\t\toperation.indexOf('email.') === 0 ||\n\t\toperation.indexOf('ai.') === 0 ||\n\t\toperation.indexOf('workflow.') === 0 ||\n\t\toperation.indexOf('var.') === 0\n\tif (!isNamespaced) {\n\t\tthrow new Error(\n\t\t\t\"[devflare][bridge] Unsupported bridge operation '\" + operation + \"' for binding '\" + bindingName + \"'. 
\"\n\t\t\t+ \"Bare verbs and the legacy stmt.*/stub.* sub-prefixes were removed in B3-final; \"\n\t\t\t+ \"use the namespaced form (e.g. kv.get, r2.put, d1.stmt.first, do.fetch).\"\n\t\t)\n\t}\n\n\t// KV\n\tif (operation === 'kv.get') return binding.get(params[0], params[1])\n\tif (operation === 'kv.put') return binding.put(params[0], params[1], params[2])\n\tif (operation === 'kv.delete') return binding.delete(params[0])\n\tif (operation === 'kv.list') return binding.list(params[0])\n\tif (operation === 'kv.getWithMetadata') return binding.getWithMetadata(params[0], params[1])\n\n\t// DO get (returns DOStub reference)\n\tif (operation === 'do.get') {\n\t\treturn { __type: 'DOStub', binding: bindingName, id: params[0] }\n\t}\n\n\t// R2\n\tif (operation === 'r2.head') return serializeR2Object(await binding.head(params[0]))\n\tif (operation === 'r2.get') {\n\t\tconst obj = await binding.get(params[0], params[1])\n\t\tif (!obj) return null\n\t\tconst body = await obj.arrayBuffer()\n\t\treturn serializeR2ObjectBody(obj, arrayBufferToBase64(body))\n\t}\n\tif (operation === 'r2.put') {\n\t\tlet value = params[1]\n\t\tif (value && typeof value === 'object') {\n\t\t\tif (value.__type === 'ArrayBuffer' || value.__type === 'Uint8Array') {\n\t\t\t\tvalue = base64ToArrayBuffer(value.data)\n\t\t\t}\n\t\t}\n\t\treturn serializeR2Object(await binding.put(params[0], value, params[2]))\n\t}\n\tif (operation === 'r2.delete') return binding.delete(params[0])\n\tif (operation === 'r2.list') return serializeR2Objects(await binding.list(params[0]))\n\n\t// D1\n\tif (operation === 'd1.exec') return binding.exec(params[0])\n\tif (operation === 'd1.batch') {\n\t\tconst statements = params[0].map((s) => binding.prepare(s.sql).bind(...(s.bindings || [])))\n\t\treturn binding.batch(statements)\n\t}\n\tif (operation.indexOf('d1.stmt.') === 0) {\n\t\tconst mode = operation.split('.')[2]\n\t\tconst [sql, ...rest] = params\n\t\tlet bindings = rest\n\t\tlet extraParam\n\t\tif (mode === 'first' || 
mode === 'raw') {\n\t\t\textraParam = rest[rest.length - 1]\n\t\t\tbindings = rest.slice(0, -1)\n\t\t}\n\t\tlet stmt = binding.prepare(sql)\n\t\tif (bindings.length > 0) stmt = stmt.bind(...bindings)\n\t\tif (mode === 'first') {\n\t\t\tif (typeof extraParam === 'string' && extraParam.length > 0) return stmt.first(extraParam)\n\t\t\treturn stmt.first()\n\t\t}\n\t\tif (mode === 'all') return stmt.all()\n\t\tif (mode === 'run') return stmt.run()\n\t\tif (mode === 'raw') return stmt.raw(extraParam)\n\t\tthrow new Error('Unknown stmt mode: ' + mode)\n\t}\n\n\t// Durable Objects\n\tif (operation === 'do.idFromName') {\n\t\tconst id = binding.idFromName(params[0])\n\t\treturn { __type: 'DOId', hex: id.toString() }\n\t}\n\tif (operation === 'do.idFromString') {\n\t\tconst id = binding.idFromString(params[0])\n\t\treturn { __type: 'DOId', hex: id.toString() }\n\t}\n\tif (operation === 'do.newUniqueId') {\n\t\tconst id = binding.newUniqueId(params[0])\n\t\treturn { __type: 'DOId', hex: id.toString() }\n\t}\n\tif (operation === 'do.fetch') {\n\t\tconst [, serializedId, serializedReq] = params\n\t\tconst id = binding.idFromString(serializedId.hex)\n\t\tconst stub = binding.get(id)\n\t\tconst response = await stub.fetch(new Request(serializedReq.url, {\n\t\t\tmethod: serializedReq.method,\n\t\t\theaders: serializedReq.headers,\n\t\t\tbody: serializedReq.body?.type === 'bytes'\n\t\t\t\t? 
base64ToArrayBuffer(serializedReq.body.data)\n\t\t\t\t: undefined\n\t\t}))\n\t\treturn serializeResponse(response)\n\t}\n\tif (operation === 'do.rpc') {\n\t\tconst [, serializedId, methodName, args] = params\n\t\tconst id = binding.idFromString(serializedId.hex)\n\t\tconst stub = binding.get(id)\n\t\tconst response = await stub.fetch(new Request('http://do/_rpc', {\n\t\t\tmethod: 'POST',\n\t\t\theaders: { 'Content-Type': 'application/json' },\n\t\t\tbody: JSON.stringify({ method: methodName, params: args })\n\t\t}))\n\t\tconst result = await response.json()\n\t\tif (!result.ok) throw new Error(result.error?.message || 'RPC failed')\n\t\treturn result.result\n\t}\n\n\t// Queues\n\tif (operation === 'queue.send') return binding.send(params[0], params[1])\n\tif (operation === 'queue.sendBatch') return binding.sendBatch(params[0], params[1])\n\n\t// Send Email\n\tif (operation === 'email.send') {\n\t\tif (binding && typeof binding.send === 'function') {\n\t\t\tconst message = params[0]\n\t\t\tif (message && typeof message === 'object' && 'from' in message && 'to' in message && 'raw' in message) {\n\t\t\t\treturn binding.send({\n\t\t\t\t\tfrom: message.from,\n\t\t\t\t\tto: message.to,\n\t\t\t\t\t[RAW_EMAIL]: createEmailMessageRaw(message.raw)\n\t\t\t\t})\n\t\t\t}\n\t\t\treturn binding.send(message)\n\t\t}\n\t\treturn { ok: true, simulated: true }\n\t}\n\n\t// Workflows\n\tif (operation === 'workflow.create') {\n\t\treturn serializeWorkflowInstance(await binding.create(params[0]))\n\t}\n\tif (operation === 'workflow.get') {\n\t\treturn serializeWorkflowInstance(await binding.get(params[0]))\n\t}\n\tif (operation === 'workflow.status') {\n\t\treturn (await binding.get(params[0])).status()\n\t}\n\tif (operation === 'workflow.pause') {\n\t\treturn (await binding.get(params[0])).pause()\n\t}\n\tif (operation === 'workflow.resume') {\n\t\treturn (await binding.get(params[0])).resume()\n\t}\n\tif (operation === 'workflow.terminate') {\n\t\treturn (await 
binding.get(params[0])).terminate()\n\t}\n\tif (operation === 'workflow.restart') {\n\t\treturn (await binding.get(params[0])).restart()\n\t}\n\tif (operation === 'workflow.sendEvent') {\n\t\treturn (await binding.get(params[0])).sendEvent(params[1])\n\t}\n\n\t// AI / generic run()\n\tif (operation === 'ai.run') {\n\t\tif (typeof binding.run !== 'function') {\n\t\t\tthrow new Error('Binding ' + bindingName + ' does not support run(): ' + method)\n\t\t}\n\t\treturn binding.run(params[0], params[1])\n\t}\n\n\tthrow new Error('Unknown operation: ' + method)\n}\n\nfunction serializeWorkflowInstance(instance) {\n\treturn {\n\t\t__type: 'WorkflowInstance',\n\t\tid: instance.id\n\t}\n}\n\n// ---------------------------------------------------------------------------\n// WebSocket bridge (shared with src/bridge/server.ts in shape)\n// ---------------------------------------------------------------------------\n// NOTE: wsProxies is intentionally created per handleBridgeWebSocket call so\n// state never leaks across connections or across gateway-script regenerations.\n\nasync function handleBridgeRpcCall(msg, ws, env, ctx) {\n\ttry {\n\t\tconst result = await executeRpcMethod(msg.method, msg.params, env, ctx)\n\t\tws.send(JSON.stringify({ t: 'rpc.ok', id: msg.id, result }))\n\t} catch (error) {\n\t\tws.send(JSON.stringify({\n\t\t\tt: 'rpc.err',\n\t\t\tid: msg.id,\n\t\t\terror: {\n\t\t\t\tcode: error?.code || 'INTERNAL_ERROR',\n\t\t\t\tmessage: error?.message || String(error)\n\t\t\t}\n\t\t}))\n\t}\n}\n\nasync function handleBridgeWsOpen(msg, ws, env, wsProxies) {\n\ttry {\n\t\tconst binding = env[msg.target.binding]\n\t\tconst id = binding.idFromString(msg.target.id)\n\t\tconst stub = binding.get(id)\n\n\t\tconst headers = new Headers(msg.target.headers || [])\n\t\theaders.set('Upgrade', 'websocket')\n\n\t\tconst response = await stub.fetch(new Request(msg.target.url, { method: 'GET', headers }))\n\t\tconst doWs = response.webSocket\n\n\t\tif (!doWs) 
{\n\t\t\tws.send(JSON.stringify({\n\t\t\t\tt: 'rpc.err',\n\t\t\t\tid: 'ws_' + msg.wid,\n\t\t\t\terror: { code: 'WS_FAILED', message: 'No WebSocket returned' }\n\t\t\t}))\n\t\t\treturn\n\t\t}\n\n\t\tdoWs.accept()\n\t\twsProxies.set(msg.wid, { doWs })\n\n\t\tdoWs.addEventListener('message', (event) => {\n\t\t\tconst isText = typeof event.data === 'string'\n\t\t\tconst data = isText ? event.data : arrayBufferToBase64(event.data)\n\t\t\tws.send(JSON.stringify({ t: 'ws.data', wid: msg.wid, data, isText }))\n\t\t})\n\n\t\tdoWs.addEventListener('close', (event) => {\n\t\t\tws.send(JSON.stringify({ t: 'ws.close', wid: msg.wid, code: event.code, reason: event.reason }))\n\t\t\twsProxies.delete(msg.wid)\n\t\t})\n\n\t\tws.send(JSON.stringify({ t: 'ws.opened', wid: msg.wid }))\n\t} catch (error) {\n\t\tws.send(JSON.stringify({\n\t\t\tt: 'rpc.err',\n\t\t\tid: 'ws_' + msg.wid,\n\t\t\terror: { code: 'WS_FAILED', message: error.message }\n\t\t}))\n\t}\n}\n\nfunction handleBridgeWsClose(msg, wsProxies) {\n\tconst proxy = wsProxies.get(msg.wid)\n\tif (proxy) {\n\t\tproxy.doWs.close(msg.code, msg.reason)\n\t\twsProxies.delete(msg.wid)\n\t}\n}\n\nasync function handleBridgeJsonMessage(data, ws, env, ctx, wsProxies) {\n\tconst msg = JSON.parse(data)\n\tswitch (msg.t) {\n\t\tcase 'hello':\n\t\t\t// v2 handshake \u2014 acknowledge with welcome echoing the negotiated\n\t\t\t// capability intersection. 
Capabilities advertised by the gateway\n\t\t\t// are kept in sync with src/bridge/client.ts (BRIDGE_CLIENT_CAPABILITIES).\n\t\t\tws.send(JSON.stringify({\n\t\t\t\tt: 'welcome',\n\t\t\t\tprotocolVersion: 2,\n\t\t\t\tcapabilities: ['streams', 'ws-relay', 'http-transfer']\n\t\t\t\t\t.filter((c) => Array.isArray(msg.capabilities) && msg.capabilities.includes(c))\n\t\t\t\t\t.sort()\n\t\t\t}))\n\t\t\tbreak\n\t\tcase 'rpc.call':\n\t\t\tawait handleBridgeRpcCall(msg, ws, env, ctx)\n\t\t\tbreak\n\t\tcase 'ws.open':\n\t\t\tawait handleBridgeWsOpen(msg, ws, env, wsProxies)\n\t\t\tbreak\n\t\tcase 'ws.close':\n\t\t\thandleBridgeWsClose(msg, wsProxies)\n\t\t\tbreak\n\t}\n}\n\nfunction handleBridgeWebSocket(request, env, ctx) {\n\tconst { 0: client, 1: server } = new WebSocketPair()\n\tserver.accept()\n\n\t// Per-connection state: recreated for every bridge client so reloads and\n\t// concurrent clients never share WS proxy entries.\n\tconst wsProxies = new Map()\n\n\tserver.addEventListener('message', async (event) => {\n\t\ttry {\n\t\t\tif (typeof event.data === 'string') {\n\t\t\t\tawait handleBridgeJsonMessage(event.data, server, env, ctx, wsProxies)\n\t\t\t}\n\t\t} catch (error) {\n\t\t\tconsole.error('[Gateway] Error:', error)\n\t\t}\n\t})\n\n\tserver.addEventListener('close', () => {\n\t\tfor (const proxy of wsProxies.values()) {\n\t\t\t// Best-effort cleanup: the DO-side WS may already be closed or in an\n\t\t\t// invalid state; any throw here would abort sibling closes. 
Surface\n\t\t\t// the swallowed error when DEVFLARE_DEBUG_BRIDGE is enabled.\n\t\t\ttry { proxy.doWs.close() } catch (error) {\n\t\t\t\tif (globalThis.DEVFLARE_DEBUG_BRIDGE) {\n\t\t\t\t\tconsole.warn('[devflare:bridge] proxy.doWs.close() failed', error)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\twsProxies.clear()\n\t})\n\n\treturn new Response(null, { status: 101, webSocket: client })\n}\n\n// ---------------------------------------------------------------------------\n// HTTP transfer for R2 bodies (shared with src/bridge/server.ts in shape)\n// ---------------------------------------------------------------------------\n\nasync function handleHttpTransfer(request, env, url) {\n\tconst transferIdEncoded = url.pathname.split('/').pop()\n\tconst transferId = decodeURIComponent(transferIdEncoded || '')\n\tconst [binding, ...keyParts] = transferId.split(':')\n\tconst key = keyParts.join(':')\n\tconst bucket = env[binding]\n\n\tif (!bucket) return new Response('Bucket not found: ' + binding, { status: 404 })\n\n\tif (request.method === 'PUT' || request.method === 'POST') {\n\t\tconst result = await bucket.put(key, request.body)\n\t\treturn new Response(JSON.stringify(serializeR2Object(result)), {\n\t\t\theaders: { 'Content-Type': 'application/json' }\n\t\t})\n\t}\n\n\tif (request.method === 'GET') {\n\t\tconst object = await bucket.get(key)\n\t\tif (!object) return new Response('Not found', { status: 404 })\n\t\treturn new Response(object.body, {\n\t\t\theaders: {\n\t\t\t\t'Content-Type': object.httpMetadata?.contentType || 'application/octet-stream',\n\t\t\t\t'Content-Length': String(object.size)\n\t\t\t}\n\t\t})\n\t}\n\n\treturn new Response('Method not allowed', { status: 405 })\n}\n";
|
|
7
|
+
export declare const GATEWAY_RUNTIME_JS = "\nconst RAW_EMAIL = 'EmailMessage::raw'\n\nfunction arrayBufferToBase64(buffer) {\n\tconst bytes = new Uint8Array(buffer)\n\tlet binary = ''\n\tfor (let i = 0; i < bytes.byteLength; i++) binary += String.fromCharCode(bytes[i])\n\treturn btoa(binary)\n}\n\nfunction base64ToArrayBuffer(base64) {\n\tconst binary = atob(base64)\n\tconst bytes = new Uint8Array(binary.length)\n\tfor (let i = 0; i < binary.length; i++) bytes[i] = binary.charCodeAt(i)\n\treturn bytes.buffer\n}\n\nfunction serializeR2Object(obj) {\n\tif (!obj) return null\n\treturn {\n\t\t__type: 'R2Object',\n\t\tkey: obj.key,\n\t\tversion: obj.version,\n\t\tsize: obj.size,\n\t\tetag: obj.etag,\n\t\thttpEtag: obj.httpEtag,\n\t\tchecksums: obj.checksums,\n\t\tuploaded: obj.uploaded?.toISOString(),\n\t\thttpMetadata: obj.httpMetadata,\n\t\tcustomMetadata: obj.customMetadata,\n\t\trange: obj.range,\n\t\tstorageClass: obj.storageClass\n\t}\n}\n\nfunction serializeR2ObjectBody(obj, bodyData) {\n\tif (!obj) return null\n\treturn {\n\t\t__type: 'R2ObjectBody',\n\t\tkey: obj.key,\n\t\tversion: obj.version,\n\t\tsize: obj.size,\n\t\tetag: obj.etag,\n\t\thttpEtag: obj.httpEtag,\n\t\tchecksums: obj.checksums,\n\t\tuploaded: obj.uploaded?.toISOString(),\n\t\thttpMetadata: obj.httpMetadata,\n\t\tcustomMetadata: obj.customMetadata,\n\t\trange: obj.range,\n\t\tstorageClass: obj.storageClass,\n\t\tbodyData\n\t}\n}\n\nfunction serializeR2Objects(result) {\n\tif (!result) return null\n\treturn {\n\t\tobjects: result.objects.map(serializeR2Object),\n\t\ttruncated: result.truncated,\n\t\tcursor: result.cursor,\n\t\tdelimitedPrefixes: result.delimitedPrefixes\n\t}\n}\n\nasync function serializeResponse(response) {\n\tlet body = null\n\tif (response.body) {\n\t\tconst bytes = await response.arrayBuffer()\n\t\tif (bytes.byteLength > 0) {\n\t\t\tbody = { type: 'bytes', data: arrayBufferToBase64(bytes) }\n\t\t}\n\t}\n\treturn {\n\t\tstatus: response.status,\n\t\tstatusText: 
response.statusText,\n\t\theaders: [...response.headers.entries()],\n\t\tbody\n\t}\n}\n\nfunction deserializeRequest(serializedReq) {\n\treturn new Request(serializedReq.url, {\n\t\tmethod: serializedReq.method,\n\t\theaders: serializedReq.headers,\n\t\tbody: serializedReq.body?.type === 'bytes'\n\t\t\t? base64ToArrayBuffer(serializedReq.body.data)\n\t\t\t: undefined,\n\t\tredirect: serializedReq.redirect\n\t})\n}\n\nfunction createEmailMessageRaw(raw) {\n\tif (typeof raw === 'string' || raw instanceof ReadableStream) {\n\t\treturn raw\n\t}\n\tif (raw instanceof ArrayBuffer || raw instanceof Uint8Array) {\n\t\treturn new Response(raw).body\n\t}\n\tthrow new Error('Unsupported EmailMessage raw payload')\n}\n\nfunction isDurableObjectNamespace(binding) {\n\treturn !!binding\n\t\t&& typeof binding.idFromName === 'function'\n\t\t&& typeof binding.idFromString === 'function'\n\t\t&& typeof binding.newUniqueId === 'function'\n}\n\n/**\n * Execute an RPC method against the gateway's bindings.\n *\n * Method format: \"binding.operation\". Operations must be namespaced by\n * binding kind (e.g. \"kv.get\", \"r2.head\", \"d1.stmt.first\", \"do.fetch\",\n * \"service.fetch\", \"queue.send\", \"email.send\", \"ai.run\"). 
Bare verbs and the legacy\n * \"stmt.*\" / \"stub.*\" sub-prefixes were removed in B3-final and now throw.\n * Method vocabulary must stay in sync with the canonical server in\n * src/bridge/server.ts.\n */\nasync function executeRpcMethod(method, params, env, _ctx) {\n\tconst parts = method.split('.')\n\tif (parts.length < 2) throw new Error('Invalid method format: ' + method)\n\n\tconst bindingName = parts[0]\n\tconst operation = parts.slice(1).join('.')\n\tconst binding = env[bindingName]\n\n\tif (!binding) throw new Error('Binding not found: ' + bindingName)\n\n\tconst isNamespaced =\n\t\toperation.indexOf('kv.') === 0 ||\n\t\toperation.indexOf('r2.') === 0 ||\n\t\toperation.indexOf('d1.') === 0 ||\n\t\toperation.indexOf('do.') === 0 ||\n\t\toperation.indexOf('service.') === 0 ||\n\t\toperation.indexOf('queue.') === 0 ||\n\t\toperation.indexOf('email.') === 0 ||\n\t\toperation.indexOf('ai.') === 0 ||\n\t\toperation.indexOf('workflow.') === 0 ||\n\t\toperation.indexOf('var.') === 0\n\tif (!isNamespaced) {\n\t\tthrow new Error(createUnsupportedBridgeOperationErrorMessage(bindingName, operation))\n\t}\n\n\t// KV\n\tif (operation === 'kv.get') return binding.get(params[0], params[1])\n\tif (operation === 'kv.put') return binding.put(params[0], params[1], params[2])\n\tif (operation === 'kv.delete') return binding.delete(params[0])\n\tif (operation === 'kv.list') return binding.list(params[0])\n\tif (operation === 'kv.getWithMetadata') return binding.getWithMetadata(params[0], params[1])\n\n\t// DO get (returns DOStub reference)\n\tif (operation === 'do.get') {\n\t\treturn { __type: 'DOStub', binding: bindingName, id: params[0] }\n\t}\n\n\t// R2\n\tif (operation === 'r2.head') return serializeR2Object(await binding.head(params[0]))\n\tif (operation === 'r2.get') {\n\t\tconst obj = await binding.get(params[0], params[1])\n\t\tif (!obj) return null\n\t\tconst body = await obj.arrayBuffer()\n\t\treturn serializeR2ObjectBody(obj, arrayBufferToBase64(body))\n\t}\n\tif 
(operation === 'r2.put') {\n\t\tlet value = params[1]\n\t\tif (value && typeof value === 'object') {\n\t\t\tif (value.__type === 'ArrayBuffer' || value.__type === 'Uint8Array') {\n\t\t\t\tvalue = base64ToArrayBuffer(value.data)\n\t\t\t}\n\t\t}\n\t\treturn serializeR2Object(await binding.put(params[0], value, params[2]))\n\t}\n\tif (operation === 'r2.delete') return binding.delete(params[0])\n\tif (operation === 'r2.list') return serializeR2Objects(await binding.list(params[0]))\n\n\t// D1\n\tif (operation === 'd1.exec') return binding.exec(params[0])\n\tif (operation === 'd1.batch') {\n\t\tconst statements = params[0].map((s) => binding.prepare(s.sql).bind(...(s.bindings || [])))\n\t\treturn binding.batch(statements)\n\t}\n\tif (operation.indexOf('d1.stmt.') === 0) {\n\t\tconst mode = operation.split('.')[2]\n\t\tconst [sql, ...rest] = params\n\t\tlet bindings = rest\n\t\tlet extraParam\n\t\tif (mode === 'first' || mode === 'raw') {\n\t\t\textraParam = rest[rest.length - 1]\n\t\t\tbindings = rest.slice(0, -1)\n\t\t}\n\t\tlet stmt = binding.prepare(sql)\n\t\tif (bindings.length > 0) stmt = stmt.bind(...bindings)\n\t\tif (mode === 'first') {\n\t\t\tif (typeof extraParam === 'string' && extraParam.length > 0) return stmt.first(extraParam)\n\t\t\treturn stmt.first()\n\t\t}\n\t\tif (mode === 'all') return stmt.all()\n\t\tif (mode === 'run') return stmt.run()\n\t\tif (mode === 'raw') return stmt.raw(extraParam)\n\t\tthrow new Error('Unknown stmt mode: ' + mode)\n\t}\n\n\t// Durable Objects\n\tif (operation === 'do.idFromName') {\n\t\tconst id = binding.idFromName(params[0])\n\t\treturn { __type: 'DOId', hex: id.toString() }\n\t}\n\tif (operation === 'do.idFromString') {\n\t\tconst id = binding.idFromString(params[0])\n\t\treturn { __type: 'DOId', hex: id.toString() }\n\t}\n\tif (operation === 'do.newUniqueId') {\n\t\tconst id = binding.newUniqueId(params[0])\n\t\treturn { __type: 'DOId', hex: id.toString() }\n\t}\n\tif (operation === 'do.fetch') {\n\t\tconst [, 
serializedId, serializedReq] = params\n\t\tconst id = binding.idFromString(serializedId.hex)\n\t\tconst stub = binding.get(id)\n\t\tconst response = await stub.fetch(new Request(serializedReq.url, {\n\t\t\tmethod: serializedReq.method,\n\t\t\theaders: serializedReq.headers,\n\t\t\tbody: serializedReq.body?.type === 'bytes'\n\t\t\t\t? base64ToArrayBuffer(serializedReq.body.data)\n\t\t\t\t: undefined\n\t\t}))\n\t\treturn serializeResponse(response)\n\t}\n\tif (operation === 'do.rpc') {\n\t\tconst [, serializedId, methodName, args] = params\n\t\tconst id = binding.idFromString(serializedId.hex)\n\t\tconst stub = binding.get(id)\n\t\tconst response = await stub.fetch(new Request('http://do/_rpc', {\n\t\t\tmethod: 'POST',\n\t\t\theaders: { 'Content-Type': 'application/json' },\n\t\t\tbody: JSON.stringify({ method: methodName, params: args })\n\t\t}))\n\t\tconst result = await response.json()\n\t\tif (!result.ok) throw new Error(result.error?.message || 'RPC failed')\n\t\treturn result.result\n\t}\n\n\t// Service Bindings\n\tif (operation === 'service.fetch') {\n\t\tif (!binding || typeof binding.fetch !== 'function') {\n\t\t\tthrow new Error('Service binding ' + bindingName + ' does not support fetch()')\n\t\t}\n\t\tconst response = await binding.fetch(deserializeRequest(params[0]))\n\t\treturn serializeResponse(response)\n\t}\n\tif (operation === 'service.rpc') {\n\t\tconst methodName = params[0]\n\t\tif (typeof methodName !== 'string') {\n\t\t\tthrow new Error('Service binding ' + bindingName + ' RPC method name must be a string')\n\t\t}\n\t\tconst args = Array.isArray(params[1]) ? 
params[1] : []\n\t\tconst method = binding && binding[methodName]\n\t\tif (typeof method !== 'function') {\n\t\t\tthrow new Error('Service binding ' + bindingName + ' does not support ' + methodName + '()')\n\t\t}\n\t\treturn method.apply(binding, args)\n\t}\n\n\t// Queues\n\tif (operation === 'queue.send') return binding.send(params[0], params[1])\n\tif (operation === 'queue.sendBatch') return binding.sendBatch(params[0], params[1])\n\n\t// Send Email\n\tif (operation === 'email.send') {\n\t\tif (binding && typeof binding.send === 'function') {\n\t\t\tconst message = params[0]\n\t\t\tif (message && typeof message === 'object' && 'from' in message && 'to' in message && 'raw' in message) {\n\t\t\t\treturn binding.send({\n\t\t\t\t\tfrom: message.from,\n\t\t\t\t\tto: message.to,\n\t\t\t\t\t[RAW_EMAIL]: createEmailMessageRaw(message.raw)\n\t\t\t\t})\n\t\t\t}\n\t\t\treturn binding.send(message)\n\t\t}\n\t\treturn { ok: true, simulated: true }\n\t}\n\n\t// Workflows\n\tif (operation === 'workflow.create') {\n\t\treturn serializeWorkflowInstance(await binding.create(params[0]))\n\t}\n\tif (operation === 'workflow.get') {\n\t\treturn serializeWorkflowInstance(await binding.get(params[0]))\n\t}\n\tif (operation === 'workflow.status') {\n\t\treturn (await binding.get(params[0])).status()\n\t}\n\tif (operation === 'workflow.pause') {\n\t\treturn (await binding.get(params[0])).pause()\n\t}\n\tif (operation === 'workflow.resume') {\n\t\treturn (await binding.get(params[0])).resume()\n\t}\n\tif (operation === 'workflow.terminate') {\n\t\treturn (await binding.get(params[0])).terminate()\n\t}\n\tif (operation === 'workflow.restart') {\n\t\treturn (await binding.get(params[0])).restart()\n\t}\n\tif (operation === 'workflow.sendEvent') {\n\t\treturn (await binding.get(params[0])).sendEvent(params[1])\n\t}\n\n\t// AI / generic run()\n\tif (operation === 'ai.run') {\n\t\tif (typeof binding.run !== 'function') {\n\t\t\tthrow new Error('Binding ' + bindingName + ' does not support 
run(): ' + method)\n\t\t}\n\t\treturn binding.run(params[0], params[1])\n\t}\n\n\tthrow new Error('Unknown operation: ' + method)\n}\n\nfunction createUnsupportedBridgeOperationErrorMessage(bindingName, operation) {\n\tconst base = \"[devflare][bridge] Unsupported bridge operation '\" + operation + \"' for binding '\" + bindingName + \"'.\"\n\tif (operation === 'fetch') {\n\t\treturn base + ' Devflare could not dispatch fetch() for this binding through the local bridge. '\n\t\t\t+ 'Expected Cloudflare API: env.' + bindingName + '.fetch(request). '\n\t\t\t+ 'If this came from SvelteKit platform.env, make sure the binding is declared as a service binding; '\n\t\t\t+ 'this is a Devflare local bridge issue when service bindings fall back to a bare fetch operation.'\n\t}\n\tif (operation === 'toString') {\n\t\treturn base + ' A platform.env value was coerced to a string through the bridge. '\n\t\t\t+ 'For SvelteKit local dev, declared vars should be plain string values and missing env names should read as undefined.'\n\t}\n\treturn base + ' Bare verbs and the legacy stmt.*/stub.* sub-prefixes are not supported; '\n\t\t+ 'use the namespaced form (e.g. 
kv.get, r2.put, d1.stmt.first, do.fetch, service.fetch).'\n}\n\nfunction serializeWorkflowInstance(instance) {\n\treturn {\n\t\t__type: 'WorkflowInstance',\n\t\tid: instance.id\n\t}\n}\n\n// ---------------------------------------------------------------------------\n// WebSocket bridge (shared with src/bridge/server.ts in shape)\n// ---------------------------------------------------------------------------\n// NOTE: wsProxies is intentionally created per handleBridgeWebSocket call so\n// state never leaks across connections or across gateway-script regenerations.\n\nasync function handleBridgeRpcCall(msg, ws, env, ctx) {\n\ttry {\n\t\tconst result = await executeRpcMethod(msg.method, msg.params, env, ctx)\n\t\tws.send(JSON.stringify({ t: 'rpc.ok', id: msg.id, result }))\n\t} catch (error) {\n\t\tws.send(JSON.stringify({\n\t\t\tt: 'rpc.err',\n\t\t\tid: msg.id,\n\t\t\terror: {\n\t\t\t\tcode: error?.code || 'INTERNAL_ERROR',\n\t\t\t\tmessage: error?.message || String(error)\n\t\t\t}\n\t\t}))\n\t}\n}\n\nasync function handleBridgeWsOpen(msg, ws, env, wsProxies) {\n\ttry {\n\t\tconst binding = env[msg.target.binding]\n\t\tconst id = binding.idFromString(msg.target.id)\n\t\tconst stub = binding.get(id)\n\n\t\tconst headers = new Headers(msg.target.headers || [])\n\t\theaders.set('Upgrade', 'websocket')\n\n\t\tconst response = await stub.fetch(new Request(msg.target.url, { method: 'GET', headers }))\n\t\tconst doWs = response.webSocket\n\n\t\tif (!doWs) {\n\t\t\tws.send(JSON.stringify({\n\t\t\t\tt: 'rpc.err',\n\t\t\t\tid: 'ws_' + msg.wid,\n\t\t\t\terror: { code: 'WS_FAILED', message: 'No WebSocket returned' }\n\t\t\t}))\n\t\t\treturn\n\t\t}\n\n\t\tdoWs.accept()\n\t\twsProxies.set(msg.wid, { doWs })\n\n\t\tdoWs.addEventListener('message', (event) => {\n\t\t\tconst isText = typeof event.data === 'string'\n\t\t\tconst data = isText ? 
event.data : arrayBufferToBase64(event.data)\n\t\t\tws.send(JSON.stringify({ t: 'ws.data', wid: msg.wid, data, isText }))\n\t\t})\n\n\t\tdoWs.addEventListener('close', (event) => {\n\t\t\tws.send(JSON.stringify({ t: 'ws.close', wid: msg.wid, code: event.code, reason: event.reason }))\n\t\t\twsProxies.delete(msg.wid)\n\t\t})\n\n\t\tws.send(JSON.stringify({ t: 'ws.opened', wid: msg.wid }))\n\t} catch (error) {\n\t\tws.send(JSON.stringify({\n\t\t\tt: 'rpc.err',\n\t\t\tid: 'ws_' + msg.wid,\n\t\t\terror: { code: 'WS_FAILED', message: error.message }\n\t\t}))\n\t}\n}\n\nfunction handleBridgeWsClose(msg, wsProxies) {\n\tconst proxy = wsProxies.get(msg.wid)\n\tif (proxy) {\n\t\tproxy.doWs.close(msg.code, msg.reason)\n\t\twsProxies.delete(msg.wid)\n\t}\n}\n\nasync function handleBridgeJsonMessage(data, ws, env, ctx, wsProxies) {\n\tconst msg = JSON.parse(data)\n\tswitch (msg.t) {\n\t\tcase 'hello':\n\t\t\t// v2 handshake \u2014 acknowledge with welcome echoing the negotiated\n\t\t\t// capability intersection. 
Capabilities advertised by the gateway\n\t\t\t// are kept in sync with src/bridge/client.ts (BRIDGE_CLIENT_CAPABILITIES).\n\t\t\tws.send(JSON.stringify({\n\t\t\t\tt: 'welcome',\n\t\t\t\tprotocolVersion: 2,\n\t\t\t\tcapabilities: ['streams', 'ws-relay', 'http-transfer']\n\t\t\t\t\t.filter((c) => Array.isArray(msg.capabilities) && msg.capabilities.includes(c))\n\t\t\t\t\t.sort()\n\t\t\t}))\n\t\t\tbreak\n\t\tcase 'rpc.call':\n\t\t\tawait handleBridgeRpcCall(msg, ws, env, ctx)\n\t\t\tbreak\n\t\tcase 'ws.open':\n\t\t\tawait handleBridgeWsOpen(msg, ws, env, wsProxies)\n\t\t\tbreak\n\t\tcase 'ws.close':\n\t\t\thandleBridgeWsClose(msg, wsProxies)\n\t\t\tbreak\n\t}\n}\n\nfunction handleBridgeWebSocket(request, env, ctx) {\n\tconst { 0: client, 1: server } = new WebSocketPair()\n\tserver.accept()\n\n\t// Per-connection state: recreated for every bridge client so reloads and\n\t// concurrent clients never share WS proxy entries.\n\tconst wsProxies = new Map()\n\n\tserver.addEventListener('message', async (event) => {\n\t\ttry {\n\t\t\tif (typeof event.data === 'string') {\n\t\t\t\tawait handleBridgeJsonMessage(event.data, server, env, ctx, wsProxies)\n\t\t\t}\n\t\t} catch (error) {\n\t\t\tconsole.error('[Gateway] Error:', error)\n\t\t}\n\t})\n\n\tserver.addEventListener('close', () => {\n\t\tfor (const proxy of wsProxies.values()) {\n\t\t\t// Best-effort cleanup: the DO-side WS may already be closed or in an\n\t\t\t// invalid state; any throw here would abort sibling closes. 
Surface\n\t\t\t// the swallowed error when DEVFLARE_DEBUG_BRIDGE is enabled.\n\t\t\ttry { proxy.doWs.close() } catch (error) {\n\t\t\t\tif (globalThis.DEVFLARE_DEBUG_BRIDGE) {\n\t\t\t\t\tconsole.warn('[devflare:bridge] proxy.doWs.close() failed', error)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\twsProxies.clear()\n\t})\n\n\treturn new Response(null, { status: 101, webSocket: client })\n}\n\n// ---------------------------------------------------------------------------\n// HTTP transfer for R2 bodies (shared with src/bridge/server.ts in shape)\n// ---------------------------------------------------------------------------\n\nasync function handleHttpTransfer(request, env, url) {\n\tconst transferIdEncoded = url.pathname.split('/').pop()\n\tconst transferId = decodeURIComponent(transferIdEncoded || '')\n\tconst [binding, ...keyParts] = transferId.split(':')\n\tconst key = keyParts.join(':')\n\tconst bucket = env[binding]\n\n\tif (!bucket) return new Response('Bucket not found: ' + binding, { status: 404 })\n\n\tif (request.method === 'PUT' || request.method === 'POST') {\n\t\tconst result = await bucket.put(key, request.body)\n\t\treturn new Response(JSON.stringify(serializeR2Object(result)), {\n\t\t\theaders: { 'Content-Type': 'application/json' }\n\t\t})\n\t}\n\n\tif (request.method === 'GET') {\n\t\tconst object = await bucket.get(key)\n\t\tif (!object) return new Response('Not found', { status: 404 })\n\t\treturn new Response(object.body, {\n\t\t\theaders: {\n\t\t\t\t'Content-Type': object.httpMetadata?.contentType || 'application/octet-stream',\n\t\t\t\t'Content-Length': String(object.size)\n\t\t\t}\n\t\t})\n\t}\n\n\treturn new Response('Method not allowed', { status: 405 })\n}\n";
|
|
8
8
|
//# sourceMappingURL=gateway-runtime.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"gateway-runtime.d.ts","sourceRoot":"","sources":["../../src/bridge/gateway-runtime.ts"],"names":[],"mappings":"AAcA;;;;;GAKG;AACH,eAAO,MAAM,kBAAkB,
|
|
1
|
+
{"version":3,"file":"gateway-runtime.d.ts","sourceRoot":"","sources":["../../src/bridge/gateway-runtime.ts"],"names":[],"mappings":"AAcA;;;;;GAKG;AACH,eAAO,MAAM,kBAAkB,skjBAigB9B,CAAA"}
|
package/dist/bridge/proxy.d.ts
CHANGED
|
@@ -4,6 +4,8 @@ export interface EnvProxyOptions {
|
|
|
4
4
|
client?: BridgeClient;
|
|
5
5
|
/** Lazily connect on first access */
|
|
6
6
|
lazy?: boolean;
|
|
7
|
+
/** Return undefined for names that are not present in binding hints */
|
|
8
|
+
strict?: boolean;
|
|
7
9
|
/** Transform results before returning (e.g., for transport decoding) */
|
|
8
10
|
transformResult?: (result: unknown) => unknown;
|
|
9
11
|
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"proxy.d.ts","sourceRoot":"","sources":["../../src/bridge/proxy.ts"],"names":[],"mappings":"AAMA,OAAO,EAAa,KAAK,YAAY,EAAE,MAAM,UAAU,CAAA;AAavD,MAAM,WAAW,eAAe;IAC/B,0DAA0D;IAC1D,MAAM,CAAC,EAAE,YAAY,CAAA;IACrB,qCAAqC;IACrC,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,wEAAwE;IACxE,eAAe,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,KAAK,OAAO,CAAA;CAC9C;
|
|
1
|
+
{"version":3,"file":"proxy.d.ts","sourceRoot":"","sources":["../../src/bridge/proxy.ts"],"names":[],"mappings":"AAMA,OAAO,EAAa,KAAK,YAAY,EAAE,MAAM,UAAU,CAAA;AAavD,MAAM,WAAW,eAAe;IAC/B,0DAA0D;IAC1D,MAAM,CAAC,EAAE,YAAY,CAAA;IACrB,qCAAqC;IACrC,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,uEAAuE;IACvE,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,wEAAwE;IACxE,eAAe,CAAC,EAAE,CAAC,MAAM,EAAE,OAAO,KAAK,OAAO,CAAA;CAC9C;AAilBD,mDAAmD;AACnD,MAAM,MAAM,WAAW,GACpB,IAAI,GACJ,IAAI,GACJ,IAAI,GACJ,IAAI,GACJ,OAAO,GACP,IAAI,GACJ,SAAS,GACT,WAAW,GACX,UAAU,GACV,QAAQ,GACR,KAAK,CAAA;AAER,MAAM,WAAW,YAAY;IAC5B,CAAC,GAAG,EAAE,MAAM,GAAG,WAAW,CAAA;CAC1B;AAKD;;GAEG;AACH,wBAAgB,cAAc,CAAC,OAAO,GAAE,eAAe,GAAG;IAAE,KAAK,CAAC,EAAE,YAAY,CAAA;CAAO,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAoFhH;AAoFD;;;;;;;;;;;;;;;GAeG;AACH,eAAO,MAAM,SAAS,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAO5C,CAAA;AAEF;;GAEG;AACH,wBAAgB,OAAO,CAAC,OAAO,GAAE,eAAoB,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAG9E;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,YAAY,GAAG,IAAI,CAIzD"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"server.d.ts","sourceRoot":"","sources":["../../src/bridge/server.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"server.d.ts","sourceRoot":"","sources":["../../src/bridge/server.ts"],"names":[],"mappings":"AAuCA,MAAM,WAAW,UAAU;IAE1B,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAA;CACtB;;IAiBM,KAAK,UAAU,OAAO,OAAO,UAAU,OAAO,gBAAgB,GAAG,OAAO,CAAC,QAAQ,CAAC;;;AA8KzF,wBAAsB,gBAAgB,CACrC,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,OAAO,EAAE,EACjB,GAAG,EAAE,UAAU,EACf,GAAG,EAAE,gBAAgB,GACnB,OAAO,CAAC,OAAO,CAAC,CA2JlB"}
|
package/dist/browser.js
CHANGED
|
@@ -3,7 +3,7 @@ import {
|
|
|
3
3
|
} from "./index-62b3gt2g.js";
|
|
4
4
|
import {
|
|
5
5
|
env
|
|
6
|
-
} from "./index-
|
|
6
|
+
} from "./index-xp0qkkxf.js";
|
|
7
7
|
import {
|
|
8
8
|
durableObject,
|
|
9
9
|
getDurableObjectOptions
|
|
@@ -14,8 +14,8 @@ import {
|
|
|
14
14
|
getClient,
|
|
15
15
|
initEnv,
|
|
16
16
|
setBindingHints
|
|
17
|
-
} from "./index-
|
|
18
|
-
import"./index-
|
|
17
|
+
} from "./index-c1cj9085.js";
|
|
18
|
+
import"./index-hpwa6vsw.js";
|
|
19
19
|
import {
|
|
20
20
|
defineConfig,
|
|
21
21
|
ref
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import {
|
|
2
|
+
prepareBuildArtifacts
|
|
3
|
+
} from "./index-kxc4gtyt.js";
|
|
4
|
+
import"./index-aabgympv.js";
|
|
5
|
+
import"./index-z9gy8w6b.js";
|
|
6
|
+
import"./index-gn5wy09x.js";
|
|
7
|
+
import"./index-627srx16.js";
|
|
8
|
+
import {
|
|
9
|
+
createCliTheme,
|
|
10
|
+
cyanBold,
|
|
11
|
+
dim,
|
|
12
|
+
logLine
|
|
13
|
+
} from "./index-stgn34cr.js";
|
|
14
|
+
import"./index-3t6rypgc.js";
|
|
15
|
+
import"./index-gq39t0rx.js";
|
|
16
|
+
import"./index-t4fhcx1n.js";
|
|
17
|
+
import"./index-8x745h59.js";
|
|
18
|
+
import"./index-qf2dkqxh.js";
|
|
19
|
+
import"./index-qwgr4q7s.js";
|
|
20
|
+
import"./index-65e7xx1a.js";
|
|
21
|
+
import"./index-vhqww6tt.js";
|
|
22
|
+
import"./index-c3nxftnp.js";
|
|
23
|
+
import"./index-syscwrjp.js";
|
|
24
|
+
import"./index-1d4jg11n.js";
|
|
25
|
+
import"./index-mg8vwqxf.js";
|
|
26
|
+
import"./index-z40mjts9.js";
|
|
27
|
+
import"./index-q8f4kawk.js";
|
|
28
|
+
import"./index-37x76zdn.js";
|
|
29
|
+
|
|
30
|
+
// src/cli/commands/build.ts
|
|
31
|
+
async function runBuildCommand(parsed, logger, options) {
|
|
32
|
+
const theme = createCliTheme(parsed.options);
|
|
33
|
+
logLine(logger);
|
|
34
|
+
logLine(logger, `${cyanBold("build", theme)} ${dim("Preparing production artifacts", theme)}`);
|
|
35
|
+
try {
|
|
36
|
+
await prepareBuildArtifacts(parsed, logger, options);
|
|
37
|
+
logger.success("Generated .devflare/wrangler.jsonc");
|
|
38
|
+
logger.success("Generated .devflare/build/wrangler.jsonc");
|
|
39
|
+
logger.success("Generated .wrangler/deploy/config.json");
|
|
40
|
+
logger.success("Build complete!");
|
|
41
|
+
return { exitCode: 0 };
|
|
42
|
+
} catch (error) {
|
|
43
|
+
if (error instanceof Error) {
|
|
44
|
+
logger.error("Build failed:", error.message);
|
|
45
|
+
if (parsed.options.debug) {
|
|
46
|
+
logger.error(error.stack);
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
return { exitCode: 1 };
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
export {
|
|
53
|
+
runBuildCommand
|
|
54
|
+
};
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import {
|
|
2
|
+
prepareBuildArtifacts
|
|
3
|
+
} from "./index-4se6krdj.js";
|
|
4
|
+
import"./index-aabgympv.js";
|
|
5
|
+
import"./index-z9gy8w6b.js";
|
|
6
|
+
import"./index-gn5wy09x.js";
|
|
7
|
+
import"./index-627srx16.js";
|
|
8
|
+
import {
|
|
9
|
+
createCliTheme,
|
|
10
|
+
cyanBold,
|
|
11
|
+
dim,
|
|
12
|
+
logLine
|
|
13
|
+
} from "./index-stgn34cr.js";
|
|
14
|
+
import"./index-3t6rypgc.js";
|
|
15
|
+
import"./index-w36q6819.js";
|
|
16
|
+
import"./index-zpy9caxn.js";
|
|
17
|
+
import"./index-s9q605sq.js";
|
|
18
|
+
import"./index-3edvz3hs.js";
|
|
19
|
+
import"./index-qwgr4q7s.js";
|
|
20
|
+
import"./index-aqrwyy57.js";
|
|
21
|
+
import"./index-vhqww6tt.js";
|
|
22
|
+
import"./index-c3nxftnp.js";
|
|
23
|
+
import"./index-syscwrjp.js";
|
|
24
|
+
import"./index-1d4jg11n.js";
|
|
25
|
+
import"./index-mg8vwqxf.js";
|
|
26
|
+
import"./index-z40mjts9.js";
|
|
27
|
+
import"./index-q8f4kawk.js";
|
|
28
|
+
import"./index-37x76zdn.js";
|
|
29
|
+
|
|
30
|
+
// src/cli/commands/build.ts
|
|
31
|
+
async function runBuildCommand(parsed, logger, options) {
|
|
32
|
+
const theme = createCliTheme(parsed.options);
|
|
33
|
+
logLine(logger);
|
|
34
|
+
logLine(logger, `${cyanBold("build", theme)} ${dim("Preparing production artifacts", theme)}`);
|
|
35
|
+
try {
|
|
36
|
+
await prepareBuildArtifacts(parsed, logger, options);
|
|
37
|
+
logger.success("Generated .devflare/wrangler.jsonc");
|
|
38
|
+
logger.success("Generated .devflare/build/wrangler.jsonc");
|
|
39
|
+
logger.success("Generated .wrangler/deploy/config.json");
|
|
40
|
+
logger.success("Build complete!");
|
|
41
|
+
return { exitCode: 0 };
|
|
42
|
+
} catch (error) {
|
|
43
|
+
if (error instanceof Error) {
|
|
44
|
+
logger.error("Build failed:", error.message);
|
|
45
|
+
if (parsed.options.debug) {
|
|
46
|
+
logger.error(error.stack);
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
return { exitCode: 1 };
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
export {
|
|
53
|
+
runBuildCommand
|
|
54
|
+
};
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import {
|
|
2
|
+
prepareBuildArtifacts
|
|
3
|
+
} from "./index-dgww0ewn.js";
|
|
4
|
+
import"./index-aabgympv.js";
|
|
5
|
+
import"./index-z9gy8w6b.js";
|
|
6
|
+
import"./index-gn5wy09x.js";
|
|
7
|
+
import"./index-627srx16.js";
|
|
8
|
+
import {
|
|
9
|
+
createCliTheme,
|
|
10
|
+
cyanBold,
|
|
11
|
+
dim,
|
|
12
|
+
logLine
|
|
13
|
+
} from "./index-stgn34cr.js";
|
|
14
|
+
import"./index-3t6rypgc.js";
|
|
15
|
+
import"./index-666tdx14.js";
|
|
16
|
+
import"./index-05pbj4hy.js";
|
|
17
|
+
import"./index-p7q23nce.js";
|
|
18
|
+
import"./index-3edvz3hs.js";
|
|
19
|
+
import"./index-qwgr4q7s.js";
|
|
20
|
+
import"./index-aqrwyy57.js";
|
|
21
|
+
import"./index-vhqww6tt.js";
|
|
22
|
+
import"./index-c3nxftnp.js";
|
|
23
|
+
import"./index-syscwrjp.js";
|
|
24
|
+
import"./index-1d4jg11n.js";
|
|
25
|
+
import"./index-mg8vwqxf.js";
|
|
26
|
+
import"./index-z40mjts9.js";
|
|
27
|
+
import"./index-q8f4kawk.js";
|
|
28
|
+
import"./index-37x76zdn.js";
|
|
29
|
+
|
|
30
|
+
// src/cli/commands/build.ts
|
|
31
|
+
async function runBuildCommand(parsed, logger, options) {
|
|
32
|
+
const theme = createCliTheme(parsed.options);
|
|
33
|
+
logLine(logger);
|
|
34
|
+
logLine(logger, `${cyanBold("build", theme)} ${dim("Preparing production artifacts", theme)}`);
|
|
35
|
+
try {
|
|
36
|
+
await prepareBuildArtifacts(parsed, logger, options);
|
|
37
|
+
logger.success("Generated .devflare/wrangler.jsonc");
|
|
38
|
+
logger.success("Generated .devflare/build/wrangler.jsonc");
|
|
39
|
+
logger.success("Generated .wrangler/deploy/config.json");
|
|
40
|
+
logger.success("Build complete!");
|
|
41
|
+
return { exitCode: 0 };
|
|
42
|
+
} catch (error) {
|
|
43
|
+
if (error instanceof Error) {
|
|
44
|
+
logger.error("Build failed:", error.message);
|
|
45
|
+
if (parsed.options.debug) {
|
|
46
|
+
logger.error(error.stack);
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
return { exitCode: 1 };
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
export {
|
|
53
|
+
runBuildCommand
|
|
54
|
+
};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../../src/cli/commands/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,SAAS,CAAA;
|
|
1
|
+
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../../src/cli/commands/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,SAAS,CAAA;AAS9C,OAAO,KAAK,EAAE,UAAU,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,UAAU,CAAA;AAoCjE,wBAAsB,gBAAgB,CACrC,MAAM,EAAE,UAAU,EAClB,MAAM,EAAE,eAAe,EACvB,OAAO,EAAE,UAAU,GACjB,OAAO,CAAC,SAAS,CAAC,CA4DpB"}
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import { type ConsolaInstance } from 'consola';
|
|
2
2
|
import type { ParsedArgs, CliOptions, CliResult } from '../index';
|
|
3
|
+
export declare function resolveDevRuntimePort(options: Record<string, string | boolean>, env?: NodeJS.ProcessEnv): number;
|
|
3
4
|
export declare function runDevCommand(parsed: ParsedArgs, logger: ConsolaInstance, options: CliOptions): Promise<CliResult>;
|
|
4
5
|
//# sourceMappingURL=dev.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"dev.d.ts","sourceRoot":"","sources":["../../../src/cli/commands/dev.ts"],"names":[],"mappings":"AAqBA,OAAO,EAAiB,KAAK,eAAe,EAAE,MAAM,SAAS,CAAA;AAE7D,OAAO,KAAK,EAAE,UAAU,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,UAAU,CAAA;
|
|
1
|
+
{"version":3,"file":"dev.d.ts","sourceRoot":"","sources":["../../../src/cli/commands/dev.ts"],"names":[],"mappings":"AAqBA,OAAO,EAAiB,KAAK,eAAe,EAAE,MAAM,SAAS,CAAA;AAE7D,OAAO,KAAK,EAAE,UAAU,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,UAAU,CAAA;AA4CjE,wBAAgB,qBAAqB,CACpC,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,EACzC,GAAG,GAAE,MAAM,CAAC,UAAwB,GAClC,MAAM,CAYR;AAsDD,wBAAsB,aAAa,CAClC,MAAM,EAAE,UAAU,EAClB,MAAM,EAAE,eAAe,EACvB,OAAO,EAAE,UAAU,GACjB,OAAO,CAAC,SAAS,CAAC,CA2KpB"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"doctor.d.ts","sourceRoot":"","sources":["../../../src/cli/commands/doctor.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,SAAS,CAAA;AAE9C,OAAO,KAAK,EAAE,UAAU,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,UAAU,CAAA;
|
|
1
|
+
{"version":3,"file":"doctor.d.ts","sourceRoot":"","sources":["../../../src/cli/commands/doctor.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,SAAS,CAAA;AAE9C,OAAO,KAAK,EAAE,UAAU,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,UAAU,CAAA;AAejE,wBAAsB,gBAAgB,CACrC,MAAM,EAAE,UAAU,EAClB,MAAM,EAAE,eAAe,EACvB,OAAO,EAAE,UAAU,GACjB,OAAO,CAAC,SAAS,CAAC,CA0PpB"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"core.d.ts","sourceRoot":"","sources":["../../../../src/cli/help-pages/pages/core.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAGxC,eAAO,MAAM,eAAe,EAAE,QAAQ,
|
|
1
|
+
{"version":3,"file":"core.d.ts","sourceRoot":"","sources":["../../../../src/cli/help-pages/pages/core.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAGxC,eAAO,MAAM,eAAe,EAAE,QAAQ,EA4WrC,CAAA;AAED,eAAO,MAAM,aAAa,2LAAW,CAAA"}
|
package/dist/cli/index.js
CHANGED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
import {
|
|
2
|
+
compileBuildConfig,
|
|
3
|
+
compileConfig
|
|
4
|
+
} from "./index-c3nxftnp.js";
|
|
5
|
+
import {
|
|
6
|
+
ConfigResourceResolutionError,
|
|
7
|
+
loadConfig,
|
|
8
|
+
loadResolvedConfig,
|
|
9
|
+
resolveResources
|
|
10
|
+
} from "./index-syscwrjp.js";
|
|
11
|
+
import"./index-1d4jg11n.js";
|
|
12
|
+
import"./index-mg8vwqxf.js";
|
|
13
|
+
import"./index-z40mjts9.js";
|
|
14
|
+
import"./index-q8f4kawk.js";
|
|
15
|
+
import"./index-37x76zdn.js";
|
|
16
|
+
|
|
17
|
+
// src/cli/commands/config.ts
|
|
18
|
+
function isSupportedFormat(value) {
|
|
19
|
+
return value === "devflare" || value === "wrangler";
|
|
20
|
+
}
|
|
21
|
+
function isSupportedPhase(value) {
|
|
22
|
+
return value === "build" || value === "local" || value === "deploy";
|
|
23
|
+
}
|
|
24
|
+
async function loadConfigForPhase(options) {
|
|
25
|
+
if (options.phase === "deploy") {
|
|
26
|
+
return await loadResolvedConfig({
|
|
27
|
+
cwd: options.cwd,
|
|
28
|
+
configFile: options.configPath,
|
|
29
|
+
environment: options.environment
|
|
30
|
+
});
|
|
31
|
+
}
|
|
32
|
+
const config = await loadConfig({
|
|
33
|
+
cwd: options.cwd,
|
|
34
|
+
configFile: options.configPath
|
|
35
|
+
});
|
|
36
|
+
return await resolveResources(config, {
|
|
37
|
+
phase: options.phase,
|
|
38
|
+
environment: options.environment
|
|
39
|
+
});
|
|
40
|
+
}
|
|
41
|
+
/**
 * Implements `devflare config [print]`: loads the project configuration for
 * the requested phase, optionally compiles it to wrangler shape, and prints
 * it as tab-indented JSON.
 *
 * @param {{args: string[], options: Record<string, unknown>}} parsed - parsed CLI arguments
 * @param {{error: Function, info: Function}} logger - CLI logger
 * @param {{cwd?: string, silent?: boolean}} options - command runtime options
 * @returns {Promise<{exitCode: number, output?: string}>} exit status and printed text
 */
async function runConfigCommand(parsed, logger, options) {
  const cwd = options.cwd || process.cwd();
  const configPath = parsed.options.config;
  const environment = parsed.options.env;
  const subcommand = parsed.args[0] ?? "print";
  const format = parsed.options.format ?? "devflare";
  // `--local` is shorthand for `--phase local`; otherwise fall back to deploy.
  let phaseOption = parsed.options.phase ?? "deploy";
  if (parsed.options.local === true) {
    phaseOption = "local";
  }
  // Validate inputs up front with guard clauses.
  if (subcommand !== "print") {
    logger.error(`Unknown config subcommand: ${subcommand}`);
    logger.info("Supported subcommands: print");
    return { exitCode: 1 };
  }
  if (!isSupportedFormat(format)) {
    logger.error(`Unsupported config format: ${format}`);
    logger.info("Supported formats: devflare, wrangler");
    return { exitCode: 1 };
  }
  if (!isSupportedPhase(phaseOption)) {
    logger.error(`Unsupported config phase: ${phaseOption}`);
    logger.info("Supported phases: build, local, deploy");
    return { exitCode: 1 };
  }
  try {
    const resolvedConfig = await loadConfigForPhase({
      cwd,
      configPath,
      environment,
      phase: phaseOption
    });
    // Wrangler output compiles the resolved config; build phase uses the
    // build-specific compiler with resources already resolved.
    let output = resolvedConfig;
    if (format === "wrangler") {
      output = phaseOption === "build"
        ? compileBuildConfig(resolvedConfig, undefined, { alreadyResolved: true })
        : compileConfig(resolvedConfig);
    }
    const text = JSON.stringify(output, null, "\t");
    if (!options.silent) {
      process.stdout.write(`${text}\n`);
    }
    return { exitCode: 0, output: text };
  } catch (error) {
    if (error instanceof Error) {
      logger.error("Config command failed:", error.message);
      if (error instanceof ConfigResourceResolutionError) {
        logger.info("For offline inspection, run `devflare config --phase local` or `devflare config --phase build --format wrangler`.");
      }
    }
    return { exitCode: 1 };
  }
}
|
|
91
|
+
export {
|
|
92
|
+
runConfigCommand
|
|
93
|
+
};
|