rwsdk 1.0.0-alpha.5 → 1.0.0-alpha.7
- package/dist/lib/e2e/browser.d.mts +10 -0
- package/dist/lib/e2e/browser.mjs +107 -0
- package/dist/lib/e2e/dev.d.mts +8 -0
- package/dist/lib/e2e/dev.mjs +232 -0
- package/dist/lib/e2e/environment.d.mts +14 -0
- package/dist/lib/e2e/environment.mjs +201 -0
- package/dist/lib/e2e/index.d.mts +7 -0
- package/dist/lib/e2e/index.mjs +7 -0
- package/dist/lib/e2e/release.d.mts +56 -0
- package/dist/lib/e2e/release.mjs +537 -0
- package/dist/lib/e2e/tarball.d.mts +14 -0
- package/dist/lib/e2e/tarball.mjs +189 -0
- package/dist/lib/e2e/testHarness.d.mts +98 -0
- package/dist/lib/e2e/testHarness.mjs +393 -0
- package/dist/lib/e2e/types.d.mts +31 -0
- package/dist/lib/e2e/types.mjs +1 -0
- package/dist/lib/smokeTests/browser.mjs +3 -94
- package/dist/lib/smokeTests/development.mjs +2 -223
- package/dist/lib/smokeTests/environment.d.mts +4 -11
- package/dist/lib/smokeTests/environment.mjs +10 -158
- package/dist/lib/smokeTests/release.d.mts +2 -49
- package/dist/lib/smokeTests/release.mjs +3 -503
- package/dist/runtime/lib/injectHtmlAtMarker.d.ts +11 -0
- package/dist/runtime/lib/injectHtmlAtMarker.js +90 -0
- package/dist/runtime/lib/realtime/worker.d.ts +1 -1
- package/dist/runtime/lib/router.js +32 -20
- package/dist/runtime/lib/router.test.js +506 -1
- package/dist/runtime/lib/rwContext.d.ts +22 -0
- package/dist/runtime/lib/rwContext.js +1 -0
- package/dist/runtime/render/assembleDocument.d.ts +6 -0
- package/dist/runtime/render/assembleDocument.js +22 -0
- package/dist/runtime/render/createThenableFromReadableStream.d.ts +1 -0
- package/dist/runtime/render/createThenableFromReadableStream.js +9 -0
- package/dist/runtime/render/normalizeActionResult.d.ts +1 -0
- package/dist/runtime/render/normalizeActionResult.js +43 -0
- package/dist/runtime/render/preloads.d.ts +2 -2
- package/dist/runtime/render/preloads.js +2 -3
- package/dist/runtime/render/{renderRscThenableToHtmlStream.d.ts → renderDocumentHtmlStream.d.ts} +3 -3
- package/dist/runtime/render/renderDocumentHtmlStream.js +39 -0
- package/dist/runtime/render/renderHtmlStream.d.ts +7 -0
- package/dist/runtime/render/renderHtmlStream.js +31 -0
- package/dist/runtime/render/renderToRscStream.d.ts +2 -3
- package/dist/runtime/render/renderToRscStream.js +2 -41
- package/dist/runtime/render/renderToStream.d.ts +2 -1
- package/dist/runtime/render/renderToStream.js +15 -8
- package/dist/runtime/render/stylesheets.d.ts +2 -2
- package/dist/runtime/render/stylesheets.js +2 -3
- package/dist/runtime/ssrBridge.d.ts +2 -1
- package/dist/runtime/ssrBridge.js +2 -1
- package/dist/runtime/worker.d.ts +1 -0
- package/dist/runtime/worker.js +11 -6
- package/dist/vite/configPlugin.mjs +2 -2
- package/package.json +8 -4
- package/dist/runtime/render/renderRscThenableToHtmlStream.js +0 -54
- package/dist/runtime/render/transformRscToHtmlStream.d.ts +0 -8
- package/dist/runtime/render/transformRscToHtmlStream.js +0 -19

package/dist/lib/smokeTests/release.mjs

@@ -1,380 +1,13 @@
-import { join, basename } from "path";
 import { setTimeout } from "node:timers/promises";
 import { log } from "./constants.mjs";
 import { checkUrl, checkServerUp } from "./browser.mjs";
-import { $ } from "../../lib
-
-import { existsSync, readFileSync } from "fs";
-import { pathExists } from "fs-extra";
-import { parse as parseJsonc } from "jsonc-parser";
-import * as fs from "fs/promises";
-import { extractLastJson, parseJson } from "../../lib/jsonUtils.mjs";
-/**
- * A mini expect-like utility for handling interactive CLI prompts and verifying output
- * @param command The command to execute
- * @param expectations Array of {expect, send} objects for interactive responses and verification
- * @param options Additional options for command execution including working directory and environment
- * @returns Promise that resolves when the command completes
- */
-export async function $expect(command, expectations, options = {
-    reject: true,
-}) {
-    return new Promise((resolve, reject) => {
-        log("$expect starting with command: %s", command);
-        log("Working directory: %s", options.cwd ?? process.cwd());
-        log("Expected patterns: %O", expectations.map((e) => e.expect.toString()));
-        console.log(`Running command: ${command}`);
-        // Spawn the process with pipes for interaction
-        const childProcess = execaCommand(command, {
-            cwd: options.cwd ?? process.cwd(),
-            stdio: "pipe",
-            reject: false, // Never reject so we can handle the error ourselves
-            env: options.env ?? process.env,
-        });
-        log("Process spawned with PID: %s", childProcess.pid);
-        let stdout = "";
-        let stderr = "";
-        let buffer = "";
-        let lastMatchIndex = 0; // Track the index where the last match occurred
-        // Track patterns that have been matched
-        const matchHistory = new Map();
-        // Track current expectation index to process them in order
-        let currentExpectationIndex = 0;
-        // Initialize match count for each pattern
-        expectations.forEach(({ expect: expectPattern }) => {
-            matchHistory.set(expectPattern, 0);
-            log("Initialized pattern match count for: %s", expectPattern.toString());
-        });
-        // Collect stdout
-        childProcess.stdout?.on("data", (data) => {
-            const chunk = data.toString();
-            stdout += chunk;
-            buffer += chunk;
-            // Print to console
-            process.stdout.write(chunk);
-            // Only process expectations that haven't been fully matched yet
-            // and in the order they were provided
-            while (currentExpectationIndex < expectations.length) {
-                const { expect: expectPattern, send } = expectations[currentExpectationIndex];
-                const pattern = expectPattern instanceof RegExp
-                    ? expectPattern
-                    : new RegExp(expectPattern, "m");
-                // Only search in the unmatched portion of the buffer
-                const searchBuffer = buffer.substring(lastMatchIndex);
-                log("Testing pattern: %s against buffer from position %d (%d chars)", pattern.toString(), lastMatchIndex, searchBuffer.length);
-                // Enhanced debugging: show actual search buffer content
-                log("Search buffer content for debugging: %O", searchBuffer);
-                const match = searchBuffer.match(pattern);
-                if (match) {
-                    // Found a match
-                    const patternStr = expectPattern.toString();
-                    const matchCount = matchHistory.get(expectPattern) || 0;
-                    // Update the lastMatchIndex to point after this match
-                    // Calculate the absolute position in the full buffer
-                    const matchStartPosition = lastMatchIndex + match.index;
-                    const matchEndPosition = matchStartPosition + match[0].length;
-                    lastMatchIndex = matchEndPosition;
-                    log(`Pattern matched: "${patternStr}" (occurrence #${matchCount + 1}) at position ${matchStartPosition}-${matchEndPosition}`);
-                    // Only send a response if one is specified
-                    if (send) {
-                        log(`Sending response: "${send.replace(/\r/g, "\\r")}" to stdin`);
-                        childProcess.stdin?.write(send);
-                    }
-                    else {
-                        log(`Pattern "${patternStr}" matched (verification only)`);
-                    }
-                    // Increment the match count for this pattern
-                    matchHistory.set(expectPattern, matchCount + 1);
-                    log("Updated match count for %s: %d", patternStr, matchCount + 1);
-                    // Move to the next expectation
-                    currentExpectationIndex++;
-                    // If we've processed all expectations but need to wait for stdin response,
-                    // delay closing stdin until the next data event
-                    if (currentExpectationIndex >= expectations.length && send) {
-                        log("All patterns matched, closing stdin after last response");
-                        childProcess.stdin?.end();
-                    }
-                    break; // Exit the while loop to process next chunk
-                }
-                else {
-                    log("Pattern not matched. Attempting to diagnose the mismatch:");
-                    // Try to find the closest substring that might partially match
-                    const patternString = pattern.toString();
-                    const patternCore = patternString.substring(1, patternString.lastIndexOf("/") > 0
-                        ? patternString.lastIndexOf("/")
-                        : patternString.length);
-                    // Try partial matches to diagnose the issue
-                    for (let i = 3; i < patternCore.length; i++) {
-                        const partialPattern = patternCore.substring(0, i);
-                        const partialRegex = new RegExp(partialPattern, "m");
-                        const matches = partialRegex.test(searchBuffer);
-                        log(" Partial pattern '%s': %s", partialPattern, matches ? "matched" : "not matched");
-                        // Once we find where the matching starts to fail, stop
-                        if (!matches)
-                            break;
-                    }
-                    // Break the while loop as this pattern doesn't match yet
-                    break;
-                }
-            }
-            // If all expectations have been matched, we can close stdin if not already closed
-            if (currentExpectationIndex >= expectations.length &&
-                childProcess.stdin?.writable) {
-                log("All patterns matched, ensuring stdin is closed");
-                childProcess.stdin.end();
-            }
-        });
-        // Collect stderr if needed
-        if (childProcess.stderr) {
-            childProcess.stderr.on("data", (data) => {
-                const chunk = data.toString();
-                stderr += chunk;
-                // Also write stderr to console
-                process.stderr.write(chunk);
-            });
-        }
-        // Handle process completion
-        childProcess.on("close", (code) => {
-            log("Process closed with code: %s", code);
-            // Log the number of matches for each pattern
-            log("Pattern match summary:");
-            for (const [pattern, count] of matchHistory.entries()) {
-                log(` - "${pattern.toString()}": ${count} matches`);
-            }
-            // Check if any required patterns were not matched
-            const unmatchedPatterns = Array.from(matchHistory.entries())
-                .filter(([_, count]) => count === 0)
-                .map(([pattern, _]) => pattern.toString());
-            if (unmatchedPatterns.length > 0) {
-                log("WARNING: Some expected patterns were not matched: %O", unmatchedPatterns);
-            }
-            log("$expect completed. Total stdout: %d bytes, stderr: %d bytes", stdout.length, stderr.length);
-            resolve({ stdout, stderr, code });
-        });
-        childProcess.on("error", (err) => {
-            log("Process error: %O", err);
-            if (options.reject) {
-                reject(new Error(`Failed to execute command: ${err.message}`));
-            }
-            else {
-                resolve({ stdout, stderr, code: null });
-            }
-        });
-    });
-}
-/**
- * Ensures Cloudflare account ID is set in environment
- * First checks wrangler cache, then environment variables, and finally guides the user
- */
-export async function ensureCloudflareAccountId(cwd, projectDir) {
-    // Skip if already set
-    if (process.env.CLOUDFLARE_ACCOUNT_ID) {
-        log("CLOUDFLARE_ACCOUNT_ID is already set: %s", process.env.CLOUDFLARE_ACCOUNT_ID);
-        console.log(`Using existing CLOUDFLARE_ACCOUNT_ID: ${process.env.CLOUDFLARE_ACCOUNT_ID}`);
-        return;
-    }
-    console.log("CLOUDFLARE_ACCOUNT_ID not set, checking wrangler cache...");
-    try {
-        // Check wrangler cache in the project directory, not the current working directory
-        projectDir = projectDir || cwd || process.cwd();
-        log("Looking for wrangler cache in project directory: %s", projectDir);
-        const accountCachePath = join(projectDir, "node_modules/.cache/wrangler/wrangler-account.json");
-        if (existsSync(accountCachePath)) {
-            try {
-                const accountCache = JSON.parse(readFileSync(accountCachePath, "utf8"));
-                if (accountCache.account?.id) {
-                    const accountId = accountCache.account.id;
-                    process.env.CLOUDFLARE_ACCOUNT_ID = accountId;
-                    log("Found CLOUDFLARE_ACCOUNT_ID in wrangler cache: %s", accountId);
-                    console.log(`✅ Setting CLOUDFLARE_ACCOUNT_ID to ${accountId} (from wrangler cache)`);
-                    return;
-                }
-            }
-            catch (parseError) {
-                log("Failed to parse wrangler account cache: %O", parseError);
-                // Continue to other methods if cache parsing fails
-            }
-        }
-        else {
-            log("Wrangler account cache not found at: %s", accountCachePath);
-        }
-        // If we get here, we couldn't find the account ID in the cache
-        // Give clear guidance to the user
-        console.log("⚠️ Could not find Cloudflare account ID");
-        console.log("Please either:");
-        console.log(" 1. Run 'npx wrangler login' to authenticate with Cloudflare");
-        console.log(" 2. Set CLOUDFLARE_ACCOUNT_ID and CLOUDFLARE_API_TOKEN environment variables");
-        // Try wrangler whoami as a final attempt
-        console.log("\nAttempting to get account info from wrangler...");
-        const result = await $({
-            cwd: projectDir,
-            stdio: "pipe",
-        }) `npx wrangler whoami`;
-        // First try regex pattern matching on the text output
-        if (result.stdout) {
-            const accountIdMatch = result.stdout.match(/Account ID: ([a-f0-9]{32})/);
-            if (accountIdMatch && accountIdMatch[1]) {
-                const accountId = accountIdMatch[1];
-                process.env.CLOUDFLARE_ACCOUNT_ID = accountId;
-                log("Extracted CLOUDFLARE_ACCOUNT_ID from whoami text: %s", accountId);
-                console.log(`✅ Setting CLOUDFLARE_ACCOUNT_ID to ${accountId} (from wrangler whoami)`);
-                return;
-            }
-        }
-        // Fallback: try to extract any JSON that might be in the output
-        const accountInfo = extractLastJson(result.stdout);
-        if (accountInfo && accountInfo.account && accountInfo.account.id) {
-            const accountId = accountInfo.account.id;
-            process.env.CLOUDFLARE_ACCOUNT_ID = accountId;
-            log("Extracted CLOUDFLARE_ACCOUNT_ID from whoami JSON: %s", accountId);
-            console.log(`✅ Setting CLOUDFLARE_ACCOUNT_ID to ${accountId} (from wrangler whoami)`);
-            return;
-        }
-        // If we get here, we've exhausted all options
-        throw new Error("Could not find Cloudflare account ID. Please login with 'npx wrangler login' or set CLOUDFLARE_ACCOUNT_ID manually.");
-    }
-    catch (error) {
-        log("Error during account ID detection: %O", error);
-        throw error;
-    }
-}
+import { runRelease as runE2ERelease, deleteWorker, deleteD1Database, isRelatedToTest, $expect, listD1Databases, } from "../../lib/e2e/release.mjs";
+export { deleteWorker, deleteD1Database, isRelatedToTest, $expect, listD1Databases, };
 /**
  * Run the release command to deploy to Cloudflare
  */
 export async function runRelease(cwd, projectDir, resourceUniqueKey) {
-
-    console.log("\n🚀 Deploying worker to Cloudflare...");
-    try {
-        // Make sure we have an account ID
-        await ensureCloudflareAccountId(cwd, projectDir);
-        // Extract worker name from directory name to ensure consistency
-        const dirName = cwd ? basename(cwd) : "unknown-worker";
-        // Ensure resource unique key is included in worker name for tracking
-        if (resourceUniqueKey && !dirName.includes(resourceUniqueKey)) {
-            log(`Worker name doesn't contain our unique key, this is unexpected: ${dirName}, key: ${resourceUniqueKey}`);
-            console.log(`⚠️ Worker name doesn't contain our unique key. This might cause cleanup issues.`);
-        }
-        // Ensure the worker name in wrangler.jsonc matches our unique name
-        if (cwd) {
-            try {
-                const wranglerPath = join(cwd, "wrangler.jsonc");
-                if (await pathExists(wranglerPath)) {
-                    log("Updating wrangler.jsonc to use our unique worker name: %s", dirName);
-                    // Read the wrangler config - handle both jsonc and json formats
-                    const wranglerContent = await fs.readFile(wranglerPath, "utf-8");
-                    // Use parseJsonc which handles comments and is more tolerant
-                    let wranglerConfig;
-                    try {
-                        wranglerConfig = parseJsonc(wranglerContent);
-                    }
-                    catch (parseError) {
-                        // Fallback to standard JSON if jsonc parsing fails
-                        log("JSONC parsing failed, trying standard JSON: %O", parseError);
-                        wranglerConfig = JSON.parse(wranglerContent);
-                    }
-                    // Update the name
-                    if (wranglerConfig.name !== dirName) {
-                        wranglerConfig.name = dirName;
-                        await fs.writeFile(wranglerPath, JSON.stringify(wranglerConfig, null, 2));
-                        log("Updated wrangler.jsonc with unique worker name: %s", dirName);
-                    }
-                }
-            }
-            catch (error) {
-                log("Error updating wrangler.jsonc: %O", error);
-                console.error(`Warning: Could not update wrangler.jsonc: ${error}`);
-            }
-        }
-        // Run release command with our interactive $expect utility and retry logic
-        log("Running release command with interactive prompts and retries");
-        const MAX_RETRIES = 3;
-        let lastError = null;
-        let result = null;
-        for (let i = 0; i < MAX_RETRIES; i++) {
-            try {
-                console.log(`\n🚀 Deploying worker to Cloudflare (Attempt ${i + 1}/${MAX_RETRIES})...`);
-                result = await $expect("npm run release", [
-                    {
-                        // Make the pattern more flexible to account for potential whitespace differences
-                        expect: /Do you want to proceed with deployment\?\s*\(y\/N\)/i,
-                        send: "y\r",
-                    },
-                ], {
-                    reject: false, // Add reject: false to prevent uncaught promise rejections
-                    env: {
-                        RWSDK_RENAME_WORKER: "1",
-                        RWSDK_RENAME_DB: "1",
-                        ...process.env,
-                    },
-                    cwd,
-                });
-                // Check exit code to ensure command succeeded
-                if (result.code === 0) {
-                    log(`Release command succeeded on attempt ${i + 1}`);
-                    lastError = null; // Clear last error on success
-                    break; // Exit the loop on success
-                }
-                else {
-                    throw new Error(`Release command failed with exit code ${result.code}`);
-                }
-            }
-            catch (error) {
-                lastError = error;
-                log(`Attempt ${i + 1} failed: ${lastError.message}`);
-                if (i < MAX_RETRIES - 1) {
-                    console.log(` Waiting 5 seconds before retrying...`);
-                    await setTimeout(5000);
-                }
-            }
-        }
-        if (lastError || !result) {
-            log("ERROR: Release command failed after all retries.");
-            throw lastError || new Error("Release command failed after all retries.");
-        }
-        // Check exit code to ensure command succeeded
-        if (result.code !== 0) {
-            // Add more contextual information about the error
-            let errorMessage = `Release command failed with exit code ${result.code}`;
-            // Add stderr output to the error message if available
-            if (result.stderr && result.stderr.trim().length > 0) {
-                // Extract the most relevant part of the error message
-                const errorLines = result.stderr
-                    .split("\n")
-                    .filter((line) => line.includes("ERROR") ||
-                    line.includes("error:") ||
-                    line.includes("failed"))
-                    .slice(0, 3) // Take just the first few error lines
-                    .join("\n");
-                if (errorLines) {
-                    errorMessage += `\nError details: ${errorLines}`;
-                }
-            }
-            log("ERROR: %s", errorMessage);
-            throw new Error(errorMessage);
-        }
-        const stdout = result.stdout;
-        // Extract deployment URL from output
-        log("Extracting deployment URL from output");
-        const urlMatch = stdout.match(/https:\/\/([a-zA-Z0-9-]+)\.redwoodjs\.workers\.dev/);
-        if (!urlMatch || !urlMatch[0]) {
-            log("ERROR: Could not extract deployment URL from release output");
-            // Log more details about the output for debugging
-            log("Release command stdout: %s", stdout);
-            if (result.stderr) {
-                log("Release command stderr: %s", result.stderr);
-            }
-            throw new Error("Could not extract deployment URL from release output");
-        }
-        const url = urlMatch[0];
-        const workerName = urlMatch[1];
-        log("Successfully deployed to %s (worker: %s)", url, workerName);
-        console.log(`✅ Successfully deployed to ${url}`);
-        return { url, workerName };
-    }
-    catch (error) {
-        log("ERROR: Failed to run release command: %O", error);
-        throw error;
-    }
+    return runE2ERelease(cwd, projectDir, resourceUniqueKey);
 }
 /**
  * Runs tests against the production deployment

@@ -416,136 +49,3 @@ export async function runReleaseTest(artifactDir, resources, browserPath, headle
         throw error;
     }
 }
-/**
- * Check if a resource name includes a specific resource unique key
- * This is used to identify resources created during our tests
- */
-export function isRelatedToTest(resourceName, resourceUniqueKey) {
-    return resourceName.includes(resourceUniqueKey);
-}
-/**
- * Delete the worker using wrangler
- */
-export async function deleteWorker(name, cwd, resourceUniqueKey) {
-    console.log(`Cleaning up: Deleting worker ${name}...`);
-    // Safety check: if we have a resourceUniqueKey, verify this worker name contains it
-    if (resourceUniqueKey && !isRelatedToTest(name, resourceUniqueKey)) {
-        log(`Worker ${name} does not contain unique key ${resourceUniqueKey}, not deleting for safety`);
-        console.log(`⚠️ Worker ${name} does not seem to be created by this test, skipping deletion for safety`);
-        return;
-    }
-    try {
-        // Use our $expect utility to handle any confirmation prompts
-        log("Running wrangler delete command with interactive prompts");
-        await $expect(`npx wrangler delete ${name}`, [
-            {
-                expect: "Are you sure you want to delete",
-                send: "y\r",
-            },
-        ], {
-            cwd,
-        });
-        console.log(`✅ Worker ${name} deleted successfully`);
-    }
-    catch (error) {
-        console.error(`Failed to delete worker ${name}: ${error}`);
-        // Retry with force flag if the first attempt failed
-        try {
-            console.log("Retrying with force flag...");
-            await $expect(`npx wrangler delete ${name} --yes --force`, [
-                {
-                    expect: "Are you sure you want to delete",
-                    send: "y\r",
-                },
-            ], {
-                cwd,
-            });
-            console.log(`✅ Worker ${name} force deleted successfully`);
-        }
-        catch (retryError) {
-            console.error(`Failed to force delete worker ${name}: ${retryError}`);
-        }
-    }
-}
-/**
- * List D1 databases using wrangler
- */
-export async function listD1Databases(cwd) {
-    log("Listing D1 databases");
-    try {
-        const result = await $({
-            cwd,
-            stdio: "pipe",
-        }) `npx wrangler d1 list --json`;
-        // Parse the JSON output to extract the last valid JSON
-        const data = parseJson(result.stdout, []);
-        if (Array.isArray(data)) {
-            log("Found %d D1 databases in parsed array", data.length);
-            return data;
-        }
-        else if (data.databases && Array.isArray(data.databases)) {
-            log("Found %d D1 databases in 'databases' property", data.databases.length);
-            return data.databases;
-        }
-        // If nothing worked, return an empty array
-        log("Could not parse JSON from output, returning empty array");
-        return [];
-    }
-    catch (error) {
-        log("Error listing D1 databases: %O", error);
-        console.error(`Failed to list D1 databases: ${error}`);
-        return [];
-    }
-}
-/**
- * Delete a D1 database using wrangler
- */
-export async function deleteD1Database(name, cwd, resourceUniqueKey) {
-    console.log(`Cleaning up: Deleting D1 database ${name}...`);
-    try {
-        // First check if the database exists
-        const databases = await listD1Databases(cwd);
-        const exists = databases.some((db) => db.name === name);
-        if (!exists) {
-            log(`D1 database ${name} not found, skipping deletion`);
-            console.log(`⚠️ D1 database ${name} not found, skipping deletion`);
-            return;
-        }
-        // Extra safety check: if we have a resourceUniqueKey, verify this database is related to our test
-        if (resourceUniqueKey && !isRelatedToTest(name, resourceUniqueKey)) {
-            log(`D1 database ${name} does not contain unique key ${resourceUniqueKey}, not deleting for safety`);
-            console.log(`⚠️ D1 database ${name} does not seem to be created by this test, skipping deletion for safety`);
-            return;
-        }
-        // Use our $expect utility to handle any confirmation prompts
-        log("Running wrangler d1 delete command with interactive prompts");
-        await $expect(`npx wrangler d1 delete ${name}`, [
-            {
-                expect: "Are you sure you want to delete",
-                send: "y\r",
-            },
-        ], {
-            cwd,
-        });
-        console.log(`✅ D1 database ${name} deleted successfully`);
-    }
-    catch (error) {
-        console.error(`Failed to delete D1 database ${name}: ${error}`);
-        // Retry with force flag if the first attempt failed
-        try {
-            console.log("Retrying with force flag...");
-            await $expect(`npx wrangler d1 delete ${name} --yes --force`, [
-                {
-                    expect: "Are you sure you want to delete",
-                    send: "y\r",
-                },
-            ], {
-                cwd,
-            });
-            console.log(`✅ D1 database ${name} force deleted successfully`);
-        }
-        catch (retryError) {
-            console.error(`Failed to force delete D1 database ${name}: ${retryError}`);
-        }
-    }
-}
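
The two hunks above replace the smoke-test deployment helpers with thin wrappers around the new package/dist/lib/e2e modules: runRelease now delegates to runE2ERelease, and deleteWorker, deleteD1Database, isRelatedToTest, $expect, and listD1Databases are re-exported from lib/e2e/release.mjs. A minimal sketch of what that means for existing callers; the import specifiers below are assumptions for illustration, since these dist files are internal build output rather than documented entry points:

    // Hypothetical consumer code (specifiers assumed, not from the diff).
    // Because smokeTests/release.mjs re-exports the e2e helpers, both bindings
    // resolve to the same function object.
    import { deleteWorker as fromSmokeTests } from "rwsdk/dist/lib/smokeTests/release.mjs";
    import { deleteWorker as fromE2E } from "rwsdk/dist/lib/e2e/release.mjs";

    console.log(fromSmokeTests === fromE2E); // true: a re-export preserves identity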

package/dist/runtime/lib/injectHtmlAtMarker.d.ts

@@ -0,0 +1,11 @@
+/**
+ * Injects HTML content from one stream into another stream at a specified marker.
+ * This preserves streaming behavior by processing chunks incrementally without
+ * buffering the entire streams.
+ *
+ * @param outerHtml - The outer HTML stream containing the marker
+ * @param innerHtml - The inner HTML stream to inject at the marker
+ * @param marker - The text marker where injection should occur
+ * @returns A new ReadableStream with the inner HTML injected at the marker
+ */
+export declare function injectHtmlAtMarker(outerHtml: ReadableStream<Uint8Array>, innerHtml: ReadableStream<Uint8Array>, marker: string): ReadableStream<Uint8Array>;

package/dist/runtime/lib/injectHtmlAtMarker.js

@@ -0,0 +1,90 @@
+/**
+ * Injects HTML content from one stream into another stream at a specified marker.
+ * This preserves streaming behavior by processing chunks incrementally without
+ * buffering the entire streams.
+ *
+ * @param outerHtml - The outer HTML stream containing the marker
+ * @param innerHtml - The inner HTML stream to inject at the marker
+ * @param marker - The text marker where injection should occur
+ * @returns A new ReadableStream with the inner HTML injected at the marker
+ */
+export function injectHtmlAtMarker(outerHtml, innerHtml, marker) {
+    const decoder = new TextDecoder();
+    const encoder = new TextEncoder();
+    let buffer = "";
+    let injected = false;
+    return new ReadableStream({
+        async start(controller) {
+            const outerReader = outerHtml.getReader();
+            const flushText = (text) => {
+                if (text.length > 0) {
+                    controller.enqueue(encoder.encode(text));
+                }
+            };
+            const pumpInnerStream = async () => {
+                const innerReader = innerHtml.getReader();
+                try {
+                    while (true) {
+                        const { done, value } = await innerReader.read();
+                        if (done) {
+                            break;
+                        }
+                        controller.enqueue(value);
+                    }
+                }
+                finally {
+                    innerReader.releaseLock();
+                }
+            };
+            try {
+                while (true) {
+                    const { done, value } = await outerReader.read();
+                    if (done) {
+                        // End of outer stream - flush any remaining buffer
+                        if (buffer.length > 0) {
+                            flushText(buffer);
+                        }
+                        controller.close();
+                        break;
+                    }
+                    // Decode the chunk and add to buffer
+                    buffer += decoder.decode(value, { stream: true });
+                    if (!injected) {
+                        // Look for the marker in the buffer
+                        const markerIndex = buffer.indexOf(marker);
+                        if (markerIndex !== -1) {
+                            // Found the marker - emit everything before it
+                            flushText(buffer.slice(0, markerIndex));
+                            // Inject the inner HTML stream
+                            await pumpInnerStream();
+                            // Keep everything after the marker for next iteration
+                            buffer = buffer.slice(markerIndex + marker.length);
+                            injected = true;
+                        }
+                        else {
+                            // Marker not found yet - flush all but potential partial marker
+                            // Keep overlap to handle markers split across chunks
+                            const overlap = Math.max(0, marker.length - 1);
+                            const cutoff = Math.max(0, buffer.length - overlap);
+                            if (cutoff > 0) {
+                                flushText(buffer.slice(0, cutoff));
+                                buffer = buffer.slice(cutoff);
+                            }
+                        }
+                    }
+                    else {
+                        // Already injected - just pass through remaining content
+                        flushText(buffer);
+                        buffer = "";
+                    }
+                }
+            }
+            catch (error) {
+                controller.error(error);
+            }
+            finally {
+                outerReader.releaseLock();
+            }
+        },
+    });
+}
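
The new injectHtmlAtMarker helper splices one HTML stream into another at a text marker while staying fully streaming: it retains at most marker.length - 1 characters while searching (to catch markers split across chunks) and drops the marker itself from the output. A short usage sketch; the import specifier and the marker string are assumptions for illustration:

    // Illustrative only: path and marker are assumed, not part of the diff.
    import { injectHtmlAtMarker } from "rwsdk/dist/runtime/lib/injectHtmlAtMarker.js";

    // Blob#stream() turns a string into a ReadableStream<Uint8Array>.
    const toStream = (html: string): ReadableStream<Uint8Array> =>
      new Blob([html]).stream();

    const outerShell = toStream("<html><body><!--APP_HTML--></body></html>");
    const innerApp = toStream("<div>streamed app content</div>");

    // Emits the shell up to the marker, then the whole inner stream, then the
    // rest of the shell, without buffering either stream in full.
    const combined = injectHtmlAtMarker(outerShell, innerApp, "<!--APP_HTML-->");
    console.log(await new Response(combined).text());
    // -> <html><body><div>streamed app content</div></body></html>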

package/dist/runtime/lib/realtime/worker.d.ts

@@ -1,3 +1,3 @@
 import type { RealtimeDurableObject } from "./durableObject";
 export { renderRealtimeClients } from "./renderRealtimeClients";
-export declare const realtimeRoute: (getDurableObjectNamespace: (env: Cloudflare.Env) => DurableObjectNamespace<RealtimeDurableObject>) => import("../router").RouteDefinition<import("../../
+export declare const realtimeRoute: (getDurableObjectNamespace: (env: Cloudflare.Env) => DurableObjectNamespace<RealtimeDurableObject>) => import("../router").RouteDefinition<import("../../worker").RequestInfo<any, import("../../worker").DefaultAppContext>>;