rwsdk 1.0.0-beta.5 → 1.0.0-beta.51
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/rw-scripts.mjs +13 -13
- package/dist/lib/constants.d.mts +1 -0
- package/dist/lib/constants.mjs +7 -4
- package/dist/lib/e2e/browser.mjs +6 -2
- package/dist/lib/e2e/constants.d.mts +4 -0
- package/dist/lib/e2e/constants.mjs +49 -12
- package/dist/lib/e2e/dev.mjs +49 -57
- package/dist/lib/e2e/environment.d.mts +2 -0
- package/dist/lib/e2e/environment.mjs +201 -64
- package/dist/lib/e2e/index.d.mts +2 -0
- package/dist/lib/e2e/index.mjs +2 -0
- package/dist/lib/e2e/poll.d.mts +1 -1
- package/dist/lib/e2e/release.d.mts +1 -0
- package/dist/lib/e2e/release.mjs +57 -52
- package/dist/lib/e2e/tarball.mjs +2 -34
- package/dist/lib/e2e/testHarness.d.mts +39 -3
- package/dist/lib/e2e/testHarness.mjs +239 -92
- package/dist/lib/e2e/utils.d.mts +1 -0
- package/dist/lib/e2e/utils.mjs +15 -0
- package/dist/lib/normalizeModulePath.mjs +1 -1
- package/dist/runtime/client/client.d.ts +64 -2
- package/dist/runtime/client/client.js +156 -15
- package/dist/runtime/client/navigation.d.ts +45 -0
- package/dist/runtime/client/navigation.js +68 -14
- package/dist/runtime/client/navigationCache.d.ts +68 -0
- package/dist/runtime/client/navigationCache.js +294 -0
- package/dist/runtime/client/navigationCache.test.js +469 -0
- package/dist/runtime/client/types.d.ts +26 -5
- package/dist/runtime/client/types.js +8 -1
- package/dist/runtime/entries/no-react-server-ssr-bridge.d.ts +0 -0
- package/dist/runtime/entries/no-react-server-ssr-bridge.js +2 -0
- package/dist/runtime/entries/no-react-server.js +3 -1
- package/dist/runtime/entries/react-server-only.js +1 -1
- package/dist/runtime/entries/router.d.ts +1 -0
- package/dist/runtime/entries/routerClient.d.ts +1 -0
- package/dist/runtime/entries/routerClient.js +1 -0
- package/dist/runtime/entries/worker.d.ts +4 -0
- package/dist/runtime/entries/worker.js +4 -0
- package/dist/runtime/imports/__mocks__/use-client-lookup.d.ts +6 -0
- package/dist/runtime/imports/__mocks__/use-client-lookup.js +6 -0
- package/dist/runtime/lib/db/SqliteDurableObject.d.ts +2 -2
- package/dist/runtime/lib/db/SqliteDurableObject.js +2 -2
- package/dist/runtime/lib/db/createDb.d.ts +1 -2
- package/dist/runtime/lib/db/createDb.js +4 -0
- package/dist/runtime/lib/db/typeInference/builders/alterTable.d.ts +13 -3
- package/dist/runtime/lib/db/typeInference/builders/columnDefinition.d.ts +35 -21
- package/dist/runtime/lib/db/typeInference/builders/createTable.d.ts +9 -2
- package/dist/runtime/lib/db/typeInference/database.d.ts +16 -2
- package/dist/runtime/lib/db/typeInference/typetests/alterTable.typetest.js +80 -5
- package/dist/runtime/lib/db/typeInference/typetests/createTable.typetest.js +104 -2
- package/dist/runtime/lib/db/typeInference/typetests/testUtils.d.ts +1 -0
- package/dist/runtime/lib/db/typeInference/utils.d.ts +59 -9
- package/dist/runtime/lib/links.d.ts +21 -7
- package/dist/runtime/lib/links.js +84 -26
- package/dist/runtime/lib/links.test.d.ts +1 -0
- package/dist/runtime/lib/links.test.js +20 -0
- package/dist/runtime/lib/manifest.d.ts +1 -1
- package/dist/runtime/lib/manifest.js +7 -4
- package/dist/runtime/lib/realtime/client.js +28 -6
- package/dist/runtime/lib/realtime/worker.d.ts +1 -1
- package/dist/runtime/lib/router.d.ts +154 -35
- package/dist/runtime/lib/router.js +491 -105
- package/dist/runtime/lib/router.test.js +611 -1
- package/dist/runtime/lib/stitchDocumentAndAppStreams.d.ts +66 -0
- package/dist/runtime/lib/stitchDocumentAndAppStreams.js +302 -35
- package/dist/runtime/lib/stitchDocumentAndAppStreams.test.d.ts +1 -0
- package/dist/runtime/lib/stitchDocumentAndAppStreams.test.js +418 -0
- package/dist/runtime/lib/{rwContext.d.ts → types.d.ts} +1 -0
- package/dist/runtime/lib/types.js +1 -0
- package/dist/runtime/register/client.d.ts +1 -1
- package/dist/runtime/register/client.js +10 -3
- package/dist/runtime/register/worker.js +13 -4
- package/dist/runtime/render/normalizeActionResult.js +8 -1
- package/dist/runtime/render/renderDocumentHtmlStream.d.ts +1 -1
- package/dist/runtime/render/renderToStream.d.ts +4 -2
- package/dist/runtime/render/renderToStream.js +53 -24
- package/dist/runtime/render/renderToString.d.ts +3 -6
- package/dist/runtime/requestInfo/types.d.ts +5 -1
- package/dist/runtime/requestInfo/utils.d.ts +9 -0
- package/dist/runtime/requestInfo/utils.js +45 -0
- package/dist/runtime/requestInfo/worker.d.ts +0 -1
- package/dist/runtime/requestInfo/worker.js +5 -11
- package/dist/runtime/script.d.ts +1 -3
- package/dist/runtime/script.js +1 -10
- package/dist/runtime/server.d.ts +52 -0
- package/dist/runtime/server.js +88 -0
- package/dist/runtime/state.d.ts +3 -0
- package/dist/runtime/state.js +13 -0
- package/dist/runtime/worker.d.ts +3 -1
- package/dist/runtime/worker.js +45 -2
- package/dist/scripts/debug-sync.mjs +18 -20
- package/dist/scripts/worker-run.d.mts +1 -1
- package/dist/scripts/worker-run.mjs +59 -113
- package/dist/use-synced-state/SyncedStateServer.d.mts +36 -0
- package/dist/use-synced-state/SyncedStateServer.mjs +196 -0
- package/dist/use-synced-state/__tests__/SyncStateServer.test.d.mts +1 -0
- package/dist/use-synced-state/__tests__/SyncStateServer.test.mjs +116 -0
- package/dist/use-synced-state/__tests__/useSyncState.test.d.ts +1 -0
- package/dist/use-synced-state/__tests__/useSyncState.test.js +115 -0
- package/dist/use-synced-state/__tests__/useSyncedState.test.d.ts +1 -0
- package/dist/use-synced-state/__tests__/useSyncedState.test.js +115 -0
- package/dist/use-synced-state/__tests__/worker.test.d.mts +1 -0
- package/dist/use-synced-state/__tests__/worker.test.mjs +70 -0
- package/dist/use-synced-state/client-core.d.ts +29 -0
- package/dist/use-synced-state/client-core.js +103 -0
- package/dist/use-synced-state/client.d.ts +3 -0
- package/dist/use-synced-state/client.js +4 -0
- package/dist/use-synced-state/constants.d.mts +1 -0
- package/dist/use-synced-state/constants.mjs +1 -0
- package/dist/use-synced-state/useSyncedState.d.ts +21 -0
- package/dist/use-synced-state/useSyncedState.js +64 -0
- package/dist/use-synced-state/worker.d.mts +14 -0
- package/dist/use-synced-state/worker.mjs +135 -0
- package/dist/vite/buildApp.mjs +34 -2
- package/dist/vite/cloudflarePreInitPlugin.d.mts +11 -0
- package/dist/vite/cloudflarePreInitPlugin.mjs +40 -0
- package/dist/vite/configPlugin.mjs +9 -14
- package/dist/vite/constants.d.mts +1 -0
- package/dist/vite/constants.mjs +1 -0
- package/dist/vite/createDirectiveLookupPlugin.mjs +10 -7
- package/dist/vite/devServerTimingPlugin.mjs +4 -0
- package/dist/vite/diagnosticAssetGraphPlugin.d.mts +4 -0
- package/dist/vite/diagnosticAssetGraphPlugin.mjs +41 -0
- package/dist/vite/directiveModulesDevPlugin.mjs +9 -1
- package/dist/vite/directivesPlugin.mjs +4 -4
- package/dist/vite/envResolvers.d.mts +11 -0
- package/dist/vite/envResolvers.mjs +20 -0
- package/dist/vite/getViteEsbuild.mjs +2 -1
- package/dist/vite/hmrStabilityPlugin.d.mts +2 -0
- package/dist/vite/hmrStabilityPlugin.mjs +73 -0
- package/dist/vite/injectVitePreamblePlugin.mjs +0 -4
- package/dist/vite/knownDepsResolverPlugin.d.mts +0 -6
- package/dist/vite/knownDepsResolverPlugin.mjs +25 -17
- package/dist/vite/linkerPlugin.d.mts +2 -1
- package/dist/vite/linkerPlugin.mjs +11 -3
- package/dist/vite/linkerPlugin.test.mjs +15 -0
- package/dist/vite/miniflareHMRPlugin.mjs +6 -38
- package/dist/vite/moveStaticAssetsPlugin.mjs +35 -4
- package/dist/vite/redwoodPlugin.mjs +9 -11
- package/dist/vite/redwoodPlugin.test.mjs +4 -4
- package/dist/vite/runDirectivesScan.mjs +75 -19
- package/dist/vite/ssrBridgePlugin.mjs +132 -40
- package/dist/vite/ssrBridgeWrapPlugin.d.mts +2 -0
- package/dist/vite/ssrBridgeWrapPlugin.mjs +85 -0
- package/dist/vite/staleDepRetryPlugin.d.mts +2 -0
- package/dist/vite/staleDepRetryPlugin.mjs +74 -0
- package/dist/vite/statePlugin.d.mts +4 -0
- package/dist/vite/statePlugin.mjs +62 -0
- package/dist/vite/transformClientComponents.test.mjs +32 -0
- package/dist/vite/transformJsxScriptTagsPlugin.mjs +0 -5
- package/dist/vite/transformServerFunctions.mjs +66 -4
- package/dist/vite/transformServerFunctions.test.mjs +35 -0
- package/dist/vite/virtualPlugin.mjs +6 -7
- package/package.json +45 -20
- package/dist/vite/manifestPlugin.d.mts +0 -4
- package/dist/vite/manifestPlugin.mjs +0 -63
- /package/dist/runtime/{lib/rwContext.js → client/navigationCache.test.d.ts} +0 -0
package/dist/lib/e2e/environment.mjs
CHANGED

@@ -1,19 +1,88 @@
+import { createHash } from "crypto";
 import debug from "debug";
 import { copy, pathExists } from "fs-extra";
 import ignore from "ignore";
 import * as fs from "node:fs";
 import path from "node:path";
-import os from "os";
 import { basename, join, relative, resolve } from "path";
 import tmp from "tmp-promise";
 import { $ } from "../../lib/$.mjs";
 import { ROOT_DIR } from "../constants.mjs";
 import { INSTALL_DEPENDENCIES_RETRIES } from "./constants.mjs";
 import { retry } from "./retry.mjs";
+import { ensureTmpDir } from "./utils.mjs";
 const log = debug("rwsdk:e2e:environment");
+const IS_CACHE_ENABLED = !process.env.RWSDK_E2E_CACHE_DISABLED;
+if (IS_CACHE_ENABLED) {
+    log("E2E test caching is enabled.");
+}
+async function getProjectDependencyHash(projectDir) {
+    const hash = createHash("md5");
+    const dependencyFiles = [
+        "package.json",
+        "pnpm-lock.yaml",
+        "yarn.lock",
+        "package-lock.json",
+    ];
+    for (const file of dependencyFiles) {
+        const filePath = path.join(projectDir, file);
+        if (await pathExists(filePath)) {
+            const data = await fs.promises.readFile(filePath);
+            hash.update(path.basename(filePath));
+            hash.update(data);
+        }
+    }
+    return hash.digest("hex");
+}
+export async function getFilesRecursively(directory) {
+    const entries = await fs.promises.readdir(directory, { withFileTypes: true });
+    const files = await Promise.all(entries.map((entry) => {
+        const fullPath = path.join(directory, entry.name);
+        return entry.isDirectory() ? getFilesRecursively(fullPath) : fullPath;
+    }));
+    return files.flat();
+}
+export async function getDirectoryHash(directory) {
+    const hash = createHash("md5");
+    if (!(await pathExists(directory))) {
+        return "";
+    }
+    const files = await getFilesRecursively(directory);
+    files.sort();
+    for (const file of files) {
+        const relativePath = path.relative(directory, file);
+        const data = await fs.promises.readFile(file);
+        hash.update(relativePath.replace(/\\/g, "/")); // Normalize path separators
+        hash.update(data);
+    }
+    return hash.digest("hex");
+}
 const getTempDir = async () => {
-
+    const tmpDir = await ensureTmpDir();
+    const projectsTempDir = path.join(tmpDir, "e2e-projects");
+    await fs.promises.mkdir(projectsTempDir, { recursive: true });
+    const tempDir = await tmp.dir({
+        unsafeCleanup: true,
+        tmpdir: projectsTempDir,
+    });
+    // context(justinvdm, 2 Nov 2025): On Windows CI, tmp.dir() can return a
+    // short path (e.g., RUNNER~1). Vite's internals may later resolve this to a
+    // long path (e.g., runneradmin), causing alias resolution to fail due to
+    // path mismatch. Using realpathSync ensures we always use the canonical
+    // path, avoiding this inconsistency.
+    if (process.platform === "win32") {
+        tempDir.path = fs.realpathSync.native(tempDir.path);
+    }
+    await fs.promises.mkdir(tempDir.path, { recursive: true });
+    return tempDir;
 };
+function slugify(str) {
+    return str
+        .toLowerCase()
+        .replace(/[^a-z0-9-]/g, "-")
+        .replace(/--+/g, "-")
+        .replace(/^-|-$/g, "");
+}
 const createSdkTarball = async () => {
     const existingTarballPath = process.env.RWSKD_SMOKE_TEST_TARBALL_PATH;
     if (existingTarballPath) {

@@ -28,15 +97,32 @@ const createSdkTarball = async () => {
            }, // No-op cleanup
        };
    }
-
-
-
-
+    // Create a temporary directory to receive the tarball, ensuring a stable path.
+    let tempDir = await fs.promises.mkdtemp(path.join(await ensureTmpDir(), "rwsdk-tarball-"));
+    // context(justinvdm, 2 Nov 2025): Normalize the temp dir on Windows
+    // to prevent short/long path mismatches.
+    if (process.platform === "win32") {
+        tempDir = fs.realpathSync.native(tempDir);
+    }
+    await $({
+        cwd: ROOT_DIR,
+        stdio: "pipe",
+    }) `npm pack --pack-destination=${tempDir}`;
+    // We need to determine the tarball's name, as it's version-dependent.
+    // Running `npm pack --dry-run` gives us the filename without creating a file.
+    const packDryRun = await $({
+        cwd: ROOT_DIR,
+        stdio: "pipe",
+    }) `npm pack --dry-run`;
+    const tarballName = packDryRun.stdout?.trim();
+    const tarballPath = path.join(tempDir, tarballName);
+    if (!fs.existsSync(tarballPath)) {
+        throw new Error(`Tarball was not created in the expected location: ${tarballPath}`);
+    }
+    log(`📦 Created tarball in stable temp location: ${tarballPath}`);
     const cleanupTarball = async () => {
-
-
-        await fs.promises.rm(tarballPath, { force: true });
-    }
+        log(`🧹 Cleaning up tarball directory: ${tempDir}`);
+        await fs.promises.rm(tempDir, { recursive: true, force: true });
     };
     return { tarballPath, cleanupTarball };
 };

@@ -58,8 +144,13 @@ export async function copyProjectToTempDir(projectDir, resourceUniqueKey, packag
     // Determine the source directory to copy from
     const sourceDir = monorepoRoot || projectDir;
     // Create unique project directory name
+    // Format: {projectName}-t-{hash} (kept under 54 chars for Cloudflare limit)
     const originalDirName = basename(sourceDir);
-    const
+    const slugified = slugify(originalDirName);
+    // Truncate project name to leave room for "-t-" (3 chars) + hash (8 chars) = 11 chars
+    // Max project name: 54 - 11 = 43 chars
+    const truncatedProjectName = slugified.substring(0, 43);
+    const workerName = `${truncatedProjectName}-t-${resourceUniqueKey}`;
     const tempCopyRoot = resolve(tempDir.path, workerName);
     // If it's a monorepo, the targetDir for commands is a subdirectory
     const targetDir = monorepoRoot

@@ -115,7 +206,9 @@ export async function copyProjectToTempDir(projectDir, resourceUniqueKey, packag
     const workspaces = rwsdkWs.workspaces;
     if (packageManager === "pnpm") {
         const pnpmWsPath = join(tempCopyRoot, "pnpm-workspace.yaml");
-        const pnpmWsConfig = `packages:\n${workspaces
+        const pnpmWsConfig = `packages:\n${workspaces
+            .map((w) => ` - '${w}'`)
+            .join("\n")}\n`;
         await fs.promises.writeFile(pnpmWsPath, pnpmWsConfig);
         log("Created pnpm-workspace.yaml");
     }

@@ -133,31 +226,32 @@ export async function copyProjectToTempDir(projectDir, resourceUniqueKey, packag
     log("⚙️ Configuring temp project to not use frozen lockfile...");
     const npmrcPath = join(targetDir, ".npmrc");
     await fs.promises.writeFile(npmrcPath, "frozen-lockfile=false\n");
+    const tmpDir = await ensureTmpDir();
     if (packageManager === "yarn") {
         const yarnrcPath = join(targetDir, ".yarnrc.yml");
-        const yarnCacheDir = path.join(
+        const yarnCacheDir = path.join(tmpDir, "yarn-cache");
         await fs.promises.mkdir(yarnCacheDir, { recursive: true });
         const yarnConfig = [
             // todo(justinvdm, 23-09-23): Support yarn pnpm
             "nodeLinker: node-modules",
             "enableImmutableInstalls: false",
-            `cacheFolder: "${yarnCacheDir}"`,
+            `cacheFolder: "${yarnCacheDir.replace(/\\/g, "/")}"`,
         ].join("\n");
         await fs.promises.writeFile(yarnrcPath, yarnConfig);
         log("Created .yarnrc.yml to allow lockfile changes for yarn");
     }
     if (packageManager === "yarn-classic") {
         const yarnrcPath = join(targetDir, ".yarnrc");
-        const yarnCacheDir = path.join(
+        const yarnCacheDir = path.join(tmpDir, "yarn-classic-cache");
         await fs.promises.mkdir(yarnCacheDir, { recursive: true });
-        const yarnConfig = `cache-folder "${yarnCacheDir}"`;
+        const yarnConfig = `cache-folder "${yarnCacheDir.replace(/\\/g, "/")}"`;
         await fs.promises.writeFile(yarnrcPath, yarnConfig);
         log("Created .yarnrc with cache-folder for yarn-classic");
     }
     await setTarballDependency(targetDir, tarballFilename);
     // Install dependencies in the target directory
     const installDir = monorepoRoot ? tempCopyRoot : targetDir;
-    await retry(() => installDependencies(installDir, packageManager), {
+    await retry(() => installDependencies(installDir, packageManager, projectDir, monorepoRoot), {
         retries: INSTALL_DEPENDENCIES_RETRIES,
         delay: 1000,
     });

@@ -168,9 +262,61 @@ export async function copyProjectToTempDir(projectDir, resourceUniqueKey, packag
         await cleanupTarball();
     }
 }
-async function installDependencies(targetDir, packageManager = "pnpm") {
-
-
+async function installDependencies(targetDir, packageManager = "pnpm", projectDir, monorepoRoot) {
+    let cacheRoot = null;
+    let nodeModulesCachePath = null;
+    if (IS_CACHE_ENABLED) {
+        const dependencyHash = await getProjectDependencyHash(monorepoRoot || projectDir);
+        const cacheDirName = monorepoRoot
+            ? basename(monorepoRoot)
+            : basename(projectDir);
+        cacheRoot = path.join(await ensureTmpDir(), "rwsdk-e2e-cache", `${cacheDirName}-${dependencyHash.substring(0, 8)}`);
+        nodeModulesCachePath = path.join(cacheRoot, "node_modules");
+        if (await pathExists(nodeModulesCachePath)) {
+            console.log(`✅ CACHE HIT for dependencies: Found cached node_modules. Hard-linking from ${nodeModulesCachePath}`);
+            try {
+                const destNodeModules = join(targetDir, "node_modules");
+                if (process.platform === "win32") {
+                    await copy(nodeModulesCachePath, destNodeModules);
+                }
+                else {
+                    // On non-windows, use cp -al for performance
+                    await $("cp", ["-al", nodeModulesCachePath, targetDir]);
+                }
+                console.log(`✅ Cache restored successfully.`);
+                console.log(`📦 Installing local SDK into cached node_modules...`);
+                await runInstall(targetDir, packageManager, true);
+                return;
+            }
+            catch (e) {
+                console.warn(`⚠️ Cache restore failed. Error: ${e.message}. Proceeding with clean install.`);
+            }
+        }
+        else {
+            console.log(`ℹ️ CACHE MISS for dependencies: No cached node_modules found at ${nodeModulesCachePath}. Proceeding with clean installation.`);
+        }
+    }
+    await runInstall(targetDir, packageManager, false);
+    if (IS_CACHE_ENABLED && nodeModulesCachePath) {
+        console.log(`Caching node_modules to ${nodeModulesCachePath} for future runs...`);
+        await fs.promises.mkdir(path.dirname(nodeModulesCachePath), {
+            recursive: true,
+        });
+        if (process.platform === "win32") {
+            await copy(join(targetDir, "node_modules"), nodeModulesCachePath);
+        }
+        else {
+            await $("cp", [
+                "-al",
+                join(targetDir, "node_modules"),
+                nodeModulesCachePath,
+            ]);
+        }
+        console.log(`✅ node_modules cached successfully.`);
+    }
+}
+async function runInstall(targetDir, packageManager, isCacheHit) {
+    if (!isCacheHit) {
         // Clean up any pre-existing node_modules and lockfiles
         log("Cleaning up pre-existing node_modules and lockfiles...");
         await Promise.all([

@@ -183,51 +329,42 @@ async function installDependencies(targetDir, packageManager = "pnpm") {
             fs.promises.rm(join(targetDir, "package-lock.json"), { force: true }),
         ]);
         log("Cleanup complete.");
-
-
-
-
-
-
-
-
-
-    }
-    else if (packageManager === "yarn-classic") {
-        log(`Preparing yarn@1.22.19 with corepack...`);
-        await $("corepack", ["prepare", "yarn@1.x", "--activate"], {
-            cwd: targetDir,
-            stdio: "pipe",
-        });
-    }
+    }
+    if (packageManager.startsWith("yarn")) {
+        log(`Enabling corepack...`);
+        await $("corepack", ["enable"], { cwd: targetDir, stdio: "pipe" });
+        if (packageManager === "yarn") {
+            log(`Preparing yarn@stable with corepack...`);
+            await $("corepack", ["prepare", "yarn@stable", "--activate"], {
+                cwd: targetDir,
+                stdio: "pipe",
+            });
         }
-
-
-
-
-
-
-        "yarn-classic": ["yarn"],
-    }[packageManager];
-    // Run install command in the target directory
-    log(`Running ${installCommand.join(" ")}`);
-    const [command, ...args] = installCommand;
-    const result = await $(command, args, {
-        cwd: targetDir,
-        stdio: "pipe", // Capture output
-        env: {
-            YARN_ENABLE_HARDENED_MODE: "0",
-        },
-    });
-    console.log("✅ Dependencies installed successfully");
-    // Log installation details at debug level
-    if (result.stdout) {
-        log(`${packageManager} install output: %s`, result.stdout);
+        else if (packageManager === "yarn-classic") {
+            log(`Preparing yarn@1.22.19 with corepack...`);
+            await $("corepack", ["prepare", "yarn@1.x", "--activate"], {
+                cwd: targetDir,
+                stdio: "pipe",
+            });
         }
     }
-
-
-
-
-
+    const npmCacheDir = path.join(await ensureTmpDir(), "npm-cache");
+    await fs.promises.mkdir(npmCacheDir, { recursive: true });
+    const installCommand = {
+        pnpm: ["pnpm", "install", "--reporter=silent"],
+        npm: ["npm", "install", "--cache", npmCacheDir, "--silent"],
+        yarn: ["yarn", "install", "--silent"],
+        "yarn-classic": ["yarn", "--silent"],
+    }[packageManager];
+    // Run install command in the target directory
+    log(`Running ${installCommand.join(" ")}`);
+    const [command, ...args] = installCommand;
+    await $(command, args, {
+        cwd: targetDir,
+        stdio: "pipe",
+        env: {
+            YARN_ENABLE_HARDENED_MODE: "0",
+        },
+    });
+    console.log("✅ Dependencies installed successfully");
 }
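Editor's note: the dependency caching added above keys the shared node_modules cache on an MD5 digest of whichever dependency manifests are present. The sketch below restates that cache-key idea outside the harness; it is illustrative only, and the function name and cache-directory layout are not part of the package's API.

```js
// Sketch: derive a short cache key from a project's dependency manifests,
// mirroring getProjectDependencyHash in the diff above. Names are illustrative.
import { createHash } from "node:crypto";
import { existsSync } from "node:fs";
import { readFile } from "node:fs/promises";
import path from "node:path";

async function dependencyCacheKey(projectDir) {
  const hash = createHash("md5");
  for (const file of ["package.json", "pnpm-lock.yaml", "yarn.lock", "package-lock.json"]) {
    const filePath = path.join(projectDir, file);
    if (existsSync(filePath)) {
      hash.update(path.basename(filePath)); // which manifest contributed
      hash.update(await readFile(filePath)); // and its exact contents
    }
  }
  // The harness keeps only the first 8 hex chars for the cache directory name.
  return hash.digest("hex").substring(0, 8);
}

// e.g. a cache directory like <tmp>/rwsdk-e2e-cache/my-app-<key>
console.log(await dependencyCacheKey(process.cwd()));
```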
package/dist/lib/e2e/index.d.mts
CHANGED

@@ -1,3 +1,4 @@
+export * from "../$.mjs";
 export * from "./browser.mjs";
 export * from "./dev.mjs";
 export * from "./environment.mjs";

@@ -5,4 +6,5 @@ export * from "./poll.mjs";
 export * from "./release.mjs";
 export * from "./tarball.mjs";
 export * from "./testHarness.mjs";
+export { SKIP_DEPLOYMENT_TESTS, SKIP_DEV_SERVER_TESTS, } from "./testHarness.mjs";
 export * from "./types.mjs";
package/dist/lib/e2e/index.mjs
CHANGED

@@ -1,3 +1,4 @@
+export * from "../$.mjs";
 export * from "./browser.mjs";
 export * from "./dev.mjs";
 export * from "./environment.mjs";

@@ -5,4 +6,5 @@ export * from "./poll.mjs";
 export * from "./release.mjs";
 export * from "./tarball.mjs";
 export * from "./testHarness.mjs";
+export { SKIP_DEPLOYMENT_TESTS, SKIP_DEV_SERVER_TESTS, } from "./testHarness.mjs";
 export * from "./types.mjs";
package/dist/lib/e2e/poll.d.mts
CHANGED

@@ -4,5 +4,5 @@ export interface PollOptions {
     minTries: number;
     onRetry?: (error: unknown, tries: number) => void;
 }
-export declare function poll(fn: () => Promise<boolean>, options?: Partial<PollOptions>): Promise<void>;
+export declare function poll(fn: () => boolean | Promise<boolean>, options?: Partial<PollOptions>): Promise<void>;
 export declare function pollValue<T>(fn: () => Promise<T>, options?: Partial<PollOptions>): Promise<T>;
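Editor's note: the widened poll signature above means callers can now pass a synchronous predicate as well as an async one. A usage sketch under that type follows; the import path and option values are assumed for illustration, and only options declared in PollOptions above are used.

```js
// Usage sketch: poll() can now take a plain boolean-returning function.
import { poll } from "rwsdk/dist/lib/e2e/poll.mjs"; // import path assumed for illustration

let serverReady = false;
setTimeout(() => { serverReady = true; }, 500);

await poll(() => serverReady, {
  minTries: 1,
  onRetry: (_error, tries) => console.log(`not ready yet (attempt ${tries})`),
});
console.log("server is ready");
```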
package/dist/lib/e2e/release.d.mts
CHANGED

@@ -39,6 +39,7 @@ export declare function runRelease(cwd: string, projectDir: string, resourceUniq
 /**
  * Check if a resource name includes a specific resource unique key
  * This is used to identify resources created during our tests
+ * Handles both full format (adjective-animal-hash) and hash-only format
  */
 export declare function isRelatedToTest(resourceName: string, resourceUniqueKey: string): boolean;
 /**
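Editor's note: the hash-only handling documented above is implemented in release.mjs below, where only the first 8 characters of the key's hash segment are compared against the resource name. A small illustration of that matching, with invented key values:

```js
// Illustration of isRelatedToTest's hash-part matching (values are made up).
import { isRelatedToTest } from "rwsdk/dist/lib/e2e/release.mjs"; // import path assumed

// Full format: the last dash-separated segment is treated as the hash.
isRelatedToTest("my-app-t-1a2b3c4d", "brave-otter-1a2b3c4d9f"); // true  (first 8 chars match)

// Hash-only format: the key is used as-is, truncated to 8 characters.
isRelatedToTest("my-app-t-1a2b3c4d", "1a2b3c4d9f"); // true
isRelatedToTest("my-app-t-deadbeef", "1a2b3c4d9f"); // false
```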
package/dist/lib/e2e/release.mjs
CHANGED

@@ -8,6 +8,7 @@ import { setTimeout } from "node:timers/promises";
 import { basename, dirname, join, resolve } from "path";
 import { $ } from "../../lib/$.mjs";
 import { extractLastJson, parseJson } from "../../lib/jsonUtils.mjs";
+import { IS_DEBUG_MODE } from "./constants.mjs";
 const log = debug("rwsdk:e2e:release");
 /**
  * Find wrangler cache by searching up the directory tree for node_modules/.cache/wrangler

@@ -44,10 +45,6 @@ export async function $expect(command, expectations, options = {
     reject: true,
 }) {
     return new Promise((resolve, reject) => {
-        log("$expect starting with command: %s", command);
-        log("Working directory: %s", options.cwd ?? process.cwd());
-        log("Expected patterns: %O", expectations.map((e) => e.expect.toString()));
-        console.log(`Running command: ${command}`);
         // Spawn the process with pipes for interaction
         const childProcess = execaCommand(command, {
             cwd: options.cwd ?? process.cwd(),

@@ -55,7 +52,6 @@ export async function $expect(command, expectations, options = {
             reject: false, // Never reject so we can handle the error ourselves
             env: options.env ?? process.env,
         });
-        log("Process spawned with PID: %s", childProcess.pid);
         let stdout = "";
         let stderr = "";
         let buffer = "";

@@ -67,15 +63,16 @@ export async function $expect(command, expectations, options = {
         // Initialize match count for each pattern
         expectations.forEach(({ expect: expectPattern }) => {
             matchHistory.set(expectPattern, 0);
-            log("Initialized pattern match count for: %s", expectPattern.toString());
         });
         // Collect stdout
         childProcess.stdout?.on("data", (data) => {
             const chunk = data.toString();
             stdout += chunk;
             buffer += chunk;
-            // Print to console
-
+            // Print to console in debug mode
+            if (IS_DEBUG_MODE) {
+                process.stdout.write(chunk);
+            }
             // Only process expectations that haven't been fully matched yet
             // and in the order they were provided
             while (currentExpectationIndex < expectations.length) {

@@ -85,9 +82,6 @@ export async function $expect(command, expectations, options = {
                     : new RegExp(expectPattern, "m");
                 // Only search in the unmatched portion of the buffer
                 const searchBuffer = buffer.substring(lastMatchIndex);
-                log("Testing pattern: %s against buffer from position %d (%d chars)", pattern.toString(), lastMatchIndex, searchBuffer.length);
-                // Enhanced debugging: show actual search buffer content
-                log("Search buffer content for debugging: %O", searchBuffer);
                 const match = searchBuffer.match(pattern);
                 if (match) {
                     // Found a match

@@ -98,30 +92,21 @@ export async function $expect(command, expectations, options = {
                     const matchStartPosition = lastMatchIndex + match.index;
                     const matchEndPosition = matchStartPosition + match[0].length;
                     lastMatchIndex = matchEndPosition;
-                    log(`Pattern matched: "${patternStr}" (occurrence #${matchCount + 1}) at position ${matchStartPosition}-${matchEndPosition}`);
-                    // Only send a response if one is specified
                     if (send) {
-                        log(`Sending response: "${send.replace(/\r/g, "\\r")}" to stdin`);
                         childProcess.stdin?.write(send);
                     }
-                    else {
-                        log(`Pattern "${patternStr}" matched (verification only)`);
-                    }
                     // Increment the match count for this pattern
                     matchHistory.set(expectPattern, matchCount + 1);
-                    log("Updated match count for %s: %d", patternStr, matchCount + 1);
                     // Move to the next expectation
                     currentExpectationIndex++;
                     // If we've processed all expectations but need to wait for stdin response,
                     // delay closing stdin until the next data event
                     if (currentExpectationIndex >= expectations.length && send) {
-                        log("All patterns matched, closing stdin after last response");
                         childProcess.stdin?.end();
                     }
                     break; // Exit the while loop to process next chunk
                 }
                 else {
-                    log("Pattern not matched. Attempting to diagnose the mismatch:");
                     // Try to find the closest substring that might partially match
                     const patternString = pattern.toString();
                     const patternCore = patternString.substring(1, patternString.lastIndexOf("/") > 0

@@ -132,7 +117,6 @@ export async function $expect(command, expectations, options = {
                         const partialPattern = patternCore.substring(0, i);
                         const partialRegex = new RegExp(partialPattern, "m");
                         const matches = partialRegex.test(searchBuffer);
-                        log(" Partial pattern '%s': %s", partialPattern, matches ? "matched" : "not matched");
                         // Once we find where the matching starts to fail, stop
                         if (!matches)
                             break;

@@ -144,7 +128,6 @@ export async function $expect(command, expectations, options = {
             // If all expectations have been matched, we can close stdin if not already closed
             if (currentExpectationIndex >= expectations.length &&
                 childProcess.stdin?.writable) {
-                log("All patterns matched, ensuring stdin is closed");
                 childProcess.stdin.end();
             }
         });

@@ -153,26 +136,19 @@ export async function $expect(command, expectations, options = {
             childProcess.stderr.on("data", (data) => {
                 const chunk = data.toString();
                 stderr += chunk;
-                // Also write stderr to console
-
+                // Also write stderr to console in debug mode
+                if (IS_DEBUG_MODE) {
+                    process.stderr.write(chunk);
+                }
             });
         }
         // Handle process completion
         childProcess.on("close", (code) => {
             log("Process closed with code: %s", code);
-            // Log the number of matches for each pattern
-            log("Pattern match summary:");
-            for (const [pattern, count] of matchHistory.entries()) {
-                log(` - "${pattern.toString()}": ${count} matches`);
-            }
             // Check if any required patterns were not matched
             const unmatchedPatterns = Array.from(matchHistory.entries())
                 .filter(([_, count]) => count === 0)
                 .map(([pattern, _]) => pattern.toString());
-            if (unmatchedPatterns.length > 0) {
-                log("WARNING: Some expected patterns were not matched: %O", unmatchedPatterns);
-            }
-            log("$expect completed. Total stdout: %d bytes, stderr: %d bytes", stdout.length, stderr.length);
             resolve({ stdout, stderr, code });
         });
         childProcess.on("error", (err) => {

@@ -274,9 +250,15 @@ export async function runRelease(cwd, projectDir, resourceUniqueKey) {
     await ensureCloudflareAccountId(cwd, projectDir);
     // Extract worker name from directory name to ensure consistency
     const dirName = cwd ? basename(cwd) : "unknown-worker";
+    // Extract hash part from resourceUniqueKey for matching
+    // resourceUniqueKey format is typically "adjective-animal-hash" or just "hash"
+    const hashPart = resourceUniqueKey.includes("-")
+        ? resourceUniqueKey.split("-").pop() || resourceUniqueKey.substring(0, 8)
+        : resourceUniqueKey.substring(0, 8);
+    const uniqueKeyForMatching = hashPart.substring(0, 8);
     // Ensure resource unique key is included in worker name for tracking
-    if (resourceUniqueKey && !dirName.includes(
-        log(`Worker name doesn't contain our unique key, this is unexpected: ${dirName}, key: ${
+    if (resourceUniqueKey && !dirName.includes(uniqueKeyForMatching)) {
+        log(`Worker name doesn't contain our unique key, this is unexpected: ${dirName}, key: ${uniqueKeyForMatching}`);
         console.log(`⚠️ Worker name doesn't contain our unique key. This might cause cleanup issues.`);
     }
     // Ensure the worker name in wrangler.jsonc matches our unique name

@@ -315,6 +297,36 @@ export async function runRelease(cwd, projectDir, resourceUniqueKey) {
     const MAX_RETRIES = 3;
     let lastError = null;
     let result = null;
+    const formatReleaseCommandFailure = (res) => {
+        let message = `Release command failed with exit code ${res.code}`;
+        const stdout = res.stdout?.trim() ? res.stdout : "";
+        const stderr = res.stderr?.trim() ? res.stderr : "";
+        if (stderr) {
+            const errorLines = stderr
+                .split("\n")
+                .filter((line) => line.includes("ERROR") ||
+                line.includes("error:") ||
+                line.includes("failed"))
+                .slice(0, 3)
+                .join("\n");
+            if (errorLines) {
+                message += `\nError details: ${errorLines}`;
+            }
+        }
+        if (stdout) {
+            const stdoutTail = stdout.split("\n").slice(-40).join("\n").trim();
+            if (stdoutTail) {
+                message += `\n\nstdout (tail):\n${stdoutTail}`;
+            }
+        }
+        if (stderr) {
+            const stderrTail = stderr.split("\n").slice(-40).join("\n").trim();
+            if (stderrTail) {
+                message += `\n\nstderr (tail):\n${stderrTail}`;
+            }
+        }
+        return message;
+    };
     for (let i = 0; i < MAX_RETRIES; i++) {
         try {
             console.log(`\n🚀 Deploying worker to Cloudflare (Attempt ${i + 1}/${MAX_RETRIES})...`);

@@ -341,7 +353,7 @@ export async function runRelease(cwd, projectDir, resourceUniqueKey) {
                 break; // Exit the loop on success
             }
             else {
-                throw new Error(
+                throw new Error(formatReleaseCommandFailure(result));
             }
         }
         catch (error) {

@@ -360,21 +372,7 @@ export async function runRelease(cwd, projectDir, resourceUniqueKey) {
     // Check exit code to ensure command succeeded
     if (result.code !== 0) {
         // Add more contextual information about the error
-
-        // Add stderr output to the error message if available
-        if (result.stderr && result.stderr.trim().length > 0) {
-            // Extract the most relevant part of the error message
-            const errorLines = result.stderr
-                .split("\n")
-                .filter((line) => line.includes("ERROR") ||
-                line.includes("error:") ||
-                line.includes("failed"))
-                .slice(0, 3) // Take just the first few error lines
-                .join("\n");
-            if (errorLines) {
-                errorMessage += `\nError details: ${errorLines}`;
-            }
-        }
+        const errorMessage = formatReleaseCommandFailure(result);
         log("ERROR: %s", errorMessage);
         throw new Error(errorMessage);
     }

@@ -405,9 +403,16 @@
 /**
  * Check if a resource name includes a specific resource unique key
  * This is used to identify resources created during our tests
+ * Handles both full format (adjective-animal-hash) and hash-only format
  */
 export function isRelatedToTest(resourceName, resourceUniqueKey) {
-
+    // Extract hash part if resourceUniqueKey contains dashes (full format)
+    // Otherwise use as-is (hash-only format)
+    const hashPart = resourceUniqueKey.includes("-")
+        ? resourceUniqueKey.split("-").pop() || resourceUniqueKey.substring(0, 8)
+        : resourceUniqueKey;
+    const uniqueKeyForMatching = hashPart.substring(0, 8);
+    return resourceName.includes(uniqueKeyForMatching);
 }
 /**
  * Delete the worker using wrangler