rwsdk 1.0.0-beta.3 → 1.0.0-beta.30-test.20251119220440
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/lib/constants.d.mts +1 -0
- package/dist/lib/constants.mjs +7 -4
- package/dist/lib/e2e/browser.mjs +6 -2
- package/dist/lib/e2e/constants.d.mts +16 -0
- package/dist/lib/e2e/constants.mjs +77 -0
- package/dist/lib/e2e/dev.mjs +37 -49
- package/dist/lib/e2e/environment.d.mts +2 -0
- package/dist/lib/e2e/environment.mjs +202 -65
- package/dist/lib/e2e/index.d.mts +1 -0
- package/dist/lib/e2e/index.mjs +1 -0
- package/dist/lib/e2e/poll.d.mts +1 -1
- package/dist/lib/e2e/release.d.mts +1 -0
- package/dist/lib/e2e/release.mjs +16 -32
- package/dist/lib/e2e/tarball.mjs +2 -34
- package/dist/lib/e2e/testHarness.d.mts +36 -4
- package/dist/lib/e2e/testHarness.mjs +216 -128
- package/dist/lib/e2e/utils.d.mts +1 -0
- package/dist/lib/e2e/utils.mjs +15 -0
- package/dist/runtime/client/client.d.ts +35 -0
- package/dist/runtime/client/client.js +35 -0
- package/dist/runtime/client/navigation.d.ts +49 -0
- package/dist/runtime/client/navigation.js +80 -31
- package/dist/runtime/entries/clientSSR.d.ts +1 -0
- package/dist/runtime/entries/clientSSR.js +3 -0
- package/dist/runtime/entries/router.d.ts +1 -0
- package/dist/runtime/entries/routerClient.d.ts +1 -0
- package/dist/runtime/entries/routerClient.js +1 -0
- package/dist/runtime/entries/worker.d.ts +2 -0
- package/dist/runtime/entries/worker.js +2 -0
- package/dist/runtime/imports/__mocks__/use-client-lookup.d.ts +6 -0
- package/dist/runtime/imports/__mocks__/use-client-lookup.js +6 -0
- package/dist/runtime/lib/db/SqliteDurableObject.d.ts +2 -2
- package/dist/runtime/lib/db/SqliteDurableObject.js +2 -2
- package/dist/runtime/lib/db/createDb.d.ts +1 -2
- package/dist/runtime/lib/db/createDb.js +4 -0
- package/dist/runtime/lib/db/typeInference/builders/alterTable.d.ts +13 -3
- package/dist/runtime/lib/db/typeInference/builders/columnDefinition.d.ts +35 -21
- package/dist/runtime/lib/db/typeInference/builders/createTable.d.ts +9 -2
- package/dist/runtime/lib/db/typeInference/database.d.ts +16 -2
- package/dist/runtime/lib/db/typeInference/typetests/alterTable.typetest.js +80 -5
- package/dist/runtime/lib/db/typeInference/typetests/createTable.typetest.js +104 -2
- package/dist/runtime/lib/db/typeInference/typetests/testUtils.d.ts +1 -0
- package/dist/runtime/lib/db/typeInference/utils.d.ts +59 -9
- package/dist/runtime/lib/links.d.ts +18 -7
- package/dist/runtime/lib/links.js +70 -24
- package/dist/runtime/lib/links.test.js +20 -0
- package/dist/runtime/lib/manifest.d.ts +1 -1
- package/dist/runtime/lib/manifest.js +7 -4
- package/dist/runtime/lib/realtime/client.js +8 -2
- package/dist/runtime/lib/realtime/worker.d.ts +1 -1
- package/dist/runtime/lib/router.d.ts +147 -33
- package/dist/runtime/lib/router.js +169 -20
- package/dist/runtime/lib/router.test.js +241 -0
- package/dist/runtime/lib/stitchDocumentAndAppStreams.d.ts +66 -0
- package/dist/runtime/lib/stitchDocumentAndAppStreams.js +302 -35
- package/dist/runtime/lib/stitchDocumentAndAppStreams.test.d.ts +1 -0
- package/dist/runtime/lib/stitchDocumentAndAppStreams.test.js +418 -0
- package/dist/runtime/lib/{rwContext.d.ts → types.d.ts} +1 -0
- package/dist/runtime/lib/types.js +1 -0
- package/dist/runtime/render/renderDocumentHtmlStream.d.ts +1 -1
- package/dist/runtime/render/renderToStream.d.ts +4 -2
- package/dist/runtime/render/renderToStream.js +53 -24
- package/dist/runtime/render/renderToString.d.ts +3 -1
- package/dist/runtime/requestInfo/types.d.ts +4 -1
- package/dist/runtime/requestInfo/utils.d.ts +9 -0
- package/dist/runtime/requestInfo/utils.js +44 -0
- package/dist/runtime/requestInfo/worker.d.ts +0 -1
- package/dist/runtime/requestInfo/worker.js +3 -10
- package/dist/runtime/script.d.ts +1 -3
- package/dist/runtime/script.js +1 -10
- package/dist/runtime/state.d.ts +3 -0
- package/dist/runtime/state.js +13 -0
- package/dist/runtime/worker.d.ts +3 -1
- package/dist/runtime/worker.js +26 -0
- package/dist/scripts/debug-sync.mjs +18 -20
- package/dist/scripts/worker-run.d.mts +1 -1
- package/dist/scripts/worker-run.mjs +52 -113
- package/dist/use-synced-state/SyncStateServer.d.mts +20 -0
- package/dist/use-synced-state/SyncStateServer.mjs +124 -0
- package/dist/use-synced-state/__tests__/SyncStateServer.test.d.mts +1 -0
- package/dist/use-synced-state/__tests__/SyncStateServer.test.mjs +109 -0
- package/dist/use-synced-state/__tests__/useSyncState.test.d.ts +1 -0
- package/dist/use-synced-state/__tests__/useSyncState.test.js +115 -0
- package/dist/use-synced-state/__tests__/useSyncedState.test.d.ts +1 -0
- package/dist/use-synced-state/__tests__/useSyncedState.test.js +115 -0
- package/dist/use-synced-state/__tests__/worker.test.d.mts +1 -0
- package/dist/use-synced-state/__tests__/worker.test.mjs +69 -0
- package/dist/use-synced-state/client.d.ts +28 -0
- package/dist/use-synced-state/client.js +39 -0
- package/dist/use-synced-state/constants.d.mts +1 -0
- package/dist/use-synced-state/constants.mjs +1 -0
- package/dist/use-synced-state/useSyncState.d.ts +20 -0
- package/dist/use-synced-state/useSyncState.js +58 -0
- package/dist/use-synced-state/useSyncedState.d.ts +20 -0
- package/dist/use-synced-state/useSyncedState.js +58 -0
- package/dist/use-synced-state/worker.d.mts +14 -0
- package/dist/use-synced-state/worker.mjs +73 -0
- package/dist/vite/buildApp.mjs +34 -2
- package/dist/vite/configPlugin.mjs +8 -14
- package/dist/vite/constants.d.mts +1 -0
- package/dist/vite/constants.mjs +1 -0
- package/dist/vite/createDirectiveLookupPlugin.mjs +4 -0
- package/dist/vite/devServerTimingPlugin.mjs +4 -0
- package/dist/vite/directiveModulesDevPlugin.mjs +9 -1
- package/dist/vite/directivesPlugin.mjs +4 -0
- package/dist/vite/envResolvers.d.mts +11 -0
- package/dist/vite/envResolvers.mjs +20 -0
- package/dist/vite/getViteEsbuild.mjs +2 -1
- package/dist/vite/hmrStabilityPlugin.d.mts +2 -0
- package/dist/vite/hmrStabilityPlugin.mjs +73 -0
- package/dist/vite/knownDepsResolverPlugin.d.mts +0 -6
- package/dist/vite/knownDepsResolverPlugin.mjs +32 -14
- package/dist/vite/linkerPlugin.d.mts +2 -1
- package/dist/vite/linkerPlugin.mjs +11 -3
- package/dist/vite/linkerPlugin.test.mjs +15 -0
- package/dist/vite/miniflareHMRPlugin.mjs +6 -38
- package/dist/vite/moveStaticAssetsPlugin.mjs +14 -4
- package/dist/vite/redwoodPlugin.mjs +6 -10
- package/dist/vite/runDirectivesScan.mjs +59 -14
- package/dist/vite/ssrBridgePlugin.mjs +126 -34
- package/dist/vite/ssrBridgeWrapPlugin.d.mts +2 -0
- package/dist/vite/ssrBridgeWrapPlugin.mjs +85 -0
- package/dist/vite/staleDepRetryPlugin.d.mts +2 -0
- package/dist/vite/staleDepRetryPlugin.mjs +74 -0
- package/dist/vite/statePlugin.d.mts +4 -0
- package/dist/vite/statePlugin.mjs +62 -0
- package/package.json +26 -10
- package/dist/vite/manifestPlugin.d.mts +0 -4
- package/dist/vite/manifestPlugin.mjs +0 -63
- /package/dist/runtime/lib/{rwContext.js → links.test.d.ts} +0 -0
package/dist/lib/e2e/environment.mjs
CHANGED
@@ -1,19 +1,88 @@
+import { createHash } from "crypto";
 import debug from "debug";
 import { copy, pathExists } from "fs-extra";
 import ignore from "ignore";
 import * as fs from "node:fs";
 import path from "node:path";
-import os from "os";
 import { basename, join, relative, resolve } from "path";
 import tmp from "tmp-promise";
 import { $ } from "../../lib/$.mjs";
 import { ROOT_DIR } from "../constants.mjs";
+import { INSTALL_DEPENDENCIES_RETRIES } from "./constants.mjs";
 import { retry } from "./retry.mjs";
-import {
+import { ensureTmpDir } from "./utils.mjs";
 const log = debug("rwsdk:e2e:environment");
+const IS_CACHE_ENABLED = !process.env.RWSDK_E2E_CACHE_DISABLED;
+if (IS_CACHE_ENABLED) {
+log("E2E test caching is enabled.");
+}
+async function getProjectDependencyHash(projectDir) {
+const hash = createHash("md5");
+const dependencyFiles = [
+"package.json",
+"pnpm-lock.yaml",
+"yarn.lock",
+"package-lock.json",
+];
+for (const file of dependencyFiles) {
+const filePath = path.join(projectDir, file);
+if (await pathExists(filePath)) {
+const data = await fs.promises.readFile(filePath);
+hash.update(path.basename(filePath));
+hash.update(data);
+}
+}
+return hash.digest("hex");
+}
+export async function getFilesRecursively(directory) {
+const entries = await fs.promises.readdir(directory, { withFileTypes: true });
+const files = await Promise.all(entries.map((entry) => {
+const fullPath = path.join(directory, entry.name);
+return entry.isDirectory() ? getFilesRecursively(fullPath) : fullPath;
+}));
+return files.flat();
+}
+export async function getDirectoryHash(directory) {
+const hash = createHash("md5");
+if (!(await pathExists(directory))) {
+return "";
+}
+const files = await getFilesRecursively(directory);
+files.sort();
+for (const file of files) {
+const relativePath = path.relative(directory, file);
+const data = await fs.promises.readFile(file);
+hash.update(relativePath.replace(/\\/g, "/")); // Normalize path separators
+hash.update(data);
+}
+return hash.digest("hex");
+}
 const getTempDir = async () => {
-
+const tmpDir = await ensureTmpDir();
+const projectsTempDir = path.join(tmpDir, "e2e-projects");
+await fs.promises.mkdir(projectsTempDir, { recursive: true });
+const tempDir = await tmp.dir({
+unsafeCleanup: true,
+tmpdir: projectsTempDir,
+});
+// context(justinvdm, 2 Nov 2025): On Windows CI, tmp.dir() can return a
+// short path (e.g., RUNNER~1). Vite's internals may later resolve this to a
+// long path (e.g., runneradmin), causing alias resolution to fail due to
+// path mismatch. Using realpathSync ensures we always use the canonical
+// path, avoiding this inconsistency.
+if (process.platform === "win32") {
+tempDir.path = fs.realpathSync.native(tempDir.path);
+}
+await fs.promises.mkdir(tempDir.path, { recursive: true });
+return tempDir;
 };
+function slugify(str) {
+return str
+.toLowerCase()
+.replace(/[^a-z0-9-]/g, "-")
+.replace(/--+/g, "-")
+.replace(/^-|-$/g, "");
+}
 const createSdkTarball = async () => {
 const existingTarballPath = process.env.RWSKD_SMOKE_TEST_TARBALL_PATH;
 if (existingTarballPath) {
@@ -28,15 +97,32 @@ const createSdkTarball = async () => {
 }, // No-op cleanup
 };
 }
-
-
-
-
+// Create a temporary directory to receive the tarball, ensuring a stable path.
+let tempDir = await fs.promises.mkdtemp(path.join(await ensureTmpDir(), "rwsdk-tarball-"));
+// context(justinvdm, 2 Nov 2025): Normalize the temp dir on Windows
+// to prevent short/long path mismatches.
+if (process.platform === "win32") {
+tempDir = fs.realpathSync.native(tempDir);
+}
+await $({
+cwd: ROOT_DIR,
+stdio: "pipe",
+}) `npm pack --pack-destination=${tempDir}`;
+// We need to determine the tarball's name, as it's version-dependent.
+// Running `npm pack --dry-run` gives us the filename without creating a file.
+const packDryRun = await $({
+cwd: ROOT_DIR,
+stdio: "pipe",
+}) `npm pack --dry-run`;
+const tarballName = packDryRun.stdout?.trim();
+const tarballPath = path.join(tempDir, tarballName);
+if (!fs.existsSync(tarballPath)) {
+throw new Error(`Tarball was not created in the expected location: ${tarballPath}`);
+}
+log(`📦 Created tarball in stable temp location: ${tarballPath}`);
 const cleanupTarball = async () => {
-
-
-await fs.promises.rm(tarballPath, { force: true });
-}
+log(`🧹 Cleaning up tarball directory: ${tempDir}`);
+await fs.promises.rm(tempDir, { recursive: true, force: true });
 };
 return { tarballPath, cleanupTarball };
 };
@@ -58,8 +144,13 @@ export async function copyProjectToTempDir(projectDir, resourceUniqueKey, packag
 // Determine the source directory to copy from
 const sourceDir = monorepoRoot || projectDir;
 // Create unique project directory name
+// Format: {projectName}-t-{hash} (kept under 54 chars for Cloudflare limit)
 const originalDirName = basename(sourceDir);
-const
+const slugified = slugify(originalDirName);
+// Truncate project name to leave room for "-t-" (3 chars) + hash (8 chars) = 11 chars
+// Max project name: 54 - 11 = 43 chars
+const truncatedProjectName = slugified.substring(0, 43);
+const workerName = `${truncatedProjectName}-t-${resourceUniqueKey}`;
 const tempCopyRoot = resolve(tempDir.path, workerName);
 // If it's a monorepo, the targetDir for commands is a subdirectory
 const targetDir = monorepoRoot
@@ -115,7 +206,9 @@ export async function copyProjectToTempDir(projectDir, resourceUniqueKey, packag
 const workspaces = rwsdkWs.workspaces;
 if (packageManager === "pnpm") {
 const pnpmWsPath = join(tempCopyRoot, "pnpm-workspace.yaml");
-const pnpmWsConfig = `packages:\n${workspaces
+const pnpmWsConfig = `packages:\n${workspaces
+.map((w) => ` - '${w}'`)
+.join("\n")}\n`;
 await fs.promises.writeFile(pnpmWsPath, pnpmWsConfig);
 log("Created pnpm-workspace.yaml");
 }
@@ -133,31 +226,32 @@ export async function copyProjectToTempDir(projectDir, resourceUniqueKey, packag
 log("⚙️ Configuring temp project to not use frozen lockfile...");
 const npmrcPath = join(targetDir, ".npmrc");
 await fs.promises.writeFile(npmrcPath, "frozen-lockfile=false\n");
+const tmpDir = await ensureTmpDir();
 if (packageManager === "yarn") {
 const yarnrcPath = join(targetDir, ".yarnrc.yml");
-const yarnCacheDir = path.join(
+const yarnCacheDir = path.join(tmpDir, "yarn-cache");
 await fs.promises.mkdir(yarnCacheDir, { recursive: true });
 const yarnConfig = [
 // todo(justinvdm, 23-09-23): Support yarn pnpm
 "nodeLinker: node-modules",
 "enableImmutableInstalls: false",
-`cacheFolder: "${yarnCacheDir}"`,
+`cacheFolder: "${yarnCacheDir.replace(/\\/g, "/")}"`,
 ].join("\n");
 await fs.promises.writeFile(yarnrcPath, yarnConfig);
 log("Created .yarnrc.yml to allow lockfile changes for yarn");
 }
 if (packageManager === "yarn-classic") {
 const yarnrcPath = join(targetDir, ".yarnrc");
-const yarnCacheDir = path.join(
+const yarnCacheDir = path.join(tmpDir, "yarn-classic-cache");
 await fs.promises.mkdir(yarnCacheDir, { recursive: true });
-const yarnConfig = `cache-folder "${yarnCacheDir}"`;
+const yarnConfig = `cache-folder "${yarnCacheDir.replace(/\\/g, "/")}"`;
 await fs.promises.writeFile(yarnrcPath, yarnConfig);
 log("Created .yarnrc with cache-folder for yarn-classic");
 }
 await setTarballDependency(targetDir, tarballFilename);
 // Install dependencies in the target directory
 const installDir = monorepoRoot ? tempCopyRoot : targetDir;
-await retry(() => installDependencies(installDir, packageManager), {
+await retry(() => installDependencies(installDir, packageManager, projectDir, monorepoRoot), {
 retries: INSTALL_DEPENDENCIES_RETRIES,
 delay: 1000,
 });
@@ -168,9 +262,61 @@ export async function copyProjectToTempDir(projectDir, resourceUniqueKey, packag
 await cleanupTarball();
 }
 }
-async function installDependencies(targetDir, packageManager = "pnpm") {
-
-
+async function installDependencies(targetDir, packageManager = "pnpm", projectDir, monorepoRoot) {
+let cacheRoot = null;
+let nodeModulesCachePath = null;
+if (IS_CACHE_ENABLED) {
+const dependencyHash = await getProjectDependencyHash(monorepoRoot || projectDir);
+const cacheDirName = monorepoRoot
+? basename(monorepoRoot)
+: basename(projectDir);
+cacheRoot = path.join(await ensureTmpDir(), "rwsdk-e2e-cache", `${cacheDirName}-${dependencyHash.substring(0, 8)}`);
+nodeModulesCachePath = path.join(cacheRoot, "node_modules");
+if (await pathExists(nodeModulesCachePath)) {
+console.log(`✅ CACHE HIT for dependencies: Found cached node_modules. Hard-linking from ${nodeModulesCachePath}`);
+try {
+const destNodeModules = join(targetDir, "node_modules");
+if (process.platform === "win32") {
+await copy(nodeModulesCachePath, destNodeModules);
+}
+else {
+// On non-windows, use cp -al for performance
+await $("cp", ["-al", nodeModulesCachePath, targetDir]);
+}
+console.log(`✅ Cache restored successfully.`);
+console.log(`📦 Installing local SDK into cached node_modules...`);
+await runInstall(targetDir, packageManager, true);
+return;
+}
+catch (e) {
+console.warn(`⚠️ Cache restore failed. Error: ${e.message}. Proceeding with clean install.`);
+}
+}
+else {
+console.log(`ℹ️ CACHE MISS for dependencies: No cached node_modules found at ${nodeModulesCachePath}. Proceeding with clean installation.`);
+}
+}
+await runInstall(targetDir, packageManager, false);
+if (IS_CACHE_ENABLED && nodeModulesCachePath) {
+console.log(`Caching node_modules to ${nodeModulesCachePath} for future runs...`);
+await fs.promises.mkdir(path.dirname(nodeModulesCachePath), {
+recursive: true,
+});
+if (process.platform === "win32") {
+await copy(join(targetDir, "node_modules"), nodeModulesCachePath);
+}
+else {
+await $("cp", [
+"-al",
+join(targetDir, "node_modules"),
+nodeModulesCachePath,
+]);
+}
+console.log(`✅ node_modules cached successfully.`);
+}
+}
+async function runInstall(targetDir, packageManager, isCacheHit) {
+if (!isCacheHit) {
 // Clean up any pre-existing node_modules and lockfiles
 log("Cleaning up pre-existing node_modules and lockfiles...");
 await Promise.all([
@@ -183,51 +329,42 @@ async function installDependencies(targetDir, packageManager = "pnpm") {
 fs.promises.rm(join(targetDir, "package-lock.json"), { force: true }),
 ]);
 log("Cleanup complete.");
-
-
-
-
-
-
-
-
-
-}
-else if (packageManager === "yarn-classic") {
-log(`Preparing yarn@1.22.19 with corepack...`);
-await $("corepack", ["prepare", "yarn@1.x", "--activate"], {
-cwd: targetDir,
-stdio: "pipe",
-});
-}
+}
+if (packageManager.startsWith("yarn")) {
+log(`Enabling corepack...`);
+await $("corepack", ["enable"], { cwd: targetDir, stdio: "pipe" });
+if (packageManager === "yarn") {
+log(`Preparing yarn@stable with corepack...`);
+await $("corepack", ["prepare", "yarn@stable", "--activate"], {
+cwd: targetDir,
+stdio: "pipe",
+});
 }
-
-
-
-
-
-
-"yarn-classic": ["yarn"],
-}[packageManager];
-// Run install command in the target directory
-log(`Running ${installCommand.join(" ")}`);
-const [command, ...args] = installCommand;
-const result = await $(command, args, {
-cwd: targetDir,
-stdio: "pipe", // Capture output
-env: {
-YARN_ENABLE_HARDENED_MODE: "0",
-},
-});
-console.log("✅ Dependencies installed successfully");
-// Log installation details at debug level
-if (result.stdout) {
-log(`${packageManager} install output: %s`, result.stdout);
+else if (packageManager === "yarn-classic") {
+log(`Preparing yarn@1.22.19 with corepack...`);
+await $("corepack", ["prepare", "yarn@1.x", "--activate"], {
+cwd: targetDir,
+stdio: "pipe",
+});
 }
 }
-
-
-
-
-
+const npmCacheDir = path.join(await ensureTmpDir(), "npm-cache");
+await fs.promises.mkdir(npmCacheDir, { recursive: true });
+const installCommand = {
+pnpm: ["pnpm", "install", "--reporter=silent"],
+npm: ["npm", "install", "--cache", npmCacheDir, "--silent"],
+yarn: ["yarn", "install", "--silent"],
+"yarn-classic": ["yarn", "--silent"],
+}[packageManager];
+// Run install command in the target directory
+log(`Running ${installCommand.join(" ")}`);
+const [command, ...args] = installCommand;
+await $(command, args, {
+cwd: targetDir,
+stdio: "pipe",
+env: {
+YARN_ENABLE_HARDENED_MODE: "0",
+},
+});
+console.log("✅ Dependencies installed successfully");
 }
package/dist/lib/e2e/index.d.mts
CHANGED
package/dist/lib/e2e/index.mjs
CHANGED
package/dist/lib/e2e/poll.d.mts
CHANGED
@@ -4,5 +4,5 @@ export interface PollOptions {
 minTries: number;
 onRetry?: (error: unknown, tries: number) => void;
 }
-export declare function poll(fn: () => Promise<boolean>, options?: Partial<PollOptions>): Promise<void>;
+export declare function poll(fn: () => boolean | Promise<boolean>, options?: Partial<PollOptions>): Promise<void>;
 export declare function pollValue<T>(fn: () => Promise<T>, options?: Partial<PollOptions>): Promise<T>;
package/dist/lib/e2e/release.d.mts
CHANGED
@@ -39,6 +39,7 @@ export declare function runRelease(cwd: string, projectDir: string, resourceUniq
 /**
 * Check if a resource name includes a specific resource unique key
 * This is used to identify resources created during our tests
+* Handles both full format (adjective-animal-hash) and hash-only format
 */
 export declare function isRelatedToTest(resourceName: string, resourceUniqueKey: string): boolean;
 /**
package/dist/lib/e2e/release.mjs
CHANGED
@@ -44,10 +44,6 @@ export async function $expect(command, expectations, options = {
 reject: true,
 }) {
 return new Promise((resolve, reject) => {
-log("$expect starting with command: %s", command);
-log("Working directory: %s", options.cwd ?? process.cwd());
-log("Expected patterns: %O", expectations.map((e) => e.expect.toString()));
-console.log(`Running command: ${command}`);
 // Spawn the process with pipes for interaction
 const childProcess = execaCommand(command, {
 cwd: options.cwd ?? process.cwd(),
@@ -55,7 +51,6 @@ export async function $expect(command, expectations, options = {
 reject: false, // Never reject so we can handle the error ourselves
 env: options.env ?? process.env,
 });
-log("Process spawned with PID: %s", childProcess.pid);
 let stdout = "";
 let stderr = "";
 let buffer = "";
@@ -67,7 +62,6 @@ export async function $expect(command, expectations, options = {
 // Initialize match count for each pattern
 expectations.forEach(({ expect: expectPattern }) => {
 matchHistory.set(expectPattern, 0);
-log("Initialized pattern match count for: %s", expectPattern.toString());
 });
 // Collect stdout
 childProcess.stdout?.on("data", (data) => {
@@ -85,9 +79,6 @@ export async function $expect(command, expectations, options = {
 : new RegExp(expectPattern, "m");
 // Only search in the unmatched portion of the buffer
 const searchBuffer = buffer.substring(lastMatchIndex);
-log("Testing pattern: %s against buffer from position %d (%d chars)", pattern.toString(), lastMatchIndex, searchBuffer.length);
-// Enhanced debugging: show actual search buffer content
-log("Search buffer content for debugging: %O", searchBuffer);
 const match = searchBuffer.match(pattern);
 if (match) {
 // Found a match
@@ -98,30 +89,21 @@ export async function $expect(command, expectations, options = {
 const matchStartPosition = lastMatchIndex + match.index;
 const matchEndPosition = matchStartPosition + match[0].length;
 lastMatchIndex = matchEndPosition;
-log(`Pattern matched: "${patternStr}" (occurrence #${matchCount + 1}) at position ${matchStartPosition}-${matchEndPosition}`);
-// Only send a response if one is specified
 if (send) {
-log(`Sending response: "${send.replace(/\r/g, "\\r")}" to stdin`);
 childProcess.stdin?.write(send);
 }
-else {
-log(`Pattern "${patternStr}" matched (verification only)`);
-}
 // Increment the match count for this pattern
 matchHistory.set(expectPattern, matchCount + 1);
-log("Updated match count for %s: %d", patternStr, matchCount + 1);
 // Move to the next expectation
 currentExpectationIndex++;
 // If we've processed all expectations but need to wait for stdin response,
 // delay closing stdin until the next data event
 if (currentExpectationIndex >= expectations.length && send) {
-log("All patterns matched, closing stdin after last response");
 childProcess.stdin?.end();
 }
 break; // Exit the while loop to process next chunk
 }
 else {
-log("Pattern not matched. Attempting to diagnose the mismatch:");
 // Try to find the closest substring that might partially match
 const patternString = pattern.toString();
 const patternCore = patternString.substring(1, patternString.lastIndexOf("/") > 0
@@ -132,7 +114,6 @@ export async function $expect(command, expectations, options = {
 const partialPattern = patternCore.substring(0, i);
 const partialRegex = new RegExp(partialPattern, "m");
 const matches = partialRegex.test(searchBuffer);
-log(" Partial pattern '%s': %s", partialPattern, matches ? "matched" : "not matched");
 // Once we find where the matching starts to fail, stop
 if (!matches)
 break;
@@ -144,7 +125,6 @@ export async function $expect(command, expectations, options = {
 // If all expectations have been matched, we can close stdin if not already closed
 if (currentExpectationIndex >= expectations.length &&
 childProcess.stdin?.writable) {
-log("All patterns matched, ensuring stdin is closed");
 childProcess.stdin.end();
 }
 });
@@ -160,19 +140,10 @@ export async function $expect(command, expectations, options = {
 // Handle process completion
 childProcess.on("close", (code) => {
 log("Process closed with code: %s", code);
-// Log the number of matches for each pattern
-log("Pattern match summary:");
-for (const [pattern, count] of matchHistory.entries()) {
-log(` - "${pattern.toString()}": ${count} matches`);
-}
 // Check if any required patterns were not matched
 const unmatchedPatterns = Array.from(matchHistory.entries())
 .filter(([_, count]) => count === 0)
 .map(([pattern, _]) => pattern.toString());
-if (unmatchedPatterns.length > 0) {
-log("WARNING: Some expected patterns were not matched: %O", unmatchedPatterns);
-}
-log("$expect completed. Total stdout: %d bytes, stderr: %d bytes", stdout.length, stderr.length);
 resolve({ stdout, stderr, code });
 });
 childProcess.on("error", (err) => {
@@ -274,9 +245,15 @@ export async function runRelease(cwd, projectDir, resourceUniqueKey) {
 await ensureCloudflareAccountId(cwd, projectDir);
 // Extract worker name from directory name to ensure consistency
 const dirName = cwd ? basename(cwd) : "unknown-worker";
+// Extract hash part from resourceUniqueKey for matching
+// resourceUniqueKey format is typically "adjective-animal-hash" or just "hash"
+const hashPart = resourceUniqueKey.includes("-")
+? resourceUniqueKey.split("-").pop() || resourceUniqueKey.substring(0, 8)
+: resourceUniqueKey.substring(0, 8);
+const uniqueKeyForMatching = hashPart.substring(0, 8);
 // Ensure resource unique key is included in worker name for tracking
-if (resourceUniqueKey && !dirName.includes(
-log(`Worker name doesn't contain our unique key, this is unexpected: ${dirName}, key: ${
+if (resourceUniqueKey && !dirName.includes(uniqueKeyForMatching)) {
+log(`Worker name doesn't contain our unique key, this is unexpected: ${dirName}, key: ${uniqueKeyForMatching}`);
 console.log(`⚠️ Worker name doesn't contain our unique key. This might cause cleanup issues.`);
 }
 // Ensure the worker name in wrangler.jsonc matches our unique name
@@ -405,9 +382,16 @@ export async function runRelease(cwd, projectDir, resourceUniqueKey) {
 /**
 * Check if a resource name includes a specific resource unique key
 * This is used to identify resources created during our tests
+* Handles both full format (adjective-animal-hash) and hash-only format
 */
 export function isRelatedToTest(resourceName, resourceUniqueKey) {
-
+// Extract hash part if resourceUniqueKey contains dashes (full format)
+// Otherwise use as-is (hash-only format)
+const hashPart = resourceUniqueKey.includes("-")
+? resourceUniqueKey.split("-").pop() || resourceUniqueKey.substring(0, 8)
+: resourceUniqueKey;
+const uniqueKeyForMatching = hashPart.substring(0, 8);
+return resourceName.includes(uniqueKeyForMatching);
 }
 /**
 * Delete the worker using wrangler
package/dist/lib/e2e/tarball.mjs
CHANGED
@@ -2,34 +2,9 @@ import { createHash } from "crypto";
 import { $ } from "execa";
 import fs from "node:fs";
 import path from "node:path";
-import { adjectives, animals, uniqueNamesGenerator, } from "unique-names-generator";
 import { ROOT_DIR } from "../constants.mjs";
 import { copyProjectToTempDir } from "./environment.mjs";
 const log = (message) => console.log(message);
-async function verifyPackedContents(targetDir) {
-log(" - Verifying installed package contents...");
-const packageName = "rwsdk";
-const installedDistPath = path.join(targetDir, "node_modules", packageName, "dist");
-if (!fs.existsSync(installedDistPath)) {
-throw new Error(`dist/ directory not found in installed package at ${installedDistPath}.`);
-}
-const { stdout: originalDistChecksumOut } = await $("find . -type f | sort | md5sum", {
-shell: true,
-cwd: path.join(ROOT_DIR, "dist"),
-});
-const originalDistChecksum = originalDistChecksumOut.split(" ")[0];
-const { stdout: installedDistChecksumOut } = await $("find . -type f | sort | md5sum", {
-shell: true,
-cwd: installedDistPath,
-});
-const installedDistChecksum = installedDistChecksumOut.split(" ")[0];
-log(` - Original dist checksum: ${originalDistChecksum}`);
-log(` - Installed dist checksum: ${installedDistChecksum}`);
-if (originalDistChecksum !== installedDistChecksum) {
-throw new Error("File list in installed dist/ does not match original dist/.");
-}
-log(" ✅ Installed package contents match the local build.");
-}
 /**
 * Copies wrangler cache from monorepo to temp directory for deployment tests
 */
@@ -90,21 +65,14 @@ async function copyWranglerCache(targetDir, sdkRoot) {
 export async function setupTarballEnvironment({ projectDir, monorepoRoot, packageManager = "pnpm", }) {
 log(`🚀 Setting up tarball environment for ${projectDir}`);
 // Generate a resource unique key for this test run
-
-dictionaries: [adjectives, animals],
-separator: "-",
-length: 2,
-style: "lowerCase",
-});
-// Create a short unique hash based on the timestamp
+// Use just the hash to keep worker names short (under Cloudflare's 54 char limit)
 const hash = createHash("md5")
 .update(Date.now().toString())
 .digest("hex")
 .substring(0, 8);
-const resourceUniqueKey =
+const resourceUniqueKey = hash;
 try {
 const { tempDir, targetDir } = await copyProjectToTempDir(projectDir, resourceUniqueKey, packageManager, monorepoRoot);
-await verifyPackedContents(targetDir);
 // Copy wrangler cache to improve deployment performance
 const sdkRoot = ROOT_DIR;
 await copyWranglerCache(targetDir, sdkRoot);