@i-santos/firestack 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/README.md +225 -0
- package/bin/firestack.mjs +122 -0
- package/package.json +36 -0
- package/scripts/cli/config-migrate.mjs +129 -0
- package/scripts/cli/config.mjs +14 -0
- package/scripts/cli/docker-init.mjs +102 -0
- package/scripts/cli/docker-runner.mjs +579 -0
- package/scripts/cli/env.mjs +168 -0
- package/scripts/cli/functions-env.mjs +98 -0
- package/scripts/cli/init.mjs +134 -0
- package/scripts/cli/install.mjs +105 -0
- package/scripts/cli/internal-e2e-runner.mjs +94 -0
- package/scripts/cli/internal-log-router.mjs +116 -0
- package/scripts/cli/internal-run-e2e-staging.mjs +49 -0
- package/scripts/cli/internal-run-e2e.mjs +153 -0
- package/scripts/cli/internal-run-functions-build.mjs +91 -0
- package/scripts/cli/internal-run-integration-report.mjs +132 -0
- package/scripts/cli/test.mjs +1094 -0
- package/scripts/publish-package.sh +16 -0
- package/templates/dockerignore +37 -0
- package/templates/env/.env.default.example +4 -0
- package/templates/env/.env.development.example +4 -0
- package/templates/env/.env.production.example +2 -0
- package/templates/env/.env.staging.example +4 -0
- package/templates/env/.env.test.default.example +6 -0
- package/templates/env/.env.test.development.example +6 -0
- package/templates/env/.env.test.production.example +4 -0
- package/templates/env/.env.test.staging.example +8 -0
- package/templates/firestack.config.json +101 -0
- package/templates/playwright.config.mjs +65 -0
- package/templates/tests.Dockerfile +43 -0
|
@@ -0,0 +1,579 @@
|
|
|
1
|
+
import { existsSync, readFileSync } from 'node:fs';
|
|
2
|
+
import { spawnSync } from 'node:child_process';
|
|
3
|
+
import { createHash } from 'node:crypto';
|
|
4
|
+
import { isAbsolute, relative, resolve } from 'node:path';
|
|
5
|
+
|
|
6
|
+
/**
 * Single choke point for invoking the docker CLI.
 *
 * @param {string[]} args - docker CLI arguments.
 * @param {object} [options] - options forwarded verbatim to spawnSync.
 * @returns {object} the spawnSync result (status, stdout, error, ...).
 */
function runDocker(args, options = {}) {
  const spawnResult = spawnSync('docker', args, options);
  return spawnResult;
}
|
|
9
|
+
|
|
10
|
+
/**
 * Log a prefixed error to stderr and terminate the process.
 * Never returns.
 *
 * @param {string} logPrefix - tag prepended to the message.
 * @param {string} message - human-readable failure description.
 * @param {number} [exitCode=1] - process exit code.
 */
function fail(logPrefix, message, exitCode = 1) {
  const line = `${logPrefix} ${message}`;
  console.error(line);
  process.exit(exitCode);
}
|
|
14
|
+
|
|
15
|
+
/**
 * Run docker and abort the whole process on any failure: spawn errors are
 * reported via fail(), non-zero exit codes are propagated via process.exit.
 *
 * @param {string} logPrefix - tag for error messages.
 * @param {string} step - description of what was being attempted.
 * @param {string[]} args - docker CLI arguments.
 * @param {object} [options] - spawnSync options.
 * @returns {object} the spawnSync result when the command succeeded.
 */
function runDockerStrict(logPrefix, step, args, options = {}) {
  const outcome = runDocker(args, options);
  if (outcome.error) {
    fail(logPrefix, `${step}: ${outcome.error.message}`);
  }
  const status = outcome.status ?? 1;
  if (status !== 0) {
    process.exit(status);
  }
  return outcome;
}
|
|
25
|
+
|
|
26
|
+
/**
 * Run a docker listing command and return its stdout as trimmed, non-empty
 * lines. Any failure (or empty output) yields an empty array — listing is
 * best-effort.
 *
 * @param {string[]} args - docker CLI arguments (e.g. image/volume ls).
 * @returns {string[]} trimmed output lines.
 */
function listDockerLines(args) {
  const result = runDocker(args, { encoding: 'utf8' });
  const exited = result.status ?? 1;
  if (exited !== 0 || !result.stdout) {
    return [];
  }
  const lines = [];
  for (const raw of result.stdout.split('\n')) {
    const trimmed = raw.trim();
    if (trimmed) lines.push(trimmed);
  }
  return lines;
}
|
|
36
|
+
|
|
37
|
+
/**
 * Best-effort removal of a docker image or volume. A failed removal only
 * produces a warning (the artifact may be in use or already gone).
 *
 * @param {string} logPrefix - tag for the warning.
 * @param {string} type - "image" or "volume", used only in the message.
 * @param {string} artifact - artifact name, used only in the message.
 * @param {string[]} rmArgs - full docker args performing the removal.
 */
function removeDockerArtifact(logPrefix, type, artifact, rmArgs) {
  const removal = runDocker(rmArgs, { stdio: 'ignore' });
  const removed = (removal.status ?? 1) === 0;
  if (!removed) {
    console.warn(`${logPrefix} cleanup skipped for ${type} ${artifact} (likely in use or already removed).`);
  }
}
|
|
43
|
+
|
|
44
|
+
/**
 * Deterministic JSON-like serialization: object keys are emitted in
 * locale-compared sorted order at every depth, so equal structures always
 * produce equal strings regardless of insertion order. Used as hash input.
 *
 * @param {*} value - any JSON-serializable value.
 * @returns {string} canonical string form.
 */
function stableStringify(value) {
  if (Array.isArray(value)) {
    const items = value.map((item) => stableStringify(item));
    return `[${items.join(',')}]`;
  }
  if (value && typeof value === 'object') {
    const keys = Object.keys(value).sort((a, b) => a.localeCompare(b));
    const parts = keys.map((key) => `${JSON.stringify(key)}:${stableStringify(value[key])}`);
    return `{${parts.join(',')}}`;
  }
  // Primitives (and null) defer to JSON.stringify.
  return JSON.stringify(value);
}
|
|
54
|
+
|
|
55
|
+
/**
 * Read a file as a Buffer, returning null instead of throwing when the file
 * is missing or unreadable. Callers hash optional inputs and treat null as
 * "no content", so swallowing the error here is deliberate.
 *
 * @param {string} path - absolute or relative file path.
 * @returns {Buffer|null} file contents, or null on any read error.
 */
function readFileStrict(path) {
  let contents = null;
  try {
    contents = readFileSync(path);
  } catch {
    // intentionally best-effort: absence is a valid state
  }
  return contents;
}
|
|
62
|
+
|
|
63
|
+
/**
 * Read and parse a JSON file, returning null when the file is missing,
 * unreadable, or not valid JSON. Used for optional config/lock files.
 *
 * @param {string} path - path to the JSON file.
 * @returns {*|null} parsed value, or null on any failure.
 */
function readJsonStrict(path) {
  try {
    const raw = readFileSync(path, 'utf8');
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
|
|
70
|
+
|
|
71
|
+
// Package names excluded from dependency hashing: the firestack tooling
// itself must not invalidate a project's cached test image when it is
// installed or upgraded.
const IGNORE_IMAGE_HASH_PACKAGES = new Set([
  '@i-santos/firestack',
]);

/**
 * Copy a dependency map, dropping ignored package names. Any non-object
 * input (null, string, ...) collapses to an empty object.
 *
 * @param {object|null|undefined} deps - name -> version-range map.
 * @returns {object} filtered shallow copy.
 */
function sanitizeDependencyObject(deps) {
  if (!deps || typeof deps !== 'object') return {};
  const kept = Object.entries(deps).filter(([name]) => !IGNORE_IMAGE_HASH_PACKAGES.has(name));
  return Object.fromEntries(kept);
}
|
|
81
|
+
|
|
82
|
+
/**
 * Recursively strip ignored packages from a package-lock dependency tree
 * (v1-style nesting). Keys named after ignored packages are dropped wherever
 * they appear; dependency-map keys ("requires", "dependencies",
 * "optionalDependencies", "peerDependencies") are filtered at their own top
 * level via sanitizeDependencyObject.
 *
 * NOTE(review): sanitizeDependencyObject does not recurse, and the `continue`
 * below skips recursion into those subtrees — an ignored package nested
 * deeper inside a "dependencies" entry would survive. Confirm whether that
 * is intentional (root-level filtering may be all that's needed here).
 */
function sanitizeLockDependencyTree(node) {
  // Primitives and arrays pass through untouched.
  if (!node || typeof node !== 'object' || Array.isArray(node)) return node;
  const sanitized = {};
  for (const [key, value] of Object.entries(node)) {
    // Drop the ignored package entirely, at any depth.
    if (IGNORE_IMAGE_HASH_PACKAGES.has(key)) continue;
    if (key === 'requires' || key === 'dependencies' || key === 'optionalDependencies' || key === 'peerDependencies') {
      sanitized[key] = sanitizeDependencyObject(value);
      continue;
    }
    sanitized[key] = sanitizeLockDependencyTree(value);
  }
  return sanitized;
}
|
|
95
|
+
|
|
96
|
+
/**
 * Remove the firestack package itself from a parsed package-lock.json so
 * installing/upgrading the tooling does not change the computed deps hash.
 * Handles both the v2+ "packages" map (node_modules path keys) and the v1
 * "dependencies" tree (via sanitizeLockDependencyTree).
 *
 * @param {*} lockJson - parsed lockfile; non-objects are returned as-is.
 * @returns {*} sanitized copy (the input object itself is not mutated).
 */
function sanitizePackageLockJson(lockJson) {
  if (!lockJson || typeof lockJson !== 'object' || Array.isArray(lockJson)) return lockJson;
  const sanitized = sanitizeLockDependencyTree(lockJson);

  if (sanitized.packages && typeof sanitized.packages === 'object') {
    // Object.keys snapshots the key list, so deleting during iteration is safe.
    for (const key of Object.keys(sanitized.packages)) {
      // Normalize Windows path separators before matching node_modules keys.
      const normalized = key.replace(/\\/g, '/');
      if (
        normalized === 'node_modules/@i-santos/firestack' ||
        normalized.endsWith('/node_modules/@i-santos/firestack')
      ) {
        delete sanitized.packages[key];
      }
    }

    // The "" entry describes the root project; scrub its dependency maps too.
    const root = sanitized.packages[''];
    if (root && typeof root === 'object') {
      root.dependencies = sanitizeDependencyObject(root.dependencies);
      root.devDependencies = sanitizeDependencyObject(root.devDependencies);
      root.optionalDependencies = sanitizeDependencyObject(root.optionalDependencies);
      root.peerDependencies = sanitizeDependencyObject(root.peerDependencies);
    }
  }

  // v1 root "dependencies" map.
  sanitized.dependencies = sanitizeDependencyObject(sanitized.dependencies);
  return sanitized;
}
|
|
123
|
+
|
|
124
|
+
/**
 * Produce a deterministic string for a lockfile, for hashing. Parseable JSON
 * is canonicalized via stableStringify (optionally sanitized first);
 * unparseable files fall back to their raw bytes so any change still
 * invalidates the cache; a missing file contributes an empty string.
 *
 * @param {string} path - lockfile path.
 * @param {{sanitize?: boolean}} [options] - strip firestack entries first.
 * @returns {string} canonical lockfile representation.
 */
function readSanitizedLockfileString(path, { sanitize = false } = {}) {
  const parsed = readJsonStrict(path);
  if (parsed) {
    const normalized = sanitize ? sanitizePackageLockJson(parsed) : parsed;
    return stableStringify(normalized);
  }
  const raw = readFileStrict(path);
  return raw ? raw.toString('utf8') : '';
}
|
|
133
|
+
|
|
134
|
+
/**
 * Normalize a user-supplied path into a safe repo-relative form: trims,
 * converts backslashes to slashes, strips one leading "./" (or a single
 * leading "/"), and rejects anything empty, still absolute, or containing
 * "..". Note the quirk: "/abs" becomes "abs", but "//x" is rejected.
 *
 * @param {*} value - candidate path; non-strings yield null.
 * @returns {string|null} normalized relative path, or null when unsafe.
 */
function normalizeRelativePath(value) {
  if (typeof value !== 'string') return null;
  let candidate = value.trim();
  candidate = candidate.replace(/\\/g, '/');
  candidate = candidate.replace(/^\.?\//, '');
  const unsafe = candidate === '' || candidate.startsWith('/') || candidate.includes('..');
  return unsafe ? null : candidate;
}
|
|
140
|
+
|
|
141
|
+
/**
 * Discover Cloud Functions source directories declared in firebase.json.
 * The "functions" config may be a string, an array of strings/objects with a
 * "source" field, or a single object. Only candidates that normalize safely
 * AND contain a package.json are kept. Falls back to "functions/" when
 * nothing was declared but functions/package.json exists.
 *
 * @param {string} repoPath - project root.
 * @param {string|null} [firebaseConfigPath] - explicit firebase.json path.
 * @returns {string[]} unique repo-relative functions directories.
 */
function discoverFunctionsPaths(repoPath, firebaseConfigPath = null) {
  const configPath = firebaseConfigPath ?? resolve(repoPath, 'firebase.json');
  const firebaseJson = readJsonStrict(configPath);
  const found = [];

  const consider = (candidate) => {
    const normalized = normalizeRelativePath(candidate);
    if (!normalized) return;
    // Only keep directories that actually contain an npm package.
    if (existsSync(resolve(repoPath, normalized, 'package.json'))) {
      found.push(normalized);
    }
  };

  const functionsConfig = firebaseJson?.functions;
  if (typeof functionsConfig === 'string') {
    consider(functionsConfig);
  } else if (Array.isArray(functionsConfig)) {
    for (const entry of functionsConfig) {
      if (typeof entry === 'string') {
        consider(entry);
      } else if (entry && typeof entry === 'object') {
        consider(entry.source);
      }
    }
  } else if (functionsConfig && typeof functionsConfig === 'object') {
    consider(functionsConfig.source);
  }

  if (found.length === 0 && existsSync(resolve(repoPath, 'functions', 'package.json'))) {
    found.push('functions');
  }

  return [...new Set(found)];
}
|
|
171
|
+
|
|
172
|
+
/**
 * For each functions source directory, collect every ancestor prefix (and
 * the directory itself) that contains a package.json — each such directory
 * needs its own dependency install. Results are sorted shallowest-first,
 * then alphabetically, so parents install before children.
 *
 * @param {string} repoPath - project root.
 * @param {string[]} [functionSourcePaths] - repo-relative source dirs.
 * @returns {string[]} unique, ordered install directories.
 */
function deriveFunctionsInstallPaths(repoPath, functionSourcePaths = []) {
  const installPaths = new Set();
  for (const sourcePath of functionSourcePaths) {
    const normalized = normalizeRelativePath(sourcePath);
    if (!normalized) continue;
    const segments = normalized.split('/').filter(Boolean);
    let prefix = '';
    for (const segment of segments) {
      prefix = prefix ? `${prefix}/${segment}` : segment;
      if (existsSync(resolve(repoPath, prefix, 'package.json'))) {
        installPaths.add(prefix);
      }
    }
  }
  return [...installPaths].sort((a, b) => a.split('/').length - b.split('/').length || a.localeCompare(b));
}
|
|
187
|
+
|
|
188
|
+
/**
 * Serialize every dependency-affecting field from the root package.json and
 * each functions package.json into one deterministic string for hashing.
 * Firestack's own package is filtered out (sanitizeDependencyObject) so tool
 * upgrades do not invalidate cached test images.
 *
 * @param {string} repoPath - project root.
 * @param {string[]} [functionsPaths] - repo-relative functions package dirs.
 * @returns {string} stable canonical string of the relevant fields.
 */
function extractDependencyInputs(repoPath, functionsPaths = []) {
  // Missing/unparseable package.json degrades to an empty object.
  const pkg = readJsonStrict(resolve(repoPath, 'package.json')) ?? {};
  const functionsPkgs = functionsPaths.map((path) => ({
    path,
    pkg: readJsonStrict(resolve(repoPath, path, 'package.json')) ?? null,
  }));
  const relevant = {
    dependencies: sanitizeDependencyObject(pkg.dependencies),
    devDependencies: sanitizeDependencyObject(pkg.devDependencies),
    optionalDependencies: sanitizeDependencyObject(pkg.optionalDependencies),
    peerDependencies: sanitizeDependencyObject(pkg.peerDependencies),
    // Fields below also influence how npm resolves/installs dependencies.
    overrides: pkg.overrides ?? {},
    engines: pkg.engines ?? {},
    packageManager: pkg.packageManager ?? null,
    functions: functionsPkgs
      // Skip directories whose package.json could not be read/parsed.
      .filter(({ pkg }) => pkg)
      .map(({ path, pkg: functionPkg }) => ({
        path,
        dependencies: sanitizeDependencyObject(functionPkg.dependencies),
        devDependencies: sanitizeDependencyObject(functionPkg.devDependencies),
        optionalDependencies: sanitizeDependencyObject(functionPkg.optionalDependencies),
        peerDependencies: sanitizeDependencyObject(functionPkg.peerDependencies),
        overrides: functionPkg.overrides ?? {},
        engines: functionPkg.engines ?? {},
      })),
  };
  // stableStringify makes the result independent of key insertion order.
  return stableStringify(relevant);
}
|
|
216
|
+
|
|
217
|
+
/**
 * Compute a short content hash over everything that should force a test
 * image rebuild: the Dockerfile, dependency-relevant package.json fields,
 * the (sanitized) root lockfile, and each functions package's lockfile and
 * npm-shrinkwrap. Missing files contribute empty content.
 *
 * @param {string} repoPath - project root.
 * @param {string} dockerfilePath - repo-relative Dockerfile path.
 * @param {string} [lockfilePath='package-lock.json'] - root lockfile.
 * @param {string[]} [functionsPaths] - functions install directories.
 * @returns {string} first 12 hex chars of the sha256 digest.
 */
function computeDepsHash(repoPath, dockerfilePath, lockfilePath = 'package-lock.json', functionsPaths = []) {
  const dockerfile = readFileStrict(resolve(repoPath, dockerfilePath));
  // Root lockfile is sanitized so the firestack package itself is ignored.
  const lockfile = readSanitizedLockfileString(resolve(repoPath, lockfilePath), { sanitize: true });
  const hash = createHash('sha256');
  hash.update(dockerfile ?? Buffer.from(''));
  hash.update(extractDependencyInputs(repoPath, functionsPaths));
  hash.update(lockfile);
  for (const functionsPath of functionsPaths) {
    // NOTE(review): functions lockfiles are read WITHOUT sanitize:true —
    // presumably firestack is never a functions dependency; confirm this is
    // intentional, otherwise installing it there would churn the hash.
    const functionsLockfile = readSanitizedLockfileString(resolve(repoPath, functionsPath, 'package-lock.json'));
    const functionsNpmShrinkwrap = readSanitizedLockfileString(resolve(repoPath, functionsPath, 'npm-shrinkwrap.json'));
    hash.update(functionsLockfile);
    hash.update(functionsNpmShrinkwrap);
  }
  return hash.digest('hex').slice(0, 12);
}
|
|
232
|
+
|
|
233
|
+
/**
 * Per-project namespace embedded in docker artifact names: the explicit
 * TEST_DOCKER_NAMESPACE override when set (non-blank), otherwise the first
 * 8 hex chars of the repo path's sha256. Always lowercased, since docker
 * requires lowercase repository/volume names.
 *
 * @param {string} repoPath - project root used as the hash seed.
 * @param {object} [env=process.env] - environment to consult.
 * @returns {string} lowercase namespace token.
 */
function getArtifactNamespace(repoPath, env = process.env) {
  const override = env.TEST_DOCKER_NAMESPACE?.trim();
  const namespace = override || createHash('sha256').update(repoPath).digest('hex').slice(0, 8);
  return namespace.toLowerCase();
}
|
|
236
|
+
|
|
237
|
+
/**
 * Garbage-collect stale artifacts from earlier deps hashes: every image
 * sharing this project's base name except keepImage, and every volume
 * matching one of the prefixes except those in keepVolumes. Individual
 * removals are best-effort (removeDockerArtifact only warns on failure).
 *
 * @param {string} logPrefix - tag for warnings.
 * @param {string} imageBaseName - image repository name to match.
 * @param {string|string[]} volumePrefixes - volume name prefix(es).
 * @param {string} keepImage - currently-active image to preserve.
 * @param {string|string[]} [keepVolumes] - currently-active volumes.
 */
function cleanupOldDockerArtifacts(logPrefix, imageBaseName, volumePrefixes, keepImage, keepVolumes = []) {
  for (const image of listDockerLines(['image', 'ls', '--format', '{{.Repository}}:{{.Tag}}'])) {
    const stale = image.startsWith(`${imageBaseName}:`) && image !== keepImage;
    if (stale) {
      removeDockerArtifact(logPrefix, 'image', image, ['image', 'rm', image]);
    }
  }

  const prefixes = Array.isArray(volumePrefixes) ? volumePrefixes : [volumePrefixes];
  const keepSet = new Set((Array.isArray(keepVolumes) ? keepVolumes : [keepVolumes]).filter(Boolean));
  for (const volume of listDockerLines(['volume', 'ls', '--format', '{{.Name}}'])) {
    const matchesPrefix = prefixes.some((prefix) => typeof prefix === 'string' && volume.startsWith(prefix));
    if (matchesPrefix && !keepSet.has(volume)) {
      removeDockerArtifact(logPrefix, 'volume', volume, ['volume', 'rm', volume]);
    }
  }
}
|
|
257
|
+
|
|
258
|
+
/**
 * Build the test image unless a cached one with the current deps hash
 * already exists. With forceRebuild, the cached image is removed first.
 * Build/removal failures terminate the process (via runDockerStrict).
 *
 * @param {string} logPrefix - tag for log output.
 * @param {string} image - full image name:tag to ensure.
 * @param {string} dockerfilePath - repo-relative Dockerfile path.
 * @param {string} repoPath - build context directory.
 * @param {string} depsHash - hash shown in log output.
 * @param {string[]} [buildArgs] - extra args for `docker build`.
 * @param {boolean} [forceRebuild=false] - rebuild even when cached.
 */
function ensureImage(logPrefix, image, dockerfilePath, repoPath, depsHash, buildArgs = [], forceRebuild = false) {
  const imageExists = runDocker(['image', 'inspect', image], { stdio: 'ignore' }).status === 0;
  if (imageExists) {
    if (!forceRebuild) return;
    console.log(`${logPrefix} force rebuild enabled. removing cached image ${image}`);
    runDockerStrict(logPrefix, 'failed to remove docker image for rebuild', ['image', 'rm', image], { stdio: 'inherit' });
  }
  console.log(`${logPrefix} building image ${image} (deps hash: ${depsHash})`);
  if (buildArgs.length > 0) {
    console.log(`${logPrefix} building image options: ${buildArgs.join(' ')}`);
  }
  runDockerStrict(
    logPrefix,
    'failed to build docker image',
    ['build', ...buildArgs, '-f', dockerfilePath, '-t', image, repoPath],
    { stdio: 'inherit' }
  );
}
|
|
276
|
+
|
|
277
|
+
/**
 * Translate selected host environment variables into docker `-e NAME=value`
 * argument pairs. Names whose value is not a string (unset, numeric, ...)
 * are skipped; empty strings are forwarded.
 *
 * @param {object} env - environment object to read from.
 * @param {string[]} envNames - variable names to forward.
 * @returns {string[]} flat list of '-e', 'NAME=value' pairs.
 */
function buildEnvArgs(env, envNames) {
  const args = [];
  for (const name of envNames) {
    if (typeof env[name] === 'string') {
      args.push('-e', `${name}=${env[name]}`);
    }
  }
  return args;
}
|
|
282
|
+
|
|
283
|
+
/**
 * Build docker resource-limit flags from TEST_DOCKER_* environment
 * variables. Unset or blank (whitespace-only) values contribute nothing.
 * Order is fixed: memory, memory-swap, cpus, pids-limit.
 *
 * @param {object} [env=process.env] - environment to consult.
 * @returns {string[]} flat list of flag/value pairs.
 */
function buildResourceArgs(env = process.env) {
  const flagsBySetting = [
    ['--memory', env.TEST_DOCKER_MEMORY],
    ['--memory-swap', env.TEST_DOCKER_MEMORY_SWAP],
    ['--cpus', env.TEST_DOCKER_CPUS],
    ['--pids-limit', env.TEST_DOCKER_PIDS_LIMIT],
  ];

  const args = [];
  for (const [flag, rawValue] of flagsBySetting) {
    const value = rawValue?.trim();
    if (value) args.push(flag, value);
  }
  return args;
}
|
|
297
|
+
|
|
298
|
+
/**
 * Escape a value for safe interpolation inside a double-quoted bash string.
 *
 * Inside double quotes bash still interprets four characters: backslash,
 * double quote, dollar sign, and backtick. The previous implementation only
 * escaped backslash and double quote, so a configured path containing `$`
 * or a backtick underwent variable expansion / command substitution in the
 * container shell. All four are now escaped; output for inputs without
 * `$`/backtick is unchanged.
 *
 * @param {*} value - value to embed; coerced to string first.
 * @returns {string} escaped string safe inside "..." in bash.
 */
function escapeForDoubleQuotes(value) {
  return String(value).replace(/[\\"`$]/g, (ch) => `\\${ch}`);
}
|
|
301
|
+
|
|
302
|
+
/**
 * Make bind-mounted artifact directories (e.g. "out/") writable by the host
 * uid:gid before the main test container runs as that user. A throwaway
 * container (running as root) mkdir-p's each repo-relative path under the
 * bind mount and chowns it only when ownership differs, avoiding slow
 * recursive chowns on every run. Exits the process on failure.
 *
 * @param {string} logPrefix - tag for error output.
 * @param {string} image - image providing bash/stat/chown.
 * @param {string} repoPath - host directory bind-mounted at workdir.
 * @param {number} uid - target owner uid.
 * @param {number} gid - target owner gid.
 * @param {string} [workdir='/work'] - mount point inside the container.
 * @param {string[]} [paths] - repo-relative directories to prepare.
 */
function ensureBindPathsWritableForUser(logPrefix, image, repoPath, uid, gid, workdir = '/work', paths = []) {
  if (!Array.isArray(paths) || paths.length === 0) return;

  // Defensive filtering: only clean, repo-relative paths may be chown'd.
  const sanitized = paths
    .filter((entry) => typeof entry === 'string' && entry.trim() !== '')
    .map((entry) => entry.trim())
    .filter((entry) => !entry.startsWith('/'))
    .filter((entry) => !entry.includes('..'));

  if (sanitized.length === 0) return;

  const ownership = `${uid}:${gid}`;
  // Each path is escaped and wrapped in double quotes for the bash loop.
  const pathArgs = sanitized.map((entry) => `"${escapeForDoubleQuotes(entry)}"`).join(' ');
  const command = `for rel in ${pathArgs}; do
target="${workdir}/$rel"
mkdir -p "$target"
current="$(stat -c '%u:%g' "$target" 2>/dev/null || true)"
if [ "$current" != "${ownership}" ]; then
chown -R ${ownership} "$target"
fi
done`;

  const result = runDocker([
    'run',
    '--rm',
    '-v',
    `${repoPath}:${workdir}`,
    image,
    'bash',
    '-lc',
    command,
  ], { stdio: 'inherit' });

  if (result.error) {
    fail(logPrefix, `failed to prepare artifact directory permissions: ${result.error.message}`);
  }
  if ((result.status ?? 1) !== 0) {
    process.exit(result.status ?? 1);
  }
}
|
|
342
|
+
|
|
343
|
+
/**
 * Chown a named docker volume's mount point to uid:gid so a container
 * running as the host user can write into it (node_modules by default).
 * The volume is mounted into a throwaway root container that chowns only
 * when the current ownership differs. Exits the process on failure.
 *
 * NOTE(review): `target` is interpolated unquoted into the bash commands;
 * callers only pass static, code-derived paths — confirm before ever
 * accepting a user-supplied relativePath here.
 *
 * @param {string} logPrefix - tag for error output.
 * @param {string} image - image providing bash/stat/chown.
 * @param {string} volumeName - docker named volume to prepare.
 * @param {number} uid - target owner uid.
 * @param {number} gid - target owner gid.
 * @param {string} [workdir='/work'] - base mount directory.
 * @param {string} [relativePath='node_modules'] - subpath to own.
 */
function ensureVolumeWritableForUser(logPrefix, image, volumeName, uid, gid, workdir = '/work', relativePath = 'node_modules') {
  const ownership = `${uid}:${gid}`;
  const target = `${workdir}/${relativePath}`;
  const command = [
    `mkdir -p ${target}`,
    `current="$(stat -c '%u:%g' ${target} 2>/dev/null || true)"`,
    // Skip the recursive chown when ownership is already correct.
    `if [ "$current" != "${ownership}" ]; then chown -R ${ownership} ${target}; fi`,
  ].join(' && ');

  const result = runDocker([
    'run',
    '--rm',
    '-v',
    `${volumeName}:${target}`,
    image,
    'bash',
    '-lc',
    command,
  ], { stdio: 'inherit' });

  if (result.error) {
    fail(logPrefix, `failed to prepare node_modules volume permissions: ${result.error.message}`);
  }
  if ((result.status ?? 1) !== 0) {
    process.exit(result.status ?? 1);
  }
}
|
|
370
|
+
|
|
371
|
+
/**
 * Warm the Firestore emulator jar into the shared cache volume so test runs
 * don't re-download it. Best-effort: warns and returns false on any failure
 * instead of aborting. The in-container script tries, in order:
 *   1. copy a jar baked into the image at /opt/firebase/emulators,
 *   2. short-circuit if the cache volume already holds a jar,
 *   3. `firebase setup:emulators:firestore` via /opt/deps, the project's
 *      node_modules, or finally a globally-installed firebase binary.
 *
 * @returns {boolean} true when the cache was warmed (or already warm).
 */
function ensureFirestoreEmulatorCached(logPrefix, image, cacheVolume, uid, gid) {
  // Only drop privileges when real numeric ids were provided (null disables).
  const userArgs = Number.isInteger(uid) && Number.isInteger(gid) ? ['--user', `${uid}:${gid}`] : [];
  const command = [
    'set -e',
    'mkdir -p /firestack-cache/firebase/emulators',
    'if ls /opt/firebase/emulators/cloud-firestore-emulator-*.jar >/dev/null 2>&1; then',
    ` echo '${logPrefix} seeding firestore emulator cache from image...'`,
    ' cp -f /opt/firebase/emulators/cloud-firestore-emulator-*.jar /firestack-cache/firebase/emulators/',
    'fi',
    'if ls /firestack-cache/firebase/emulators/cloud-firestore-emulator-*.jar >/dev/null 2>&1; then',
    ` echo '${logPrefix} firestore emulator already cached.'`,
    ' exit 0',
    'fi',
    `echo '${logPrefix} preloading firestore emulator into docker cache volume...'`,
    'if [ -x /opt/deps/node_modules/.bin/firebase ]; then',
    ' FIREBASE_EMULATORS_PATH=/firestack-cache/firebase/emulators /opt/deps/node_modules/.bin/firebase setup:emulators:firestore',
    ' exit 0',
    'fi',
    'if [ -x node_modules/.bin/firebase ]; then',
    ' FIREBASE_EMULATORS_PATH=/firestack-cache/firebase/emulators node_modules/.bin/firebase setup:emulators:firestore',
    ' exit 0',
    'fi',
    'FIREBASE_EMULATORS_PATH=/firestack-cache/firebase/emulators firebase setup:emulators:firestore',
  ].join('\n');

  const result = runDocker([
    'run',
    '--rm',
    ...userArgs,
    '-v',
    `${cacheVolume}:/firestack-cache`,
    '-e',
    'FIREBASE_EMULATORS_PATH=/firestack-cache/firebase/emulators',
    image,
    'bash',
    '-lc',
    command,
  ], { stdio: 'inherit' });

  // Warmup is an optimization: failures degrade to a warning, never an abort.
  if (result.error) {
    console.warn(`${logPrefix} unable to warm firestore emulator cache: ${result.error.message}`);
    return false;
  }
  if ((result.status ?? 1) !== 0) {
    console.warn(`${logPrefix} firestore emulator cache warmup exited with code ${result.status ?? 'unknown'}. continuing without warmup.`);
    return false;
  }
  return true;
}
|
|
420
|
+
|
|
421
|
+
/**
 * Build the bash bootstrap prelude that runs inside the test container
 * before the actual test command. It seeds the mounted node_modules volumes
 * from the dependencies baked into the image at /opt/deps and verifies that
 * every declared dependency is present, instructing the user to rebuild
 * with --docker-rebuild otherwise.
 *
 * Functions-directory discovery mirrors discoverFunctionsPaths, preferring
 * FIRESTACK_FUNCTIONS_INSTALL_PATHS / FIRESTACK_FUNCTIONS_PATHS env vars,
 * then parsing firebase.json inside the container, then the conventional
 * "functions/" fallback.
 *
 * @returns {string} a single '&&'-joined bash command string.
 */
export function defaultBootstrapCommand() {
  return [
    // Helper: exit non-zero when any declared dep is absent from node_modules.
    'firestack_has_declared_deps(){ target="$1"; [ -f "$target/package.json" ] || return 0; (cd "$target" && node -e "const fs=require(\'fs\');const p=JSON.parse(fs.readFileSync(\'package.json\',\'utf8\'));const deps={...(p.dependencies||{}),...(p.devDependencies||{}),...(p.optionalDependencies||{})};const missing=Object.keys(deps).some((name)=>!fs.existsSync(\'node_modules/\'+name));process.exit(missing?1:0);") ; }',
    // Helper: copy image-baked node_modules into the mounted volume once
    // (presence of .bin is used as the "already seeded" marker).
    'firestack_copy_from_image(){ rel="$1"; src="/opt/deps/${rel:+$rel/}node_modules"; dst="/work/${rel:+$rel/}node_modules"; [ -d "$src" ] || return 0; [ -d "$dst/.bin" ] || { mkdir -p "$dst" && cp -a "$src/." "$dst/"; }; }',
    // Seed + verify the root package.
    'if [ -f /work/package.json ]; then firestack_copy_from_image ""; firestack_has_declared_deps /work || { echo "[firestack] missing root dependencies in container. Rebuild image with --docker-rebuild."; exit 1; }; fi',
    // Resolve the CSV of functions install dirs (env var, firebase.json, or fallback).
    'firestack_install_csv="${FIRESTACK_FUNCTIONS_INSTALL_PATHS:-${FIRESTACK_FUNCTIONS_PATHS:-}}"; if [ -z "$firestack_install_csv" ]; then firestack_cfg_rel="${FIRESTACK_FIREBASE_CONFIG_PATH:-firebase.json}"; firestack_cfg="/work/${firestack_cfg_rel}"; if [ -f "$firestack_cfg" ]; then firestack_install_csv="$(FIRESTACK_CONFIG_PATH="$firestack_cfg" node -e "const fs=require(\'fs\');const p=process.env.FIRESTACK_CONFIG_PATH||\'/work/firebase.json\';const cfg=JSON.parse(fs.readFileSync(p,\'utf8\'));const out=[];const add=(v)=>{if(typeof v!==\'string\')return;const n=v.trim().replace(/\\\\/g,\'/\').replace(/^\\.\\//,\'\');if(!n||n.startsWith(\'/\')||n.includes(\'..\'))return;if(fs.existsSync(\'/work/\'+n+\'/package.json\'))out.push(n);};const f=cfg.functions;if(typeof f===\'string\')add(f);else if(Array.isArray(f)){for(const e of f){if(typeof e===\'string\')add(e);else if(e&&typeof e===\'object\')add(e.source);}}else if(f&&typeof f===\'object\')add(f.source);process.stdout.write([...new Set(out)].join(\',\'));" )"; fi; fi; if [ -z "$firestack_install_csv" ] && [ -f "/work/functions/package.json" ]; then firestack_install_csv="functions"; fi',
    // Seed + verify each functions package directory from the CSV.
    'if [ -n "$firestack_install_csv" ]; then printf "%s" "$firestack_install_csv" | tr "," "\\n" | while IFS= read -r rel; do [ -z "$rel" ] && continue; module="/work/$rel"; [ -f "$module/package.json" ] || continue; firestack_copy_from_image "$rel"; firestack_has_declared_deps "$module" || { echo "[firestack] missing dependencies for $rel in container. Rebuild image with --docker-rebuild."; exit 1; }; done; fi',
  ].join(' && ');
}
|
|
430
|
+
|
|
431
|
+
/**
 * Assemble a dockerized test task for a project: derives all artifact names
 * (image tag, node_modules volumes, emulator cache volume) from a
 * per-project namespace plus a dependency hash, and returns a handle with
 * `prepare()` (build image, fix permissions, warm emulator cache, GC stale
 * artifacts) and `run()` (execute a bash command in the container).
 *
 * @param {object} opts
 * @param {string} opts.cwd - project root on the host.
 * @param {string} opts.logPrefix - tag for all log output.
 * @param {object} opts.dockerConfig - docker section of firestack config.
 * @param {object} [opts.env=process.env] - environment to consult.
 * @param {string|null} [opts.firebaseConfigPath] - explicit firebase.json.
 * @param {boolean} [opts.forceRebuild=false] - rebuild cached image.
 * @returns {{image: string, nodeModulesVolume: string, emulatorCacheVolume: string, prepare: Function, run: Function}}
 */
export function createDockerTask({ cwd, logPrefix, dockerConfig, env = process.env, firebaseConfigPath = null, forceRebuild = false }) {
  const dockerfilePath = dockerConfig.dockerfile ?? 'tests/Dockerfile';
  const dockerfileAbsolutePath = resolve(cwd, dockerfilePath);
  const resolvedFirebaseConfigPath = firebaseConfigPath
    ? (resolve(cwd, firebaseConfigPath))
    : resolve(cwd, 'firebase.json');
  const functionSourcePaths = discoverFunctionsPaths(cwd, resolvedFirebaseConfigPath);
  const functionInstallPaths = deriveFunctionsInstallPaths(cwd, functionSourcePaths);
  if (!existsSync(dockerfileAbsolutePath)) {
    fail(
      logPrefix,
      `missing dockerfile "${dockerfilePath}" in target project. ` +
      'Run "npx firestack docker init" (or "npx firestack init") and try again.'
    );
  }
  // All artifact names embed the namespace (per-project) and depsHash
  // (per-dependency-set) so concurrent projects/versions never collide.
  const namespace = getArtifactNamespace(cwd, env);
  const imageBaseName = `${dockerConfig.imageBaseName ?? 'firestack-tests'}-${namespace}`;
  const nodeModulesVolumePrefix = `${dockerConfig.nodeModulesVolumePrefix ?? 'firestack-node_modules-'}${namespace}-`;
  const functionsNodeModulesVolumePrefix = `${dockerConfig.functionsNodeModulesVolumePrefix ?? 'firestack-functions-node_modules-'}${namespace}-`;
  const emulatorCacheVolume = `${dockerConfig.emulatorCacheVolumePrefix ?? 'firestack-firebase-cache-'}${namespace}`;
  const depsHash = computeDepsHash(cwd, dockerfilePath, dockerConfig.lockfilePath ?? 'package-lock.json', functionInstallPaths);
  const image = `${imageBaseName}:${depsHash}`;
  const nodeModulesVolume = `${nodeModulesVolumePrefix}${depsHash}`;
  // One dedicated volume per functions install dir, keyed by a short hash of
  // the path so the volume name stays valid regardless of path characters.
  const functionModuleMounts = functionInstallPaths.map((path) => {
    const pathHash = createHash('sha256').update(path).digest('hex').slice(0, 8);
    return {
      path,
      volume: `${functionsNodeModulesVolumePrefix}${pathHash}-${depsHash}`,
    };
  });
  const functionsPathsCsv = functionSourcePaths.join(',');
  const functionsInstallPathsCsv = functionInstallPaths.join(',');
  const workdir = dockerConfig.workdir ?? '/work';
  const addHosts = Array.isArray(dockerConfig.addHosts) ? dockerConfig.addHosts : ['host.docker.internal:host-gateway'];
  // Run as the host uid:gid where the platform supports it (not Windows),
  // so files written into the bind mount stay owned by the user.
  const runAsHostUser = dockerConfig.runAsHostUser !== false;
  const supportsUidGid = typeof process.getuid === 'function' && typeof process.getgid === 'function';
  const useHostUser = runAsHostUser && supportsUidGid;
  const userArgs = useHostUser ? ['--user', `${process.getuid()}:${process.getgid()}`] : [];
  const hostArgs = addHosts.flatMap((entry) => ['--add-host', entry]);
  const registry = dockerConfig.registry ?? {};
  // A registry on localhost is only reachable from the build if the build
  // runs on the host network, hence the default below.
  const hasLocalHostRegistry =
    typeof registry.defaultHostUrl === 'string' &&
    /https?:\/\/(127\.0\.0\.1|localhost)(:\d+)?/i.test(registry.defaultHostUrl);
  const buildNetwork = dockerConfig.buildNetwork ?? (hasLocalHostRegistry ? 'host' : null);
  const buildAddHosts = Array.isArray(dockerConfig.buildAddHosts) ? dockerConfig.buildAddHosts : [];
  const buildHostArgs = buildAddHosts.flatMap((entry) => ['--add-host', entry]);
  const buildNetworkArgs = typeof buildNetwork === 'string' && buildNetwork.trim() !== ''
    ? ['--network', buildNetwork.trim()]
    : [];
  // firebase.json must live inside the project so it is visible in the
  // build context and under the bind mount.
  const firebaseConfigRelPathRaw = relative(cwd, resolvedFirebaseConfigPath).replaceAll('\\', '/');
  if (firebaseConfigRelPathRaw.startsWith('..') || isAbsolute(firebaseConfigRelPathRaw)) {
    fail(logPrefix, `firebase config path must be inside project root: ${resolvedFirebaseConfigPath}`);
  }
  const firebaseConfigRelPath = firebaseConfigRelPathRaw || 'firebase.json';
  const imageBuildArgs = [
    ...buildNetworkArgs,
    ...buildHostArgs,
    '--build-arg',
    `FIREBASE_CONFIG_PATH=${firebaseConfigRelPath}`,
    '--build-arg',
    `FIRESTACK_FUNCTIONS_INSTALL_PATHS=${functionsInstallPathsCsv}`,
  ];
  const writablePaths = Array.isArray(dockerConfig.writablePaths)
    ? dockerConfig.writablePaths
    : ['out'];
  const preloadFirestoreEmulator = dockerConfig.preloadFirestoreEmulator !== false;

  return {
    image,
    nodeModulesVolume,
    emulatorCacheVolume,
    // Build/permission/cache/GC steps; call once before run().
    prepare() {
      ensureImage(logPrefix, image, dockerfilePath, cwd, depsHash, imageBuildArgs, forceRebuild);
      if (useHostUser) {
        ensureBindPathsWritableForUser(logPrefix, image, cwd, process.getuid(), process.getgid(), workdir, writablePaths);
        ensureVolumeWritableForUser(logPrefix, image, nodeModulesVolume, process.getuid(), process.getgid(), workdir);
        for (const mount of functionModuleMounts) {
          ensureVolumeWritableForUser(
            logPrefix,
            image,
            mount.volume,
            process.getuid(),
            process.getgid(),
            workdir,
            `${mount.path}/node_modules`
          );
        }
        ensureVolumeWritableForUser(logPrefix, image, emulatorCacheVolume, process.getuid(), process.getgid(), '/firestack-cache', 'firebase/emulators');
      }
      if (preloadFirestoreEmulator) {
        ensureFirestoreEmulatorCached(
          logPrefix,
          image,
          emulatorCacheVolume,
          useHostUser ? process.getuid() : null,
          useHostUser ? process.getgid() : null
        );
      }
      if (dockerConfig.cleanup !== false) {
        cleanupOldDockerArtifacts(
          logPrefix,
          imageBaseName,
          [nodeModulesVolumePrefix, functionsNodeModulesVolumePrefix, `${dockerConfig.emulatorCacheVolumePrefix ?? 'firestack-firebase-cache-'}${namespace}`],
          image,
          [nodeModulesVolume, ...functionModuleMounts.map((mount) => mount.volume), emulatorCacheVolume]
        );
      }
    },
    // Run a bash command inside the container; returns the exit status.
    run({ command, envNames = [], extraArgs = [] }) {
      const dockerEnvArgs = buildEnvArgs(env, envNames);
      const resourceArgs = buildResourceArgs(env);
      const args = [
        'run',
        '--rm',
        '-t',
        ...userArgs,
        // Project bind mount, then named volumes shadowing node_modules dirs.
        '-v',
        `${cwd}:${workdir}`,
        '-v',
        `${nodeModulesVolume}:${workdir}/node_modules`,
        ...functionModuleMounts.flatMap((mount) => ['-v', `${mount.volume}:${workdir}/${mount.path}/node_modules`]),
        '-v',
        `${emulatorCacheVolume}:/firestack-cache`,
        '-w',
        workdir,
        ...hostArgs,
        ...resourceArgs,
        ...extraArgs,
        ...dockerEnvArgs,
        '-e',
        'FIREBASE_EMULATORS_PATH=/firestack-cache/firebase/emulators',
        '-e',
        `FIRESTACK_FUNCTIONS_PATHS=${functionsPathsCsv}`,
        '-e',
        `FIRESTACK_FUNCTIONS_INSTALL_PATHS=${functionsInstallPathsCsv}`,
        image,
        'bash',
        '-lc',
        command,
      ];

      const result = runDocker(args, { stdio: 'inherit' });
      if (result.error) {
        fail(logPrefix, `failed to execute docker: ${result.error.message}`);
      }
      return result.status ?? 1;
    },
  };
}
|