takos-runtime-service 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +29 -0
- package/src/__tests__/middleware/rate-limit.test.ts +33 -0
- package/src/__tests__/middleware/workspace-scope-extended.test.ts +163 -0
- package/src/__tests__/routes/actions-start-limits.test.ts +139 -0
- package/src/__tests__/routes/actions-step-warnings.test.ts +194 -0
- package/src/__tests__/routes/cli-proxy.test.ts +72 -0
- package/src/__tests__/routes/git-http.test.ts +218 -0
- package/src/__tests__/routes/git-lfs-policy.test.ts +112 -0
- package/src/__tests__/routes/sessions/store.test.ts +72 -0
- package/src/__tests__/routes/workspace-scope.test.ts +45 -0
- package/src/__tests__/runtime/action-registry.test.ts +208 -0
- package/src/__tests__/runtime/action-result-helpers.test.ts +129 -0
- package/src/__tests__/runtime/actions/executor.test.ts +131 -0
- package/src/__tests__/runtime/composite-expression.test.ts +294 -0
- package/src/__tests__/runtime/file-parsers.test.ts +129 -0
- package/src/__tests__/runtime/logging.test.ts +65 -0
- package/src/__tests__/runtime/paths.test.ts +236 -0
- package/src/__tests__/runtime/secrets.test.ts +247 -0
- package/src/__tests__/runtime/validation.test.ts +516 -0
- package/src/__tests__/setup.ts +126 -0
- package/src/__tests__/shared/errors.test.ts +117 -0
- package/src/__tests__/storage/r2.test.ts +106 -0
- package/src/__tests__/utils/audit-log.test.ts +163 -0
- package/src/__tests__/utils/error-message.test.ts +38 -0
- package/src/__tests__/utils/sandbox-env.test.ts +74 -0
- package/src/app.ts +245 -0
- package/src/index.ts +1 -0
- package/src/middleware/rate-limit.ts +91 -0
- package/src/middleware/space-scope.ts +95 -0
- package/src/routes/actions/action-types.ts +20 -0
- package/src/routes/actions/execution.ts +229 -0
- package/src/routes/actions/index.ts +17 -0
- package/src/routes/actions/job-lifecycle.ts +242 -0
- package/src/routes/actions/job-queries.ts +52 -0
- package/src/routes/cli/proxy.ts +105 -0
- package/src/routes/git/http.ts +565 -0
- package/src/routes/git/init.ts +88 -0
- package/src/routes/repos/branches.ts +160 -0
- package/src/routes/repos/content.ts +209 -0
- package/src/routes/repos/read.ts +130 -0
- package/src/routes/repos/repo-validation.ts +136 -0
- package/src/routes/repos/write.ts +274 -0
- package/src/routes/runtime/exec.ts +147 -0
- package/src/routes/runtime/tools.ts +113 -0
- package/src/routes/sessions/execution.ts +263 -0
- package/src/routes/sessions/files.ts +326 -0
- package/src/routes/sessions/session-routes.ts +241 -0
- package/src/routes/sessions/session-utils.ts +88 -0
- package/src/routes/sessions/snapshot.ts +208 -0
- package/src/routes/sessions/storage.ts +329 -0
- package/src/runtime/actions/action-registry.ts +450 -0
- package/src/runtime/actions/action-result-converter.ts +31 -0
- package/src/runtime/actions/builtin/artifacts.ts +292 -0
- package/src/runtime/actions/builtin/cache-operations.ts +358 -0
- package/src/runtime/actions/builtin/checkout.ts +58 -0
- package/src/runtime/actions/builtin/index.ts +5 -0
- package/src/runtime/actions/builtin/setup-node.ts +86 -0
- package/src/runtime/actions/builtin/tar-parser.ts +175 -0
- package/src/runtime/actions/composite-executor.ts +192 -0
- package/src/runtime/actions/composite-expression.ts +190 -0
- package/src/runtime/actions/executor.ts +578 -0
- package/src/runtime/actions/file-parsers.ts +51 -0
- package/src/runtime/actions/job-manager.ts +213 -0
- package/src/runtime/actions/process-spawner.ts +275 -0
- package/src/runtime/actions/secrets.ts +162 -0
- package/src/runtime/command.ts +120 -0
- package/src/runtime/exec-runner.ts +309 -0
- package/src/runtime/git-http-backend.ts +145 -0
- package/src/runtime/git.ts +98 -0
- package/src/runtime/heartbeat.ts +57 -0
- package/src/runtime/logging.ts +26 -0
- package/src/runtime/paths.ts +264 -0
- package/src/runtime/secure-fs.ts +82 -0
- package/src/runtime/tools/network.ts +161 -0
- package/src/runtime/tools/worker.ts +335 -0
- package/src/runtime/validation.ts +292 -0
- package/src/shared/config.ts +149 -0
- package/src/shared/errors.ts +65 -0
- package/src/shared/temp-id.ts +10 -0
- package/src/storage/r2.ts +287 -0
- package/src/types/hono.d.ts +23 -0
- package/src/utils/audit-log.ts +92 -0
- package/src/utils/process-kill.ts +18 -0
- package/src/utils/sandbox-env.ts +136 -0
- package/src/utils/temp-dir.ts +74 -0
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
import * as fs from 'fs/promises';
|
|
2
|
+
import { createLogger } from 'takos-common/logger';
|
|
3
|
+
import { pushLog } from '../logging.js';
|
|
4
|
+
import { type SecretsSanitizer } from './secrets.js';
|
|
5
|
+
import { SANDBOX_LIMITS } from '../../shared/config.js';
|
|
6
|
+
|
|
7
|
+
// Module-scoped logger tagged so entries from this file show up under the 'actions' module.
const logger = createLogger({ service: 'takos-runtime', defaultFields: { module: 'actions' } });
|
|
8
|
+
|
|
9
|
+
// ---------------------------------------------------------------------------
|
|
10
|
+
// Types
|
|
11
|
+
// ---------------------------------------------------------------------------
|
|
12
|
+
|
|
13
|
+
/**
 * A single step of a workflow job, mirroring the GitHub Actions step schema.
 * A step supplies either `run` (shell command) or `uses` (action reference) —
 * presumably exclusive, but this type does not enforce it; confirm at parse time.
 */
export interface StepDefinition {
  /** Human-readable display name for the step. */
  name?: string;
  /** Shell command to execute. */
  run?: string;
  /** Action reference to execute instead of a shell command. */
  uses?: string;
  /** Inputs forwarded to the action referenced by `uses`. */
  with?: Record<string, unknown>;
  /** Extra environment variables scoped to this step. */
  env?: Record<string, string>;
  /** Conditional expression controlling whether the step runs. */
  if?: string;
  /** When true, a failing step does not fail the whole job. */
  'continue-on-error'?: boolean;
  /** Per-step timeout, expressed in minutes. */
  'timeout-minutes'?: number;
}
|
|
23
|
+
|
|
24
|
+
/**
 * In-memory record of a workflow job, kept in JobManager.jobs for the job's
 * whole lifetime: while running and through the post-completion retention
 * window, until it is purged or swept as stale.
 */
export interface ActiveJob {
  /** Unique job identifier (the key in JobManager.jobs). */
  id: string;
  /** Owning space — used for per-space concurrency accounting. */
  spaceId: string;
  /** Repository the job was started for. */
  repoId: string;
  /** Git ref the job was started from. */
  ref: string;
  /** Resolved commit SHA. */
  sha: string;
  /** Path of the workflow file within the repository. */
  workflowPath: string;
  /** Name of the job within the workflow. */
  jobName: string;
  /** On-disk working directory; removed when the job is purged, failed, or swept. */
  workspacePath: string;
  /** Lifecycle state; cleanup logic branches on 'running' vs finished. */
  status: 'running' | 'completed' | 'failed';
  /** Ordered step definitions to execute. */
  steps: StepDefinition[];
  /** Base environment for the job's steps. */
  env: Record<string, string>;
  /** Raw secret values available to the job. */
  secrets: Record<string, string>;
  /** Masks secret values in text; its state is cleared when the job is purged or swept. */
  secretsSanitizer: SecretsSanitizer;
  /** Accumulated log lines; failCloseJob appends via pushLog with the job's sanitizer. */
  logs: string[];
  /** Cursor into `steps` — presumably the step currently executing; confirm in the executor. */
  currentStep: number;
  /** Epoch ms when the job started — drives age-based cleanup. */
  startedAt: number;
  /** Epoch ms when the job finished; unset while running. */
  completedAt?: number;
  /** Final outcome, set when the job finishes. */
  conclusion?: 'success' | 'failure' | 'cancelled';
  /** Job-level outputs keyed by name. */
  outputs: Record<string, string>;
}
|
|
45
|
+
|
|
46
|
+
// ---------------------------------------------------------------------------
|
|
47
|
+
// Constants
|
|
48
|
+
// ---------------------------------------------------------------------------
|
|
49
|
+
|
|
50
|
+
// How often the periodic stale-job sweep runs.
const JOB_CLEANUP_INTERVAL_MS = 60 * 60 * 1000; // 1 hour (reduced from 24h to catch stale jobs sooner)
// Hard ceiling: any job record older than this (7 days) is removed regardless of status.
const MAX_JOB_AGE_MS = 7 * 24 * 60 * 60 * 1000;
// How long a finished job stays queryable (5 minutes) before scheduleJobCleanup purges it.
const COMPLETED_JOB_RETENTION_MS = 5 * 60 * 1000;
|
|
53
|
+
|
|
54
|
+
// ---------------------------------------------------------------------------
|
|
55
|
+
// Job Manager
|
|
56
|
+
// ---------------------------------------------------------------------------
|
|
57
|
+
|
|
58
|
+
/**
 * In-memory registry and lifecycle manager for workflow jobs.
 *
 * Tracks every job in `jobs`, enforces concurrency limits before new jobs
 * start, and runs a periodic sweep that removes records past their maximum
 * age and fails running jobs that exceeded the sandbox duration limit.
 */
export class JobManager {
  /** All tracked jobs keyed by job id (running and recently completed). */
  readonly jobs = new Map<string, ActiveJob>();
  /** Handle for the periodic stale-job sweep; null when not started. */
  private cleanupInterval: ReturnType<typeof setInterval> | null = null;
  /** Prevents overlapping sweeps when one sweep outlives the interval. */
  private cleanupRunning = false;

  /** Number of jobs currently running for the given space. */
  countRunningJobsForSpace(spaceId: string): number {
    return Array.from(this.jobs.values())
      .filter(job => job.status === 'running' && job.spaceId === spaceId)
      .length;
  }

  /** Number of jobs currently running across all spaces. */
  countRunningJobsGlobal(): number {
    return Array.from(this.jobs.values())
      .filter(job => job.status === 'running')
      .length;
  }

  /**
   * Check whether a new job can be started.
   * Enforces both per-space and global concurrency limits.
   *
   * NOTE(review): both checks use SANDBOX_LIMITS.maxConcurrentJobs. Since a
   * space's running count can never exceed the global running count, the
   * per-space branch below is unreachable once the global check passes —
   * confirm whether a dedicated per-space limit constant was intended.
   */
  canStartJob(spaceId: string): { allowed: boolean; reason?: string } {
    const globalRunning = this.countRunningJobsGlobal();
    if (globalRunning >= SANDBOX_LIMITS.maxConcurrentJobs) {
      return { allowed: false, reason: `Global concurrent job limit reached (${globalRunning}/${SANDBOX_LIMITS.maxConcurrentJobs})` };
    }
    const wsRunning = this.countRunningJobsForSpace(spaceId);
    if (wsRunning >= SANDBOX_LIMITS.maxConcurrentJobs) {
      return { allowed: false, reason: `Space concurrent job limit reached (${wsRunning}/${SANDBOX_LIMITS.maxConcurrentJobs})` };
    }
    return { allowed: true };
  }

  /** Delete a job from the active map after clearing its sanitizer and cleaning up job directory. */
  async purgeJob(jobId: string): Promise<void> {
    const job = this.jobs.get(jobId);
    if (!job) return;
    // Delete from map first to prevent concurrent access during cleanup
    this.jobs.delete(jobId);
    // Drop retained secret values so they are no longer reachable in memory.
    job.secretsSanitizer.clear();
    // Ensure job directory is cleaned up (best-effort, may already be removed)
    await removeJobDirSafe(job.workspacePath, jobId, 'purged job');
  }

  /**
   * Schedule removal of a completed job after the retention window.
   *
   * NOTE(review): the setTimeout handles are neither retained nor unref'd, so
   * pending purges cannot be cancelled by stopCleanup() and may briefly keep
   * the process alive — confirm this is acceptable for shutdown behavior.
   */
  scheduleJobCleanup(jobId: string): void {
    const tryPurge = (): void => {
      const job = this.jobs.get(jobId);
      if (!job) return;
      if (job.status === 'running') {
        // Still running -- retry after another retention window
        setTimeout(tryPurge, COMPLETED_JOB_RETENTION_MS);
        return;
      }
      // Fire-and-forget: purgeJob logs its own failures.
      void this.purgeJob(jobId);
    };
    setTimeout(tryPurge, COMPLETED_JOB_RETENTION_MS);
  }

  /** Mark a job failed, cleanup job directory, and schedule retention cleanup. */
  async failCloseJob(
    jobId: string,
    job: ActiveJob,
    reason: string,
  ): Promise<void> {
    job.status = 'failed';
    job.conclusion = 'failure';
    job.completedAt = Date.now();
    // The failure reason goes through the sanitizer in case it embeds a secret.
    pushLog(job.logs, reason, job.secretsSanitizer);
    await removeJobDirSafe(job.workspacePath, jobId, 'failed job');
    this.scheduleJobCleanup(jobId);
  }

  // -- Lifecycle-managed cleanup of stale jobs --------------------------------

  /** Start the periodic stale-job sweep; idempotent if already started. */
  startCleanup(): void {
    if (this.cleanupInterval) return;

    this.cleanupInterval = setInterval(() => {
      // Skip this tick entirely if the previous sweep has not finished yet.
      if (this.cleanupRunning) return;
      this.cleanupRunning = true;

      void this.cleanupStaleJobs()
        .catch((err) => {
          logger.error('Error in periodic job cleanup', { error: err });
        })
        .finally(() => {
          this.cleanupRunning = false;
        });
    }, JOB_CLEANUP_INTERVAL_MS);
  }

  /** Stop the periodic sweep; no-op when it was never started. */
  stopCleanup(): void {
    if (!this.cleanupInterval) return;
    clearInterval(this.cleanupInterval);
    this.cleanupInterval = null;
  }

  // -- Private helpers -------------------------------------------------------

  /**
   * One sweep over all tracked jobs: evict anything older than MAX_JOB_AGE_MS
   * outright, and fail-close running jobs past SANDBOX_LIMITS.maxJobDuration.
   */
  private async cleanupStaleJobs(): Promise<void> {
    const now = Date.now();
    for (const [jobId, job] of this.jobs.entries()) {
      const age = now - job.startedAt;

      // Clean up jobs that exceeded maximum age regardless of status
      if (age > MAX_JOB_AGE_MS) {
        logger.info('Cleaning up stale job (max age exceeded)', { jobId, status: job.status });
        job.secretsSanitizer.clear();
        await removeJobDirSafe(job.workspacePath, jobId, 'stale job');
        // Deleting during iteration is safe for a JS Map.
        this.jobs.delete(jobId);
        continue;
      }

      // Detect and fail running jobs that exceeded the maximum job duration
      if (job.status === 'running' && age > SANDBOX_LIMITS.maxJobDuration) {
        logger.warn('Failing job that exceeded max duration', { jobId, durationMs: age });
        await this.failCloseJob(jobId, job, `Job exceeded maximum duration of ${SANDBOX_LIMITS.maxJobDuration}ms`);
      }
    }
  }
}
|
|
180
|
+
|
|
181
|
+
// ---------------------------------------------------------------------------
|
|
182
|
+
// Standalone helpers (not dependent on instance state)
|
|
183
|
+
// ---------------------------------------------------------------------------
|
|
184
|
+
|
|
185
|
+
/** Remove a job's working directory, logging failures instead of throwing. */
|
|
186
|
+
export async function removeJobDirSafe(
|
|
187
|
+
workspacePath: string,
|
|
188
|
+
jobId: string,
|
|
189
|
+
context: string,
|
|
190
|
+
): Promise<void> {
|
|
191
|
+
try {
|
|
192
|
+
await fs.rm(workspacePath, { recursive: true, force: true });
|
|
193
|
+
} catch (rmErr) {
|
|
194
|
+
logger.error(`Failed to remove ${context} job directory`, { jobId, error: rmErr });
|
|
195
|
+
}
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
/** Sanitize a key-value map through the job's secrets sanitizer. */
|
|
200
|
+
export function sanitizeOutputs(
|
|
201
|
+
outputs: Record<string, string>,
|
|
202
|
+
sanitizer: SecretsSanitizer,
|
|
203
|
+
): Record<string, string> {
|
|
204
|
+
return Object.fromEntries(
|
|
205
|
+
Object.entries(outputs).map(([k, v]) => [k, sanitizer.sanitize(v)])
|
|
206
|
+
);
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
// ---------------------------------------------------------------------------
// Default singleton for backwards compatibility
// ---------------------------------------------------------------------------

/** Shared default JobManager instance. */
export const jobManager = new JobManager();
|
|
@@ -0,0 +1,275 @@
|
|
|
1
|
+
import { spawn, type ChildProcess } from 'child_process';
|
|
2
|
+
import * as fs from 'fs/promises';
|
|
3
|
+
import * as path from 'path';
|
|
4
|
+
import { randomUUID } from 'crypto';
|
|
5
|
+
import { pushLog } from '../logging.js';
|
|
6
|
+
import { SANDBOX_LIMITS } from '../../shared/config.js';
|
|
7
|
+
import { getErrorMessage } from 'takos-common/errors';
|
|
8
|
+
import { gracefulKill } from '../../utils/process-kill.js';
|
|
9
|
+
import type { ExecutorStepResult } from './executor.js';
|
|
10
|
+
|
|
11
|
+
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------

/** Absolute paths of the per-invocation GitHub-style command files. */
interface CommandFiles {
  /** GITHUB_OUTPUT file — key/value step outputs. */
  output: string;
  /** GITHUB_ENV file — env vars exported for later use. */
  env: string;
  /** GITHUB_PATH file — directories to prepend to PATH. */
  path: string;
  /** GITHUB_STEP_SUMMARY file — markdown step summary. */
  summary: string;
}

/** Command files plus the env vars that point a child process at them. */
interface PreparedCommandFiles {
  /** The GITHUB_OUTPUT / GITHUB_ENV / GITHUB_PATH / GITHUB_STEP_SUMMARY entries. */
  envVars: Record<string, string>;
  files: CommandFiles;
}

/** Per-spawn execution options. */
interface SpawnOptions {
  /** Wall-clock timeout in milliseconds before the child is killed. */
  timeout: number;
  /** Working directory for the child process. */
  cwd: string;
  /** Run the command through a shell only when exactly `true`. */
  shell?: boolean;
}

/** Mutable execution context shared between the caller and a spawn. */
interface SpawnContext {
  /** Base environment for the child (augmented by createRuntimeEnv). */
  env: Record<string, string>;
  /**
   * Log sink. NOTE(review): spawnWithTimeout reassigns this to a fresh array,
   * so callers holding a prior reference will not see new entries — confirm
   * callers re-read ctx.logs after each spawn.
   */
  logs: string[];
  /** Step outputs collected from command files — also reassigned per spawn. */
  outputs: Record<string, string>;
  /** Workspace root; command files live under `<workspacePath>/.runner/commands`. */
  workspacePath: string;
  /** Hook invoked with each raw stdout chunk to parse workflow commands. */
  parseWorkflowCommands: (text: string) => void;
  /** Parses key/value file content (GITHUB_OUTPUT and GITHUB_ENV). */
  parseKeyValueFile: (content: string) => Record<string, string>;
  /** Splits GITHUB_PATH file content into individual path entries. */
  parsePathFile: (content: string) => string[];
}
|
|
42
|
+
|
|
43
|
+
// ---------------------------------------------------------------------------
|
|
44
|
+
// Result builders
|
|
45
|
+
// ---------------------------------------------------------------------------
|
|
46
|
+
|
|
47
|
+
export function failureResult(
|
|
48
|
+
stderr: string,
|
|
49
|
+
outputs: Record<string, string> = {},
|
|
50
|
+
exitCode: number = 1
|
|
51
|
+
): ExecutorStepResult {
|
|
52
|
+
return { exitCode, stdout: '', stderr, outputs, conclusion: 'failure' };
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
export function successResult(stdout: string, outputs: Record<string, string>): ExecutorStepResult {
|
|
56
|
+
return { exitCode: 0, stdout, stderr: '', outputs, conclusion: 'success' };
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
// ---------------------------------------------------------------------------
|
|
60
|
+
// Command file management
|
|
61
|
+
// ---------------------------------------------------------------------------
|
|
62
|
+
|
|
63
|
+
export async function prepareCommandFiles(workspacePath: string): Promise<PreparedCommandFiles> {
|
|
64
|
+
const commandDir = path.join(workspacePath, '.runner', 'commands');
|
|
65
|
+
await fs.mkdir(commandDir, { recursive: true });
|
|
66
|
+
|
|
67
|
+
const id = randomUUID();
|
|
68
|
+
const files: CommandFiles = {
|
|
69
|
+
output: path.join(commandDir, `output-${id}.txt`),
|
|
70
|
+
env: path.join(commandDir, `env-${id}.txt`),
|
|
71
|
+
path: path.join(commandDir, `path-${id}.txt`),
|
|
72
|
+
summary: path.join(commandDir, `summary-${id}.md`),
|
|
73
|
+
};
|
|
74
|
+
|
|
75
|
+
await Promise.all(Object.values(files).map(f => fs.writeFile(f, '')));
|
|
76
|
+
|
|
77
|
+
return {
|
|
78
|
+
envVars: {
|
|
79
|
+
GITHUB_OUTPUT: files.output,
|
|
80
|
+
GITHUB_ENV: files.env,
|
|
81
|
+
GITHUB_PATH: files.path,
|
|
82
|
+
GITHUB_STEP_SUMMARY: files.summary,
|
|
83
|
+
},
|
|
84
|
+
files,
|
|
85
|
+
};
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
export async function applyCommandFiles(
|
|
89
|
+
prepared: PreparedCommandFiles,
|
|
90
|
+
ctx: SpawnContext
|
|
91
|
+
): Promise<void> {
|
|
92
|
+
const readFile = (filePath: string): Promise<string> =>
|
|
93
|
+
fs.readFile(filePath, 'utf-8').catch(() => '');
|
|
94
|
+
|
|
95
|
+
const [outputContent, envContent, pathContent] = await Promise.all([
|
|
96
|
+
readFile(prepared.files.output),
|
|
97
|
+
readFile(prepared.files.env),
|
|
98
|
+
readFile(prepared.files.path),
|
|
99
|
+
]);
|
|
100
|
+
|
|
101
|
+
if (outputContent) Object.assign(ctx.outputs, ctx.parseKeyValueFile(outputContent));
|
|
102
|
+
if (envContent) Object.assign(ctx.env, ctx.parseKeyValueFile(envContent));
|
|
103
|
+
if (pathContent) {
|
|
104
|
+
for (const entry of ctx.parsePathFile(pathContent)) {
|
|
105
|
+
ctx.env.PATH = entry + path.delimiter + (ctx.env.PATH || '');
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
export async function cleanupCommandFiles(prepared: PreparedCommandFiles): Promise<void> {
|
|
111
|
+
await Promise.all(
|
|
112
|
+
Object.values(prepared.files).map(f => fs.rm(f, { force: true }))
|
|
113
|
+
);
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
// ---------------------------------------------------------------------------
|
|
117
|
+
// Runtime env construction
|
|
118
|
+
// ---------------------------------------------------------------------------
|
|
119
|
+
|
|
120
|
+
export function createRuntimeEnv(
|
|
121
|
+
env: Record<string, string>,
|
|
122
|
+
workspacePath: string,
|
|
123
|
+
commandFileEnv: Record<string, string>
|
|
124
|
+
): Record<string, string> {
|
|
125
|
+
const runnerBase = path.join(workspacePath, '.runner');
|
|
126
|
+
const runtimeEnv: Record<string, string> = {
|
|
127
|
+
...env,
|
|
128
|
+
...commandFileEnv,
|
|
129
|
+
RUNNER_TEMP: path.join(runnerBase, 'temp'),
|
|
130
|
+
RUNNER_TOOL_CACHE: path.join(runnerBase, 'tool-cache'),
|
|
131
|
+
};
|
|
132
|
+
|
|
133
|
+
for (const key of ['PATH', 'HOME'] as const) {
|
|
134
|
+
if (!runtimeEnv[key] && process.env[key]) {
|
|
135
|
+
runtimeEnv[key] = process.env[key]!;
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
const existingNodeOpts = runtimeEnv.NODE_OPTIONS || '';
|
|
140
|
+
if (!existingNodeOpts.includes('--max-old-space-size')) {
|
|
141
|
+
runtimeEnv.NODE_OPTIONS = `${existingNodeOpts} --max-old-space-size=2048`.trim();
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
return runtimeEnv;
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
// ---------------------------------------------------------------------------
|
|
148
|
+
// Process spawning
|
|
149
|
+
// ---------------------------------------------------------------------------
|
|
150
|
+
|
|
151
|
+
function handleStdoutData(
|
|
152
|
+
data: Buffer,
|
|
153
|
+
state: { stdout: string },
|
|
154
|
+
child: ChildProcess,
|
|
155
|
+
ctx: SpawnContext
|
|
156
|
+
): void {
|
|
157
|
+
const text = data.toString('utf-8');
|
|
158
|
+
state.stdout += text;
|
|
159
|
+
if (state.stdout.length > SANDBOX_LIMITS.maxOutputSize) {
|
|
160
|
+
pushLog(ctx.logs, '[WARNING] Output size limit exceeded, truncating...');
|
|
161
|
+
state.stdout = state.stdout.slice(0, SANDBOX_LIMITS.maxOutputSize);
|
|
162
|
+
child.kill('SIGTERM');
|
|
163
|
+
}
|
|
164
|
+
ctx.parseWorkflowCommands(text);
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
function handleStderrData(
|
|
168
|
+
data: Buffer,
|
|
169
|
+
state: { stderr: string }
|
|
170
|
+
): void {
|
|
171
|
+
const text = data.toString('utf-8');
|
|
172
|
+
state.stderr += text;
|
|
173
|
+
if (state.stderr.length > SANDBOX_LIMITS.maxOutputSize) {
|
|
174
|
+
state.stderr = state.stderr.slice(0, SANDBOX_LIMITS.maxOutputSize);
|
|
175
|
+
}
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
/**
 * Spawn a child process with a wall-clock timeout, capturing stdout/stderr,
 * applying GitHub-style command files on exit, and always resolving (never
 * rejecting) with an ExecutorStepResult.
 *
 * NOTE(review): ctx.outputs and ctx.logs are REASSIGNED to fresh objects here,
 * so any reference the caller captured before this call goes stale — confirm
 * callers re-read them from ctx after the spawn.
 */
export async function spawnWithTimeout(
  command: string,
  args: string[],
  options: SpawnOptions,
  ctx: SpawnContext
): Promise<ExecutorStepResult> {
  ctx.outputs = {};
  ctx.logs = [];
  const prepared = await prepareCommandFiles(ctx.workspacePath);

  return new Promise((resolve) => {
    const state = { stdout: '', stderr: '', isTimedOut: false };
    let child: ChildProcess;

    try {
      child = spawn(command, args, {
        cwd: options.cwd,
        env: createRuntimeEnv(ctx.env, ctx.workspacePath, prepared.envVars),
        stdio: ['ignore', 'pipe', 'pipe'],
        // Shell only when explicitly true (NOTE(review): `=== true ? true : false` is redundant).
        shell: options.shell === true ? true : false,
      });
    } catch (err) {
      // Synchronous spawn failure (e.g. invalid options): clean up and report.
      void cleanupCommandFiles(prepared);
      resolve(failureResult(`Failed to spawn process: ${getErrorMessage(err)}`, ctx.outputs));
      return;
    }

    // Two-stage kill: the timeout fires gracefulKill, which returns the
    // handle for its own delayed force-kill so both can be cleared later.
    let forceKillHandle: NodeJS.Timeout | undefined;
    const timeoutHandle = setTimeout(() => {
      state.isTimedOut = true;
      pushLog(ctx.logs, `[TIMEOUT] Command timed out after ${options.timeout}ms`);
      forceKillHandle = gracefulKill(child);
    }, options.timeout);

    function clearTimers(): void {
      clearTimeout(timeoutHandle);
      if (forceKillHandle) clearTimeout(forceKillHandle);
    }

    child.stdout?.on('data', (data: Buffer) => {
      handleStdoutData(data, state, child, ctx);
    });
    child.stdout?.on('error', (err) => {
      // Stream errors are recorded but do not abort the run; 'close' still fires.
      state.stderr += `\nstdout stream error: ${err.message}`;
    });

    child.stderr?.on('data', (data: Buffer) => {
      handleStderrData(data, state);
    });
    child.stderr?.on('error', (err) => {
      state.stderr += `\nstderr stream error: ${err.message}`;
    });

    child.on('error', (err) => {
      // Async spawn/exec failure: resolve with whatever output was captured.
      clearTimers();
      void cleanupCommandFiles(prepared);
      resolve({
        exitCode: 1,
        stdout: state.stdout,
        stderr: state.stderr + `\nSpawn error: ${err.message}`,
        outputs: ctx.outputs,
        conclusion: 'failure',
      });
    });

    child.on('close', (code, signal) => {
      clearTimers();
      // Async post-processing wrapped in an IIFE since 'close' handlers are sync.
      void (async () => {
        try {
          await applyCommandFiles(prepared, ctx);
        } catch (err) {
          pushLog(ctx.logs, `[WARNING] Failed to parse command files: ${getErrorMessage(err)}`);
        }
        await cleanupCommandFiles(prepared);

        if (state.isTimedOut) {
          // Exit code 124 — the shell convention for a timed-out command.
          resolve({
            exitCode: 124,
            stdout: state.stdout,
            stderr: state.stderr + '\nCommand timed out',
            outputs: ctx.outputs,
            conclusion: 'failure',
          });
          return;
        }

        // No exit code means the process died from a signal; report 128, else a generic 1.
        const exitCode = code ?? (signal ? 128 : 1);
        resolve({
          exitCode,
          stdout: state.stdout,
          stderr: state.stderr,
          outputs: ctx.outputs,
          conclusion: exitCode === 0 ? 'success' : 'failure',
        });
      })();
    });
  });
}
|
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
const SECRET_MASK = '***';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Maximum length for a single secret value to prevent ReDoS.
|
|
5
|
+
* Secrets longer than this are handled via string replacement only (no regex).
|
|
6
|
+
*/
|
|
7
|
+
const MAX_SECRET_REGEX_LENGTH = 4096;
|
|
8
|
+
|
|
9
|
+
export class SecretsSanitizer {
|
|
10
|
+
private secretValues: Set<string> = new Set();
|
|
11
|
+
private secretPatterns: RegExp[] = [];
|
|
12
|
+
/** Secrets too long for safe regex conversion — handled via string replacement only. */
|
|
13
|
+
private longSecrets: Set<string> = new Set();
|
|
14
|
+
|
|
15
|
+
private addValues(values: Iterable<string>): void {
|
|
16
|
+
for (const value of values) {
|
|
17
|
+
if (value.length > 0) {
|
|
18
|
+
this.secretValues.add(value);
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
this.buildPatterns();
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
registerSecrets(secrets: Record<string, string>): void {
|
|
25
|
+
this.addValues(Object.values(secrets));
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
registerSecretValues(values: string[]): void {
|
|
29
|
+
this.addValues(values);
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
private buildPatterns(): void {
|
|
33
|
+
this.secretPatterns = [];
|
|
34
|
+
this.longSecrets = new Set();
|
|
35
|
+
for (const secret of this.secretValues) {
|
|
36
|
+
// Skip regex for long secrets to prevent ReDoS
|
|
37
|
+
if (secret.length > MAX_SECRET_REGEX_LENGTH) {
|
|
38
|
+
this.longSecrets.add(secret);
|
|
39
|
+
continue;
|
|
40
|
+
}
|
|
41
|
+
const escaped = secret.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
|
42
|
+
try {
|
|
43
|
+
this.secretPatterns.push(new RegExp(escaped, 'g'));
|
|
44
|
+
} catch (err) {
|
|
45
|
+
// Regex construction failed — fall back to string replacement
|
|
46
|
+
this.longSecrets.add(secret);
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
sanitize(text: string): string {
|
|
52
|
+
if (!text || this.secretValues.size === 0) return text;
|
|
53
|
+
|
|
54
|
+
let sanitized = text;
|
|
55
|
+
for (const pattern of this.secretPatterns) {
|
|
56
|
+
pattern.lastIndex = 0;
|
|
57
|
+
sanitized = sanitized.replace(pattern, SECRET_MASK);
|
|
58
|
+
}
|
|
59
|
+
// String replacement fallback for long secrets and regex-failed secrets
|
|
60
|
+
for (const secret of this.longSecrets) {
|
|
61
|
+
if (sanitized.includes(secret)) {
|
|
62
|
+
sanitized = sanitized.split(secret).join(SECRET_MASK);
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
return sanitized;
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
sanitizeLogs(logs: string[]): string[] {
|
|
69
|
+
return logs.map(log => this.sanitize(log));
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
clear(): void {
|
|
73
|
+
this.secretValues.clear();
|
|
74
|
+
this.secretPatterns = [];
|
|
75
|
+
this.longSecrets.clear();
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
/**
|
|
80
|
+
* Commands that would directly dump environment variables containing secrets.
|
|
81
|
+
* These are blocked (not just warned) to prevent secret leakage.
|
|
82
|
+
*/
|
|
83
|
+
const SECRET_EXPOSING_COMMANDS: Array<{ pattern: RegExp; description: string }> = [
|
|
84
|
+
{ pattern: /^\s*env\s*$/, description: 'bare "env" dumps all environment variables' },
|
|
85
|
+
{ pattern: /^\s*printenv\s*$/, description: 'bare "printenv" dumps all environment variables' },
|
|
86
|
+
{ pattern: /^\s*export\s+-p\s*$/, description: '"export -p" dumps all exported variables' },
|
|
87
|
+
];
|
|
88
|
+
|
|
89
|
+
/**
|
|
90
|
+
* Detect if a shell command might expose secrets via env/printenv/set.
|
|
91
|
+
* Returns a description of the risk if detected, or null if safe.
|
|
92
|
+
*/
|
|
93
|
+
export function mightExposeSecrets(command: string): string | null {
|
|
94
|
+
for (const line of command.split('\n')) {
|
|
95
|
+
const trimmed = line.trim();
|
|
96
|
+
if (!trimmed || trimmed.startsWith('#')) continue;
|
|
97
|
+
for (const { pattern, description } of SECRET_EXPOSING_COMMANDS) {
|
|
98
|
+
if (pattern.test(trimmed)) {
|
|
99
|
+
return description;
|
|
100
|
+
}
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
return null;
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
/**
|
|
107
|
+
* Check if a command should be blocked because it would expose secrets.
|
|
108
|
+
* Unlike mightExposeSecrets(), this returns true only for commands that
|
|
109
|
+
* would definitely dump all environment variables.
|
|
110
|
+
*/
|
|
111
|
+
export function shouldBlockForSecretExposure(command: string): boolean {
|
|
112
|
+
return mightExposeSecrets(command) !== null;
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
export function createSecretsSanitizer(
|
|
116
|
+
secrets: Record<string, string>,
|
|
117
|
+
extraValues: string[] = []
|
|
118
|
+
): SecretsSanitizer {
|
|
119
|
+
const sanitizer = new SecretsSanitizer();
|
|
120
|
+
sanitizer.registerSecrets(secrets);
|
|
121
|
+
if (extraValues.length > 0) {
|
|
122
|
+
sanitizer.registerSecretValues(extraValues);
|
|
123
|
+
}
|
|
124
|
+
return sanitizer;
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
// ---------------------------------------------------------------------------
|
|
128
|
+
// --- Sensitive environment detection ---
|
|
129
|
+
// ---------------------------------------------------------------------------
|
|
130
|
+
|
|
131
|
+
const EXTRA_SENSITIVE_ENV_PATTERNS = [
|
|
132
|
+
/SECRET/i,
|
|
133
|
+
/PASSWORD/i,
|
|
134
|
+
/TOKEN/i,
|
|
135
|
+
/API_KEY/i,
|
|
136
|
+
/PRIVATE_KEY/i,
|
|
137
|
+
/ACCESS_KEY/i,
|
|
138
|
+
/AUTH/i,
|
|
139
|
+
];
|
|
140
|
+
|
|
141
|
+
const EXTRA_SENSITIVE_ENV_KEYS = new Set([
|
|
142
|
+
'TAKOS_TOKEN',
|
|
143
|
+
'TAKOS_SESSION_ID',
|
|
144
|
+
]);
|
|
145
|
+
|
|
146
|
+
export function collectSensitiveEnvValues(env?: Record<string, string>): string[] {
|
|
147
|
+
if (!env) return [];
|
|
148
|
+
const values: string[] = [];
|
|
149
|
+
|
|
150
|
+
for (const [key, value] of Object.entries(env)) {
|
|
151
|
+
if (!value) continue;
|
|
152
|
+
if (EXTRA_SENSITIVE_ENV_KEYS.has(key)) {
|
|
153
|
+
values.push(value);
|
|
154
|
+
continue;
|
|
155
|
+
}
|
|
156
|
+
if (EXTRA_SENSITIVE_ENV_PATTERNS.some((pattern) => pattern.test(key))) {
|
|
157
|
+
values.push(value);
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
return values;
|
|
162
|
+
}
|