@blaxel/core 0.2.57-preview.30 → 0.2.57-preview.33

@@ -9,8 +9,8 @@ const index_js_1 = require("../authentication/index.js");
  const env_js_1 = require("../common/env.js");
  const node_js_1 = require("../common/node.js");
  // Build info - these placeholders are replaced at build time by build:replace-imports
- const BUILD_VERSION = "0.2.57-preview.30";
- const BUILD_COMMIT = "56d2203f7145b774778210b79f0641b8e3a5e175";
+ const BUILD_VERSION = "0.2.57-preview.33";
+ const BUILD_COMMIT = "c169eb11d41a069351c8203a2aad841f8cdef220";
  const BUILD_SENTRY_DSN = "https://fd5e60e1c9820e1eef5ccebb84a07127@o4508714045276160.ingest.us.sentry.io/4510465864564736";
  // Cache for config.yaml tracking value
  let configTrackingValue = null;
@@ -525,7 +525,7 @@ const getProcess = (options) => {
  exports.getProcess = getProcess;
  /**
  * Execute a command
- * Execute a command and return process information
+ * Execute a command and return process information. If Accept header is text/event-stream, streams logs in SSE format and returns the process response as a final event.
  */
  const postProcess = (options) => {
  return (options.client ?? client_gen_1.client).post({
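For context, the new description on `postProcess` refers to a streaming mode of `POST /process`. Below is a minimal sketch of what a direct consumer of that mode might look like, assuming the event framing that `execWithStreaming` (further down in this diff) parses: newline-delimited JSON objects of the form `{ type: 'stdout' | 'stderr' | 'result', data: string }`. The function name, URL, and header wiring are illustrative only and not part of the generated client.

```ts
// Hypothetical direct consumer of the streaming POST /process mode (sketch, not part of @blaxel/core).
async function runProcessStreaming(sandboxUrl: string, headers: Record<string, string>, body: unknown) {
  const response = await fetch(`${sandboxUrl}/process`, {
    method: 'POST',
    headers: { ...headers, 'Content-Type': 'application/json', Accept: 'text/event-stream' },
    body: JSON.stringify(body),
  });
  if (!response.ok || !response.body) throw new Error(`process start failed: ${response.status}`);

  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  let finalResult: unknown = null;

  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split(/\r?\n/);
    buffer = lines.pop() ?? '';
    for (const line of lines.filter((l) => l.trim())) {
      const event = JSON.parse(line); // { type, data } as parsed by execWithStreaming below
      if (event.type === 'stdout' || event.type === 'stderr') console.log(event.data);
      else if (event.type === 'result') finalResult = JSON.parse(event.data); // final ProcessResponse
    }
  }
  return finalResult;
}
```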
@@ -77,41 +77,36 @@ class SandboxProcess extends action_js_1.SandboxAction {
  }
  async exec(process) {
  let onLog;
+ let onStdout;
+ let onStderr;
  if ('onLog' in process && process.onLog) {
  onLog = process.onLog;
  delete process.onLog;
  }
+ if ('onStdout' in process && process.onStdout) {
+ onStdout = process.onStdout;
+ delete process.onStdout;
+ }
+ if ('onStderr' in process && process.onStderr) {
+ onStderr = process.onStderr;
+ delete process.onStderr;
+ }
  // Store original wait_for_completion setting
  const shouldWaitForCompletion = process.waitForCompletion;
- // Always start process without wait_for_completion to avoid server-side blocking
- if (shouldWaitForCompletion && onLog) {
- process.waitForCompletion = false;
- }
- const { response, data, error } = await (0, index_js_1.postProcess)({
- body: process,
- baseUrl: this.url,
- client: this.client,
- });
- this.handleResponseError(response, data, error);
- let result = data;
- // Handle wait_for_completion with parallel log streaming
- if (shouldWaitForCompletion && onLog) {
- const streamControl = this.streamLogs(result.pid, { onLog });
- try {
- // Wait for process completion
- result = await this.wait(result.pid, { interval: 500, maxWait: 1000 * 60 * 60 });
- }
- finally {
- // Clean up log streaming
- if (streamControl) {
- streamControl.close();
- }
- }
+ // When waiting for completion with streaming callbacks, use streaming endpoint
+ if (shouldWaitForCompletion && (onLog || onStdout || onStderr)) {
+ return await this.execWithStreaming(process, { onLog, onStdout, onStderr });
  }
  else {
- // For non-blocking execution, set up log streaming immediately if requested
- if (onLog) {
- const streamControl = this.streamLogs(result.pid, { onLog });
+ const { response, data, error } = await (0, index_js_1.postProcess)({
+ body: process,
+ baseUrl: this.url,
+ client: this.client,
+ });
+ this.handleResponseError(response, data, error);
+ const result = data;
+ if (onLog || onStdout || onStderr) {
+ const streamControl = this.streamLogs(result.pid, { onLog, onStdout, onStderr });
  return {
  ...result,
  close() {
@@ -121,8 +116,123 @@ class SandboxProcess extends action_js_1.SandboxAction {
  },
  };
  }
+ return result;
+ }
+ }
+ async execWithStreaming(processRequest, options) {
+ const headers = this.sandbox.forceUrl ? this.sandbox.headers : settings_js_1.settings.headers;
+ const controller = new AbortController();
+ const response = await fetch(`${this.url}/process`, {
+ method: 'POST',
+ signal: controller.signal,
+ headers: {
+ ...headers,
+ 'Content-Type': 'application/json',
+ 'Accept': 'text/event-stream',
+ },
+ body: JSON.stringify(processRequest),
+ });
+ if (!response.ok) {
+ const errorText = await response.text();
+ throw new Error(`Failed to execute process: ${errorText}`);
  }
- return { ...result, close: () => { } };
+ const contentType = response.headers.get('Content-Type') || '';
+ const isStreaming = contentType.includes('application/x-ndjson');
+ // Fallback: server doesn't support streaming, use legacy approach
+ if (!isStreaming) {
+ const data = await response.json();
+ // If process already completed (server waited), just return with logs
+ if (data.status === 'completed' || data.status === 'failed') {
+ // Emit any captured logs through callbacks
+ if (data.stdout) {
+ for (const line of data.stdout.split('\n').filter(l => l)) {
+ options.onStdout?.(line);
+ }
+ }
+ if (data.stderr) {
+ for (const line of data.stderr.split('\n').filter(l => l)) {
+ options.onStderr?.(line);
+ }
+ }
+ if (data.logs) {
+ for (const line of data.logs.split('\n').filter(l => l)) {
+ options.onLog?.(line);
+ }
+ }
+ return {
+ ...data,
+ close: () => { },
+ };
+ }
+ return {
+ ...data,
+ close: () => { },
+ };
+ }
+ // Streaming response handling
+ if (!response.body) {
+ throw new Error('No response body for streaming');
+ }
+ const reader = response.body.getReader();
+ const decoder = new TextDecoder();
+ let buffer = '';
+ let result = null;
+ while (true) {
+ const readResult = await reader.read();
+ if (readResult.done)
+ break;
+ if (readResult.value && readResult.value instanceof Uint8Array) {
+ buffer += decoder.decode(readResult.value, { stream: true });
+ }
+ const lines = buffer.split(/\r?\n/);
+ buffer = lines.pop();
+ for (const line of lines) {
+ const parsed = JSON.parse(line);
+ switch (parsed.type) {
+ case 'stdout':
+ if (parsed.data) {
+ options.onStdout?.(parsed.data);
+ options.onLog?.(parsed.data);
+ }
+ break;
+ case 'stderr':
+ if (parsed.data) {
+ options.onStderr?.(parsed.data);
+ options.onLog?.(parsed.data);
+ }
+ break;
+ case 'result':
+ try {
+ result = JSON.parse(parsed.data);
+ }
+ catch {
+ throw new Error(`Failed to parse result JSON: ${parsed.data}`);
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ }
+ // Process any remaining buffer
+ if (buffer.trim()) {
+ if (buffer.startsWith('result:')) {
+ const jsonStr = buffer.slice(7);
+ try {
+ result = JSON.parse(jsonStr);
+ }
+ catch {
+ throw new Error(`Failed to parse result JSON: ${jsonStr}`);
+ }
+ }
+ }
+ if (!result) {
+ throw new Error('No result received from streaming response');
+ }
+ return {
+ ...result,
+ close: () => controller.abort(),
+ };
  }
  async wait(identifier, { maxWait = 60000, interval = 1000 } = {}) {
  const startTime = Date.now();
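Taken together, the rewritten `exec` routes calls that combine `waitForCompletion` with any streaming callback through `execWithStreaming`, so output is delivered live instead of being polled after the fact. A hedged usage sketch: `sandbox` stands in for a sandbox handle obtained elsewhere through the SDK, and any `ProcessRequest` fields beyond `waitForCompletion` and the callbacks are assumptions for illustration.

```ts
// Sketch only: `sandbox` is a hypothetical SandboxInstance-style handle; field names other than
// waitForCompletion and the callbacks are assumptions, not confirmed by this diff.
const result = await sandbox.process.exec({
  command: 'npm run build',
  waitForCompletion: true, // combined with callbacks below => the execWithStreaming path in this diff
  onStdout: (line) => console.log(`[stdout] ${line}`),
  onStderr: (line) => console.error(`[stderr] ${line}`),
  onLog: (line) => {
    // onLog still receives every line; execWithStreaming fans stdout/stderr events into it as well
  },
});

console.log(result.status); // fields of the final ProcessResponse carried by the 'result' event
result.close();             // aborts the underlying streaming request if it is still open
```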
@@ -187,7 +187,7 @@ export declare const getNetworkProcessByPidPorts: <ThrowOnError extends boolean
  export declare const getProcess: <ThrowOnError extends boolean = false>(options?: Options<GetProcessData, ThrowOnError>) => import("@hey-api/client-fetch").RequestResult<import("./types.gen").ProcessResponse[], unknown, ThrowOnError>;
  /**
  * Execute a command
- * Execute a command and return process information
+ * Execute a command and return process information. If Accept header is text/event-stream, streams logs in SSE format and returns the process response as a final event.
  */
  export declare const postProcess: <ThrowOnError extends boolean = false>(options: Options<PostProcessData, ThrowOnError>) => import("@hey-api/client-fetch").RequestResult<import("./types.gen").ProcessResponse, import("./types.gen").ErrorResponse, ThrowOnError>;
  /**
@@ -143,6 +143,8 @@ export type ProcessResponse = {
  restartOnFailure?: boolean;
  startedAt: string;
  status: 'failed' | 'killed' | 'stopped' | 'running' | 'completed';
+ stderr: string;
+ stdout: string;
  workingDir: string;
  };
  export type RankedFile = {
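With `stdout` and `stderr` now typed on `ProcessResponse`, a completed run can also be inspected after the fact without any callbacks (the non-streaming branch of `exec`). A small sketch, with the same assumptions about the handle and request fields as above:

```ts
// No callbacks: exec goes through the generated client and returns the plain ProcessResponse.
const res = await sandbox.process.exec({
  command: 'ls -la',
  waitForCompletion: true,
});

if (res.status === 'completed') {
  console.log(res.stdout); // captured standard output, now part of the response type
} else {
  console.error(res.stderr); // captured standard error
}
```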
@@ -13,6 +13,7 @@ export declare class SandboxProcess extends SandboxAction {
  close: () => void;
  };
  exec(process: ProcessRequest | ProcessRequestWithLog): Promise<PostProcessResponse | ProcessResponseWithLog>;
+ private execWithStreaming;
  wait(identifier: string, { maxWait, interval }?: {
  maxWait?: number;
  interval?: number;
@@ -47,6 +47,8 @@ export declare function normalizeEnvs(envs?: EnvVar[]): EnvVar[] | undefined;
  export declare function normalizeVolumes(volumes?: (VolumeBinding | VolumeAttachment)[]): VolumeAttachment[] | undefined;
  export type ProcessRequestWithLog = ProcessRequest & {
  onLog?: (log: string) => void;
+ onStdout?: (stdout: string) => void;
+ onStderr?: (stderr: string) => void;
  };
  export type ProcessResponseWithLog = PostProcessResponse & {
  close: () => void;
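The widened `ProcessRequestWithLog` also applies to non-blocking runs: without `waitForCompletion`, `exec` returns as soon as the process starts, wires the callbacks to `streamLogs` on the returned `pid`, and exposes `close()` to stop streaming. A sketch of that flow, under the same assumptions as the examples above:

```ts
// Non-blocking: exec returns immediately; logs arrive via streamLogs until close() is called.
const handle = await sandbox.process.exec({
  command: 'node server.js',
  onStdout: (line) => console.log(line),
  onStderr: (line) => console.error(line),
});

// ...later: stop watching logs, then wait for the process by pid.
handle.close();
const finished = await sandbox.process.wait(handle.pid, { maxWait: 60_000, interval: 1_000 });
console.log(finished.status);
```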