e2b 0.2.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +6 -2
- data/lib/e2b/api/http_client.rb +30 -19
- data/lib/e2b/client.rb +79 -36
- data/lib/e2b/configuration.rb +12 -6
- data/lib/e2b/dockerfile_parser.rb +179 -0
- data/lib/e2b/errors.rb +24 -1
- data/lib/e2b/models/build_info.rb +29 -0
- data/lib/e2b/models/build_status_reason.rb +27 -0
- data/lib/e2b/models/sandbox_info.rb +19 -2
- data/lib/e2b/models/snapshot_info.rb +19 -0
- data/lib/e2b/models/template_build_status_response.rb +31 -0
- data/lib/e2b/models/template_log_entry.rb +54 -0
- data/lib/e2b/models/template_tag.rb +34 -0
- data/lib/e2b/models/template_tag_info.rb +21 -0
- data/lib/e2b/paginator.rb +97 -0
- data/lib/e2b/ready_cmd.rb +36 -0
- data/lib/e2b/sandbox.rb +217 -66
- data/lib/e2b/sandbox_helpers.rb +100 -0
- data/lib/e2b/services/base_service.rb +64 -15
- data/lib/e2b/services/command_handle.rb +189 -36
- data/lib/e2b/services/commands.rb +37 -50
- data/lib/e2b/services/filesystem.rb +70 -23
- data/lib/e2b/services/live_streamable.rb +94 -0
- data/lib/e2b/services/pty.rb +13 -64
- data/lib/e2b/services/watch_handle.rb +6 -3
- data/lib/e2b/template.rb +1089 -0
- data/lib/e2b/template_logger.rb +52 -0
- data/lib/e2b/version.rb +1 -1
- data/lib/e2b.rb +16 -0
- metadata +44 -2
data/lib/e2b/template.rb
ADDED
|
@@ -0,0 +1,1089 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "digest"
|
|
4
|
+
require "json"
|
|
5
|
+
require "pathname"
|
|
6
|
+
require "rubygems/package"
|
|
7
|
+
require "stringio"
|
|
8
|
+
require "uri"
|
|
9
|
+
require "zlib"
|
|
10
|
+
|
|
11
|
+
module E2B
|
|
12
|
+
class Template
|
|
13
|
+
DEFAULT_BASE_IMAGE = "e2bdev/base"
|
|
14
|
+
DEFAULT_RESOLVE_SYMLINKS = false
|
|
15
|
+
BASE_STEP_NAME = "base"
|
|
16
|
+
FINALIZE_STEP_NAME = "finalize"
|
|
17
|
+
|
|
18
|
+
class << self
|
|
19
|
+
# Serialize +template+ to pretty-printed JSON (delegates to Template#to_json).
def to_json(template, compute_hashes: true)
  template.to_json(compute_hashes: compute_hashes)
end
|
|
22
|
+
|
|
23
|
+
# Render +template+ as a Dockerfile string (delegates to Template#to_dockerfile).
def to_dockerfile(template)
  template.to_dockerfile
end
|
|
26
|
+
|
|
27
|
+
# True when a template alias named +name+ already exists.
# Thin wrapper over .alias_exists (the name is kept for API compatibility).
def exists(name, api_key: nil, access_token: nil, domain: nil)
  alias_exists(name, api_key: api_key, access_token: access_token, domain: domain)
end
|
|
30
|
+
|
|
31
|
+
# Check whether a template alias is already taken.
#
# Returns true when the alias resolves, false on 404. A 403 also counts as
# taken — the alias exists but is owned by someone else. Any other
# authentication error is re-raised.
def alias_exists(alias_name, api_key: nil, access_token: nil, domain: nil)
  credentials = resolve_credentials(api_key: api_key, access_token: access_token)
  http_client = build_http_client(**credentials, domain: resolve_domain(domain))
  http_client.get("/templates/aliases/#{escape_path_segment(alias_name)}")
  true
rescue E2B::NotFoundError
  false
rescue E2B::AuthenticationError => e
  return true if e.status_code == 403

  raise
end
|
|
43
|
+
|
|
44
|
+
# Attach +tags+ (scalar, array, or nested — normalized) to the template
# identified by +target_name+. Returns a Models::TemplateTagInfo.
def assign_tags(target_name, tags, api_key: nil, access_token: nil, domain: nil)
  credentials = resolve_credentials(api_key: api_key, access_token: access_token)
  http_client = build_http_client(**credentials, domain: resolve_domain(domain))
  response = http_client.post("/templates/tags", body: {
    target: target_name,
    tags: normalize_tags(tags)
  })

  Models::TemplateTagInfo.from_hash(response)
end
|
|
54
|
+
|
|
55
|
+
# Detach +tags+ from the template named +name+. Returns nil.
def remove_tags(name, tags, api_key: nil, access_token: nil, domain: nil)
  credentials = resolve_credentials(api_key: api_key, access_token: access_token)
  http_client = build_http_client(**credentials, domain: resolve_domain(domain))
  http_client.delete("/templates/tags", body: {
    name: name,
    tags: normalize_tags(tags)
  })
  nil
end
|
|
64
|
+
|
|
65
|
+
# List the tags attached to +template_id+.
# Returns an Array of Models::TemplateTag.
def get_tags(template_id, api_key: nil, access_token: nil, domain: nil)
  credentials = resolve_credentials(api_key: api_key, access_token: access_token)
  http_client = build_http_client(**credentials, domain: resolve_domain(domain))
  response = http_client.get("/templates/#{escape_path_segment(template_id)}/tags")

  Array(response).map { |item| Models::TemplateTag.from_hash(item) }
end
|
|
72
|
+
|
|
73
|
+
# Fetch the current status of a template build.
#
# Identifiers come either from a BuildInfo-like object / Hash passed as
# +build_info+, or from explicit +template_id:+/+build_id:+ keywords.
# +logs_offset+ skips log entries the caller has already consumed.
# Returns a Models::TemplateBuildStatusResponse.
def get_build_status(build_info = nil, logs_offset: nil, api_key: nil, access_token: nil, domain: nil,
                     template_id: nil, build_id: nil)
  resolved_template_id, resolved_build_id = extract_build_identifiers(
    build_info,
    template_id: template_id,
    build_id: build_id,
    build_step_origins: nil
  )

  credentials = resolve_credentials(api_key: api_key, access_token: access_token)
  http_client = build_http_client(**credentials, domain: resolve_domain(domain))
  params = {}
  params[:logsOffset] = logs_offset unless logs_offset.nil?

  response = http_client.get(
    "/templates/#{escape_path_segment(resolved_template_id)}/builds/#{escape_path_segment(resolved_build_id)}/status",
    params: params
  )

  Models::TemplateBuildStatusResponse.from_hash(response)
end
|
|
94
|
+
|
|
95
|
+
# Poll a build until it leaves the "building"/"waiting" states.
#
# New log entries are streamed to +on_build_logs+ (any callable) exactly
# once, with the offset advanced after each poll. Returns the final status
# response on "ready". Raises a BuildError on "error" (with the failing
# step's source location attached when it can be resolved) or on an
# unrecognized status value.
def wait_for_build_finish(build_info = nil, logs_offset: 0, on_build_logs: nil, logs_refresh_frequency: 0.2,
                          api_key: nil, access_token: nil, domain: nil, template_id: nil, build_id: nil,
                          build_step_origins: nil)
  resolved_template_id, resolved_build_id, resolved_build_step_origins = extract_build_identifiers(
    build_info,
    template_id: template_id,
    build_id: build_id,
    build_step_origins: build_step_origins
  )
  current_logs_offset = logs_offset

  loop do
    status = get_build_status(
      nil,
      logs_offset: current_logs_offset,
      api_key: api_key,
      access_token: access_token,
      domain: domain,
      template_id: resolved_template_id,
      build_id: resolved_build_id
    )

    # Advance past entries we just received so they are not re-delivered.
    current_logs_offset += status.log_entries.length
    status.log_entries.each { |entry| on_build_logs.call(entry) } if on_build_logs

    case status.status
    when "building", "waiting"
      sleep_for_build_poll(logs_refresh_frequency)
    when "ready"
      return status
    when "error"
      raise build_error(
        status.reason&.message || "Unknown build error occurred.",
        step: status.reason&.step,
        source_location: build_step_source_location(status.reason&.step, resolved_build_step_origins)
      )
    else
      raise build_error("Unknown build status: #{status.status}")
    end
  end
end
|
|
136
|
+
|
|
137
|
+
# Build a template synchronously: start the build, then block until it
# finishes. Start/end marker entries are emitted around the whole run — the
# end marker is emitted even when the build raises (via ensure).
# Returns the Models::BuildInfo for the started build.
def build(template, name: nil, alias_name: nil, tags: nil, cpu_count: 2, memory_mb: 1024, skip_cache: false,
          on_build_logs: nil, api_key: nil, access_token: nil, domain: nil, **opts)
  on_build_logs&.call(Models::TemplateLogEntryStart.new(timestamp: Time.now, message: "Build started"))

  build_info = build_in_background(
    template,
    name: name,
    alias_name: alias_name || opts[:alias] || opts["alias"], # legacy alias: option
    tags: tags,
    cpu_count: cpu_count,
    memory_mb: memory_mb,
    skip_cache: skip_cache,
    on_build_logs: on_build_logs,
    api_key: api_key,
    access_token: access_token,
    domain: domain
  )

  on_build_logs&.call(log_entry("Waiting for logs..."))

  wait_for_build_finish(
    build_info,
    on_build_logs: on_build_logs,
    api_key: api_key,
    access_token: access_token,
    domain: domain
  )

  build_info
ensure
  on_build_logs&.call(Models::TemplateLogEntryEnd.new(timestamp: Time.now, message: "Build finished"))
end
|
|
169
|
+
|
|
170
|
+
# Register and start a template build without waiting for completion:
# create the template record (/v3/templates), upload any COPY sources the
# backend does not already have cached, then trigger the build. Returns a
# Models::BuildInfo the caller can poll via .wait_for_build_finish.
def build_in_background(template, name: nil, alias_name: nil, tags: nil, cpu_count: 2, memory_mb: 1024,
                        skip_cache: false, on_build_logs: nil, api_key: nil, access_token: nil, domain: nil, **opts)
  alias_name ||= opts[:alias] || opts["alias"] # legacy alias: option
  resolved_name = normalize_build_name(name: name, alias_name: alias_name)
  template.send(:force_build!) if skip_cache

  credentials = resolve_credentials(api_key: api_key, access_token: access_token)
  http_client = build_http_client(**credentials, domain: resolve_domain(domain))

  tags_message = Array(tags).any? ? " with tags #{Array(tags).join(', ')}" : ""
  on_build_logs&.call(log_entry("Requesting build for template: #{resolved_name}#{tags_message}"))

  create_response = http_client.post("/v3/templates", body: {
    name: resolved_name,
    tags: tags,
    cpuCount: cpu_count,
    memoryMB: memory_mb
  })

  # The HTTP layer may yield string or symbol keys — accept both.
  build_info = Models::BuildInfo.new(
    alias_name: resolved_name,
    name: resolved_name,
    tags: create_response["tags"] || create_response[:tags] || [],
    template_id: create_response["templateID"] || create_response[:templateID],
    build_id: create_response["buildID"] || create_response[:buildID],
    build_step_origins: template.send(:build_step_origins)
  )

  on_build_logs&.call(
    log_entry("Template created with ID: #{build_info.template_id}, Build ID: #{build_info.build_id}")
  )

  # Hash metadata must be computed once and reused for both the uploads and
  # the build payload so the server sees consistent file hashes.
  instructions = template.send(:instructions_with_hash_metadata)
  upload_copy_instructions(
    http_client,
    template,
    build_info,
    instructions,
    on_build_logs: on_build_logs
  )

  on_build_logs&.call(log_entry("All file uploads completed"))
  on_build_logs&.call(log_entry("Starting building..."))

  http_client.post(
    "/v2/templates/#{escape_path_segment(build_info.template_id)}/builds/#{escape_path_segment(build_info.build_id)}",
    body: template.send(:build_payload, instructions)
  )

  build_info
end
|
|
221
|
+
|
|
222
|
+
private
|
|
223
|
+
|
|
224
|
+
# Return the first usable template name out of +name+ / +alias_name+.
# Raises TemplateError when neither yields a non-empty string.
def normalize_build_name(name:, alias_name:)
  chosen = name || alias_name
  raise TemplateError, "Name must be provided" if chosen.nil? || chosen.empty?

  chosen
end
|
|
230
|
+
|
|
231
|
+
# Upload the sources for every COPY instruction whose content hash is not
# already cached server-side. The backend responds with a +present+ flag
# and a presigned +url+ per hash; an upload happens when the content is
# absent or the instruction forces re-upload. API errors are re-raised as
# FileUploadError carrying the failing instruction's source location.
def upload_copy_instructions(http_client, template, build_info, instructions, on_build_logs:)
  source_location = nil # remembered for the rescue clause below

  instructions.each do |instruction|
    next unless instruction[:type] == "COPY"

    src = instruction[:args][0]
    files_hash = instruction[:filesHash]
    source_location = instruction[:sourceLocation]
    response = http_client.get(
      "/templates/#{escape_path_segment(build_info.template_id)}/files/#{escape_path_segment(files_hash)}"
    )
    present = response["present"]
    url = response["url"]

    # Upload when forced, or when the server explicitly reports the
    # content missing (present == false) — both require a presigned URL.
    if (instruction[:forceUpload] && url) || (present == false && url)
      upload_file(
        template,
        file_name: src,
        url: url,
        resolve_symlinks: instruction[:resolveSymlinks],
        source_location: source_location
      )
      on_build_logs&.call(log_entry("Uploaded '#{src}'"))
    else
      on_build_logs&.call(log_entry("Skipping upload of '#{src}', already cached"))
    end
  end
rescue E2BError => e
  raise file_upload_error(
    e.message,
    source_location: source_location,
    status_code: e.status_code,
    headers: e.headers
  )
end
|
|
267
|
+
|
|
268
|
+
# Tar+gzip the sources matched by +file_name+ and PUT them to the
# presigned +url+. Raises FileUploadError on a non-2xx response or any
# Faraday transport failure.
def upload_file(template, file_name:, url:, resolve_symlinks:, source_location: nil)
  tarball = build_tar_archive(template, file_name, resolve_symlinks: resolve_symlinks)
  response = Faraday.put(url) do |req|
    req.headers["Content-Type"] = "application/octet-stream"
    req.body = tarball
  end

  return if response.success?

  raise file_upload_error("Failed to upload file: #{response.status}", source_location: source_location)
rescue Faraday::Error => e
  raise file_upload_error("Failed to upload file: #{e.message}", source_location: source_location)
end
|
|
281
|
+
|
|
282
|
+
# Build an in-memory .tar.gz of the files matched by +file_name+ (resolved
# by the template's collect_files helper — defined elsewhere in this file),
# with entry paths relative to the template's file context directory.
# Symlinks are archived as links unless +resolve_symlinks+ is true, in
# which case the target's content is stored. Returns the gzipped bytes as
# a String.
def build_tar_archive(template, file_name, resolve_symlinks:)
  context_path = template.send(:file_context_path)
  files = template.send(:collect_files, file_name)
  output = StringIO.new

  gzip = Zlib::GzipWriter.new(output)
  Gem::Package::TarWriter.new(gzip) do |tar|
    files.each do |file|
      relative = Pathname.new(file).relative_path_from(Pathname.new(context_path)).to_s

      # lstat (not stat) so the link's own mode is preserved.
      if File.symlink?(file) && !resolve_symlinks
        tar.add_symlink(relative, File.readlink(file), File.lstat(file).mode)
        next
      end

      stat = File.stat(file)
      if File.directory?(file)
        tar.mkdir(relative, stat.mode)
      elsif File.file?(file)
        tar.add_file_simple(relative, stat.mode, stat.size) do |io|
          io.write(File.binread(file))
        end
      end
      # Other file types (sockets, devices, ...) are silently skipped.
    end
  end
  gzip.finish

  output.rewind
  output.string
end
|
|
312
|
+
|
|
313
|
+
# Build a timestamped Models::TemplateLogEntry with the given message and
# severity (defaults to "info").
def log_entry(message, level = "info")
  Models::TemplateLogEntry.new(timestamp: Time.now, level: level, message: message)
end
|
|
320
|
+
|
|
321
|
+
# Coerce a tags argument (nil, scalar, or nested arrays) into a flat,
# nil-free Array.
def normalize_tags(tags)
  flattened = Array(tags).flatten
  flattened.reject(&:nil?)
end
|
|
324
|
+
|
|
325
|
+
# Resolve [template_id, build_id, build_step_origins] from any accepted
# input form: a BuildInfo-like object (responds to #template_id/#build_id),
# a Hash with snake_case or camelCase keys, or explicit template_id:/
# build_id: keywords. Explicit build_step_origins: wins over values carried
# by build_info.
#
# Raises ArgumentError when no usable identifier pair can be derived.
# (A Hash missing its IDs no longer returns nils — which previously
# surfaced downstream as a confusing HTTP 404 — it now falls through to
# the explicit keywords or the ArgumentError.)
def extract_build_identifiers(build_info, template_id:, build_id:, build_step_origins:)
  resolved_build_step_origins = build_step_origins

  if build_info
    if build_info.respond_to?(:template_id) && build_info.respond_to?(:build_id)
      resolved_build_step_origins ||= build_info.build_step_origins if build_info.respond_to?(:build_step_origins)
      return [build_info.template_id, build_info.build_id, Array(resolved_build_step_origins).compact]
    end

    if build_info.is_a?(Hash)
      resolved_template_id = build_info[:template_id] || build_info["template_id"] ||
                             build_info[:templateID] || build_info["templateID"]
      resolved_build_id = build_info[:build_id] || build_info["build_id"] ||
                          build_info[:buildID] || build_info["buildID"]
      resolved_build_step_origins ||= build_info[:build_step_origins] || build_info["build_step_origins"] ||
                                      build_info[:buildStepOrigins] || build_info["buildStepOrigins"]

      if resolved_template_id && resolved_build_id
        return [resolved_template_id, resolved_build_id, Array(resolved_build_step_origins).compact]
      end
    end
  end

  return [template_id, build_id, Array(resolved_build_step_origins).compact] if template_id && build_id

  raise ArgumentError, "Provide build_info or both template_id: and build_id:"
end
|
|
350
|
+
|
|
351
|
+
# Map a build-status "step" identifier back to the source location that
# produced it: "base" is the first recorded origin, "finalize" the last,
# anything else is parsed as a zero-based index. Returns nil when the step
# cannot be resolved (unparseable, out of range, or no origins recorded).
def build_step_source_location(step, build_step_origins)
  origins = Array(build_step_origins).compact
  return nil if origins.empty?

  index =
    if step == BASE_STEP_NAME
      0
    elsif step == FINALIZE_STEP_NAME
      origins.length - 1
    else
      Integer(step, 10)
    end

  origins[index] if index.between?(0, origins.length - 1)
rescue ArgumentError, TypeError
  nil
end
|
|
368
|
+
|
|
369
|
+
# Construct (not raise) a BuildError with the failing step and its source
# location attached.
def build_error(message, step: nil, source_location: nil, status_code: nil, headers: {})
  BuildError.new(message, step: step, source_location: source_location,
                 status_code: status_code, headers: headers)
end
|
|
378
|
+
|
|
379
|
+
# Construct (not raise) a FileUploadError for a failed COPY source upload.
def file_upload_error(message, source_location: nil, status_code: nil, headers: {})
  FileUploadError.new(message, source_location: source_location,
                      status_code: status_code, headers: headers)
end
|
|
387
|
+
|
|
388
|
+
# Construct (not raise) a TemplateError carrying request/source context.
def template_error(message, source_location: nil, status_code: nil, headers: {})
  TemplateError.new(message, source_location: source_location,
                    status_code: status_code, headers: headers)
end
|
|
396
|
+
|
|
397
|
+
# Percent-encode +value+ for use as a single URL path segment.
#
# URI.encode_www_form_component percent-encodes all reserved characters
# (including "/") but emits "+" for spaces — correct for form bodies,
# wrong inside a URL path where "+" is a literal plus. Translate it back
# to the percent form so names containing spaces round-trip correctly.
def escape_path_segment(value)
  URI.encode_www_form_component(value.to_s).gsub("+", "%20")
end
|
|
400
|
+
|
|
401
|
+
# Resolve API credentials: explicit arguments win, then global
# configuration, then environment variables. At least one non-empty
# credential must be available, otherwise a ConfigurationError is raised.
def resolve_credentials(api_key:, access_token:)
  key = api_key || E2B.configuration&.api_key || ENV["E2B_API_KEY"]
  token = access_token || E2B.configuration&.access_token || ENV["E2B_ACCESS_TOKEN"]

  key_present = key && !key.empty?
  token_present = token && !token.empty?
  unless key_present || token_present
    raise ConfigurationError,
          "E2B credentials are required. Set E2B_API_KEY or E2B_ACCESS_TOKEN, or pass api_key:/access_token:."
  end

  { api_key: key, access_token: token }
end
|
|
412
|
+
|
|
413
|
+
# Pick the API domain: explicit argument > configured > ENV > library default.
def resolve_domain(domain)
  return domain if domain

  E2B.configuration&.domain || ENV["E2B_DOMAIN"] || Configuration::DEFAULT_DOMAIN
end
|
|
416
|
+
|
|
417
|
+
# Construct an API::HttpClient for the resolved +domain+. An explicit
# api_url from global configuration or E2B_API_URL overrides the URL
# derived from the domain.
def build_http_client(api_key:, access_token:, domain:)
  config = E2B.configuration
  base_url = config&.api_url || ENV["E2B_API_URL"] || Configuration.default_api_url(domain)
  API::HttpClient.new(
    base_url: base_url,
    api_key: api_key,
    access_token: access_token,
    logger: config&.logger
  )
end
|
|
427
|
+
|
|
428
|
+
# Pause between build-status polls; isolated in a method so tests can stub it.
def sleep_for_build_poll(interval)
  Kernel.sleep(interval)
end
|
|
431
|
+
end
|
|
432
|
+
|
|
433
|
+
# Start an empty template definition rooted at +file_context_path+ (COPY
# sources are resolved relative to it; default_file_context_path is defined
# elsewhere in this file). +file_ignore_patterns+ filters files collected
# for upload.
def initialize(file_context_path: nil, file_ignore_patterns: [])
  @file_context_path = (file_context_path || default_file_context_path).to_s
  @file_ignore_patterns = Array(file_ignore_patterns)
  @base_image = DEFAULT_BASE_IMAGE
  @base_template = nil
  @registry_config = nil      # private-registry auth payload, set by from_* methods
  @start_cmd = nil
  @ready_cmd = nil
  @force = false              # rebuild everything, bypassing the server cache
  @force_next_layer = false   # set by #skip_cache; marks later steps as forced
  @instructions = []          # ordered build steps (RUN/COPY/ENV/WORKDIR/USER)
  @base_source_location = capture_source_location
  @finalization_source_location = nil
end
|
|
447
|
+
|
|
448
|
+
# Convenience base-image pickers for common stacks; all delegate to #from_image.

def from_debian_image(variant = "stable")
  from_image("debian:#{variant}")
end

def from_ubuntu_image(variant = "latest")
  from_image("ubuntu:#{variant}")
end

def from_python_image(version = "3")
  from_image("python:#{version}")
end

def from_node_image(variant = "lts")
  from_image("node:#{variant}")
end

def from_bun_image(variant = "latest")
  from_image("oven/bun:#{variant}")
end

# Explicitly select the default E2B base image.
def from_base_image
  from_image(DEFAULT_BASE_IMAGE)
end
|
|
471
|
+
|
|
472
|
+
# Use a Docker image as the build base. Optional +username+/+password+
# authenticate against a private registry; they must be given together
# (previously a lone credential was silently dropped and the registry
# config never set — now it raises ArgumentError).
def from_image(image, username: nil, password: nil)
  if username.nil? ^ password.nil?
    raise ArgumentError, "Both username: and password: must be provided for a private registry"
  end

  @base_image = image
  @base_template = nil
  @registry_config = if username && password
    {
      type: "registry",
      username: username,
      password: password
    }
  end
  @force = true if @force_next_layer # honor a preceding #skip_cache
  @base_source_location = capture_source_location
  self
end
|
|
486
|
+
|
|
487
|
+
# Use an image hosted in an AWS registry, authenticating with the given
# IAM access keys and region.
def from_aws_registry(image, access_key_id:, secret_access_key:, region:)
  @base_image = image
  @base_template = nil
  @registry_config = {
    type: "aws",
    awsAccessKeyId: access_key_id,
    awsSecretAccessKey: secret_access_key,
    awsRegion: region
  }
  @force = true if @force_next_layer # honor a preceding #skip_cache
  @base_source_location = capture_source_location
  self
end
|
|
500
|
+
|
|
501
|
+
# Use an image hosted in a GCP registry. The service account credential is
# normalized by read_gcp_service_account_json (defined elsewhere in this
# file; accepted input forms are determined there).
def from_gcp_registry(image, service_account_json:)
  @base_image = image
  @base_template = nil
  @registry_config = {
    type: "gcp",
    serviceAccountJson: read_gcp_service_account_json(service_account_json)
  }
  @force = true if @force_next_layer # honor a preceding #skip_cache
  @base_source_location = capture_source_location
  self
end
|
|
512
|
+
|
|
513
|
+
# Derive the base image (and, via the parser, build steps) from Dockerfile
# content or a path to one — DockerfileParser decides which form it was
# given. Parse failures are re-raised as a TemplateError pointing at this
# call site.
def from_dockerfile(dockerfile_content_or_path)
  @base_template = nil
  @registry_config = nil
  @base_image = E2B::DockerfileParser.parse(dockerfile_content_or_path, self)
  @force = true if @force_next_layer # honor a preceding #skip_cache
  @base_source_location = capture_source_location
  self
rescue TemplateError => e
  raise template_error(
    e.message,
    source_location: capture_source_location,
    status_code: e.status_code,
    headers: e.headers
  )
end
|
|
528
|
+
|
|
529
|
+
# Base this template on another E2B template (by name — see
# #add_mcp_server's string comparison) instead of a Docker image.
def from_template(template)
  @base_template = template
  @base_image = nil
  @registry_config = nil
  @force = true if @force_next_layer # honor a preceding #skip_cache
  @base_source_location = capture_source_location
  self
end
|
|
537
|
+
|
|
538
|
+
# Append one COPY step per source in +src+ (scalar or array). Sources must
# be relative paths inside the file context (validate_relative_path!,
# defined elsewhere in this file, raises otherwise). +mode+ is rendered as
# a 4-digit octal string; empty strings stand in for unset user/mode as
# the API expects.
def copy(src, dest, force_upload: nil, user: nil, mode: nil, resolve_symlinks: nil)
  source_location = capture_source_location

  Array(src).each do |source|
    source_path = source.to_s
    validate_relative_path!(source_path, source_location: source_location)

    @instructions << {
      type: "COPY",
      args: [
        source_path,
        dest.to_s,
        user || "",
        mode ? format("%04o", mode) : ""
      ],
      force: !!force_upload || @force_next_layer,
      forceUpload: force_upload,
      resolveSymlinks: resolve_symlinks.nil? ? DEFAULT_RESOLVE_SYMLINKS : resolve_symlinks,
      sourceLocation: source_location
    }
  end

  self
end
|
|
562
|
+
|
|
563
|
+
# Bulk form of #copy: each item supplies src/dest plus optional settings,
# read via copy_item_value (defined elsewhere in this file — presumably it
# accepts symbol or string keys; confirm there).
def copy_items(items)
  items.each do |item|
    copy(
      copy_item_value(item, :src),
      copy_item_value(item, :dest),
      force_upload: copy_item_value(item, :forceUpload, required: false),
      user: copy_item_value(item, :user, required: false),
      mode: copy_item_value(item, :mode, required: false),
      resolve_symlinks: copy_item_value(item, :resolveSymlinks, required: false)
    )
  end

  self
end
|
|
577
|
+
|
|
578
|
+
# Append a RUN step. Multiple commands are chained with "&&" so they run
# in one layer; +user+ selects the account the commands run as (empty
# string means the backend default).
def run_cmd(cmd, user: nil)
  origin = capture_source_location
  script = Array(cmd).map(&:to_s).join(" && ")

  @instructions << {
    type: "RUN",
    args: [script, user || ""],
    force: @force_next_layer,
    sourceLocation: origin
  }

  self
end
|
|
591
|
+
|
|
592
|
+
# Append a WORKDIR step setting the working directory for later steps.
def set_workdir(workdir)
  @instructions << {
    type: "WORKDIR",
    args: [workdir.to_s],
    force: @force_next_layer,
    sourceLocation: capture_source_location
  }
  self
end
|
|
603
|
+
|
|
604
|
+
# Append a USER step switching the account for subsequent steps.
def set_user(user)
  @instructions << {
    type: "USER",
    args: [user.to_s],
    force: @force_next_layer,
    sourceLocation: capture_source_location
  }
  self
end
|
|
615
|
+
|
|
616
|
+
# Append an ENV step. +envs+ is a Hash of name => value pairs; keys and
# values are stringified and flattened into [k1, v1, k2, v2, ...] as the
# backend expects. A nil or empty hash is a no-op (previously nil raised
# NoMethodError on #empty?).
def set_envs(envs)
  return self if envs.nil? || envs.empty?

  args = envs.each_with_object([]) do |(key, value), values|
    values << key.to_s
    values << value.to_s
  end
  source_location = capture_source_location

  @instructions << {
    type: "ENV",
    args: args,
    force: @force_next_layer,
    sourceLocation: source_location
  }
  self
end
|
|
633
|
+
|
|
634
|
+
# RUN step that pip-installs +packages+ (defaults to the current project,
# "."). +g+ installs globally as root; otherwise installs with --user.
def pip_install(packages = nil, g: true)
  cmd = ["pip", "install"]
  cmd << "--user" unless g
  cmd.concat(packages.nil? ? ["."] : Array(packages).map(&:to_s))
  run_cmd(cmd.join(" "), user: g ? "root" : nil)
end
|
|
641
|
+
|
|
642
|
+
# RUN step that npm-installs +packages+ (project deps when nil).
# +g+ installs globally as root; +dev+ adds --save-dev.
def npm_install(packages = nil, g: false, dev: false)
  cmd = ["npm", "install"]
  cmd << "-g" if g
  cmd << "--save-dev" if dev
  cmd.concat(Array(packages).map(&:to_s)) unless packages.nil?
  run_cmd(cmd.join(" "), user: g ? "root" : nil)
end
|
|
650
|
+
|
|
651
|
+
# RUN step that bun-installs +packages+ (project deps when nil).
# +g+ installs globally as root; +dev+ adds --dev.
def bun_install(packages = nil, g: false, dev: false)
  cmd = ["bun", "install"]
  cmd << "-g" if g
  cmd << "--dev" if dev
  cmd.concat(Array(packages).map(&:to_s)) unless packages.nil?
  run_cmd(cmd.join(" "), user: g ? "root" : nil)
end
|
|
659
|
+
|
|
660
|
+
# Update the apt package index and install +packages+ non-interactively as
# root, in a single RUN layer.
def apt_install(packages, no_install_recommends: false)
  names = Array(packages).map(&:to_s).join(" ")
  flags = no_install_recommends ? "--no-install-recommends " : ""
  run_cmd(
    [
      "apt-get update",
      "DEBIAN_FRONTEND=noninteractive DEBCONF_NOWARNINGS=yes apt-get install -y #{flags}#{names}"
    ],
    user: "root"
  )
end
|
|
671
|
+
|
|
672
|
+
# Pull the given MCP servers into the image. Only valid when this template
# is based on the "mcp-gateway" template (set via #from_template).
def add_mcp_server(servers)
  unless @base_template == "mcp-gateway"
    raise build_error(
      "MCP servers can only be added to mcp-gateway template",
      source_location: capture_source_location
    )
  end

  server_list = Array(servers).map(&:to_s)
  run_cmd("mcp-gateway pull #{server_list.join(' ')}", user: "root")
end
|
|
683
|
+
|
|
684
|
+
# RUN step that clones +url+ (optionally into +path+). +branch+ restricts
# the clone to a single branch; +depth+ makes it shallow.
def git_clone(url, path = nil, branch: nil, depth: nil, user: nil)
  parts = ["git", "clone", url.to_s]
  parts.push("--branch #{branch}", "--single-branch") if branch
  parts << "--depth #{depth}" if depth
  parts << path.to_s if path
  run_cmd(parts.join(" "), user: user)
end
|
|
694
|
+
|
|
695
|
+
# Pre-build the dev container under +devcontainer_directory+ during the
# template build so sandbox startup does not pay the build cost. Requires
# a devcontainer-capable base (ensure_devcontainer_template!, defined
# elsewhere in this file, enforces this).
def beta_dev_container_prebuild(devcontainer_directory)
  ensure_devcontainer_template!
  run_cmd("devcontainer build --workspace-folder #{devcontainer_directory}", user: "root")
end
|
|
699
|
+
|
|
700
|
+
# Start the dev container on sandbox boot: bring it up, generate an exec
# wrapper at /devcontainer.sh, then touch /devcontainer.up as the
# readiness marker that the E2B.wait_for_file ready-command polls for.
def beta_set_dev_container_start(devcontainer_directory)
  ensure_devcontainer_template!
  set_start_cmd(
    "sudo devcontainer up --workspace-folder #{devcontainer_directory} && sudo /prepare-exec.sh #{devcontainer_directory} | sudo tee /devcontainer.sh > /dev/null && sudo chmod +x /devcontainer.sh && sudo touch /devcontainer.up",
    E2B.wait_for_file("/devcontainer.up")
  )
end
|
|
707
|
+
|
|
708
|
+
alias beta_set_devcontainer_start beta_set_dev_container_start
|
|
709
|
+
|
|
710
|
+
# RUN step that deletes +path+ (scalar or array) via rm, with optional
# -r (recursive) and -f (force) flags.
def remove(path, force: false, recursive: false, user: nil)
  flags = []
  flags << "-r" if recursive
  flags << "-f" if force
  run_cmd((["rm"] + flags + Array(path).map(&:to_s)).join(" "), user: user)
end
|
|
717
|
+
|
|
718
|
+
# RUN step that moves/renames +src+ to +dest+ via mv (-f when forced).
def rename(src, dest, force: false, user: nil)
  cmd = ["mv"]
  cmd << "-f" if force
  run_cmd((cmd + [src.to_s, dest.to_s]).join(" "), user: user)
end
|
|
725
|
+
|
|
726
|
+
# RUN step that creates +path+ (scalar or array) with mkdir -p; +mode+ is
# applied as a 4-digit octal permission string.
def make_dir(path, mode: nil, user: nil)
  cmd = ["mkdir", "-p"]
  cmd << "-m #{format('%04o', mode)}" if mode
  run_cmd((cmd + Array(path).map(&:to_s)).join(" "), user: user)
end
|
|
732
|
+
|
|
733
|
+
# RUN step that symlinks +dest+ to +src+ via ln -s (-f when forced).
def make_symlink(src, dest, user: nil, force: false)
  cmd = ["ln", "-s"]
  cmd << "-f" if force
  run_cmd((cmd + [src.to_s, dest.to_s]).join(" "), user: user)
end
|
|
740
|
+
|
|
741
|
+
# Mark subsequently added steps (and a subsequently re-set base) to bypass
# the build cache. NOTE(review): nothing in the visible code resets
# @force_next_layer, so the flag appears to apply to all later steps, not
# just the next one — confirm against the unseen part of the file.
def skip_cache
  @force_next_layer = true
  self
end
|
|
745
|
+
|
|
746
|
+
# Set the command executed when a sandbox boots, optionally together with
# a readiness command (normalized by normalize_ready_cmd, defined
# elsewhere in this file) that gates when the sandbox counts as ready.
def set_start_cmd(start_cmd, ready_cmd = nil)
  @start_cmd = start_cmd.to_s
  @ready_cmd = normalize_ready_cmd(ready_cmd) unless ready_cmd.nil?
  @finalization_source_location = capture_source_location
  self
end
|
|
752
|
+
|
|
753
|
+
# Set only the readiness command (see #set_start_cmd for the paired form).
def set_ready_cmd(ready_cmd)
  @ready_cmd = normalize_ready_cmd(ready_cmd)
  @finalization_source_location = capture_source_location
  self
end
|
|
758
|
+
|
|
759
|
+
# Build the API payload Hash for this template; COPY file hashes are
# computed only when +compute_hashes+ is true.
def to_h(compute_hashes: false)
  steps = compute_hashes ? instructions_with_hash_metadata : @instructions
  build_payload(steps)
end
|
|
762
|
+
|
|
763
|
+
# Serialize the template definition as pretty-printed JSON. COPY file
# hashes are computed unless compute_hashes: false.
def to_json(compute_hashes: true)
  JSON.pretty_generate(to_h(compute_hashes: compute_hashes))
end
|
|
766
|
+
|
|
767
|
+
# Render the accumulated steps as a Dockerfile string. Only image-based
# templates can be rendered — template-based ones require the E2B API.
# Note: per-step user (the second RUN arg, third COPY arg) has no
# Dockerfile equivalent here and is dropped from the output.
def to_dockerfile
  if @base_template
    raise template_error(
      "Cannot convert template built from another template to Dockerfile. Templates based on other templates can only be built using the E2B API.",
      source_location: capture_source_location
    )
  end

  raise template_error("No base image specified for template", source_location: capture_source_location) unless @base_image

  dockerfile = +"FROM #{@base_image}\n"
  @instructions.each do |instruction|
    case instruction[:type]
    when "RUN"
      dockerfile << "RUN #{instruction[:args][0]}\n"
    when "COPY"
      dockerfile << "COPY #{instruction[:args][0]} #{instruction[:args][1]}\n"
    when "ENV"
      # ENV args are a flat [k1, v1, k2, v2, ...] list — re-pair them.
      values = instruction[:args].each_slice(2).map { |key, value| "#{key}=#{value}" }
      dockerfile << "ENV #{values.join(' ')}\n"
    else
      # WORKDIR / USER and any future single-arg steps render verbatim.
      dockerfile << "#{instruction[:type]} #{instruction[:args].join(' ')}\n"
    end
  end
  dockerfile << "ENTRYPOINT #{@start_cmd}\n" if @start_cmd
  dockerfile
end
|
|
794
|
+
|
|
795
|
+
protected

# Context directory used to resolve COPY sources and .dockerignore.
# Protected (not private) so related template instances/subclasses can read it.
attr_reader :file_context_path

private
|
|
800
|
+
|
|
801
|
+
# Flags the entire template build as forced, bypassing all layer caching.
# Returns true (the assigned value).
def force_build!
  @force = true
end
|
|
804
|
+
|
|
805
|
+
# Assembles the API request payload for a build from the given instruction
# list. Steps and the force flag are always present; base image/template,
# registry config, start and ready commands are included only when set.
# Key insertion order matches the API's expected JSON layout.
def build_payload(instructions)
  payload = {
    steps: serialized_steps(instructions),
    force: @force
  }

  optional_fields = {
    fromImage: @base_image,
    fromTemplate: @base_template,
    fromImageRegistry: @registry_config,
    startCmd: @start_cmd,
    readyCmd: @ready_cmd
  }
  optional_fields.each { |key, value| payload[key] = value if value }

  payload
end
|
|
817
|
+
|
|
818
|
+
# Maps each raw instruction Hash through serialized_step, producing the
# wire-format step list.
def serialized_steps(steps)
  steps.each_with_object([]) do |instruction, serialized|
    serialized << serialized_step(instruction)
  end
end
|
|
821
|
+
|
|
822
|
+
# Convenience: enriches COPY steps with file hashes, then serializes the
# whole instruction list to wire format.
def instructions_with_hashes
  enriched = instructions_with_hash_metadata
  serialized_steps(enriched)
end
|
|
825
|
+
|
|
826
|
+
# Returns a copy of the instruction list where every COPY step is merged
# with a :filesHash key (content fingerprint of its source files). Non-COPY
# steps are passed through untouched.
def instructions_with_hash_metadata
  @instructions.map do |instruction|
    if instruction[:type] == "COPY"
      src, dest = instruction[:args]
      instruction.merge(
        filesHash: calculate_files_hash(
          src,
          dest,
          resolve_symlinks: instruction[:resolveSymlinks],
          source_location: instruction[:sourceLocation]
        )
      )
    else
      instruction
    end
  end
end
|
|
840
|
+
|
|
841
|
+
# Converts one internal instruction Hash into its wire-format step:
# type/args/force always present; filesHash only when the key exists
# (so COPY hashes survive even if nil); forceUpload only when non-nil
# (false is a meaningful value and must be kept).
def serialized_step(instruction)
  step = {
    type: instruction[:type],
    args: instruction[:args],
    force: instruction[:force]
  }
  if instruction.key?(:filesHash)
    step[:filesHash] = instruction[:filesHash]
  end
  upload = instruction[:forceUpload]
  step[:forceUpload] = upload unless upload.nil?
  step
end
|
|
851
|
+
|
|
852
|
+
# Validates a COPY source path: it must be relative and must not escape the
# context directory. Raises a template error on violation; returns nil when
# the path is acceptable.
def validate_relative_path!(src, source_location:)
  pathname = Pathname.new(src)
  if pathname.absolute?
    raise template_error(
      "Invalid source path \"#{src}\": absolute paths are not allowed. Use a relative path within the context directory.",
      source_location: source_location
    )
  end

  # cleanpath collapses "." and interior ".." segments; anything still
  # beginning with ".." points outside the context directory.
  cleaned = pathname.cleanpath.to_s
  return unless cleaned == ".." || cleaned.start_with?("../")

  raise template_error(
    "Invalid source path \"#{src}\": path escapes the context directory. The path must stay within the context directory.",
    source_location: source_location
  )
end
|
|
869
|
+
|
|
870
|
+
# Computes a deterministic SHA-256 fingerprint for a COPY step so unchanged
# layers can be cache-hit. The digest covers the COPY directive itself plus,
# for every matched file (sorted by collect_files): its context-relative
# path, mode and size, the full contents of regular files, and for
# non-followed symlinks the link target string instead of contents.
#
# src              - glob pattern relative to the context directory.
# dest             - destination path (hashed as part of the directive only).
# resolve_symlinks - when truthy, symlinks whose target exists are hashed as
#                    their target; dangling links are always hashed as links.
# source_location  - caller location attached to the "no files" error.
#
# Raises a template error when the pattern matches nothing.
# Returns the hex digest String.
#
# NOTE(review): the byte layout fed to the digest must stay stable across
# releases — any change invalidates every existing build cache.
def calculate_files_hash(src, dest, resolve_symlinks:, source_location: nil)
  digest = Digest::SHA256.new
  digest.update("COPY #{src} #{dest}")
  files = collect_files(src)

  if files.empty?
    raise template_error(
      "No files found in #{File.join(@file_context_path, src)}",
      source_location: source_location
    )
  end

  files.each do |file|
    relative_path = Pathname.new(file).relative_path_from(Pathname.new(@file_context_path)).to_s
    digest.update(relative_path)

    if File.symlink?(file)
      link_stat = File.lstat(file)
      # File.file?/File.directory? follow the link, so this is true only
      # when the link target actually exists.
      should_follow = resolve_symlinks && (File.file?(file) || File.directory?(file))
      unless should_follow
        update_stat_hash(digest, link_stat)
        digest.update(File.readlink(file))
        next
      end
    end

    stat = File.stat(file)
    update_stat_hash(digest, stat)
    digest.update(File.binread(file)) if File.file?(file)
  end

  digest.hexdigest
end
|
|
903
|
+
|
|
904
|
+
# Folds a file's mode and size (as decimal strings) into the running digest.
# Returns the digest for chaining.
def update_stat_hash(digest, stat)
  [stat.mode, stat.size].each { |attribute| digest.update(attribute.to_s) }
  digest
end
|
|
908
|
+
|
|
909
|
+
# Expands +src+ (a glob pattern relative to the context directory) into the
# sorted, de-duplicated list of matching absolute paths, applying ignore
# patterns via add_match_files.
#
# Fix: Dir.glob's documented keyword arguments are only +base:+ and +sort:+
# — flags belong in the second positional argument. The previous
# `flags: File::FNM_DOTMATCH` keyword call is not part of the documented
# signature (and raises ArgumentError where unsupported); FNM_DOTMATCH is
# now passed positionally so dotfiles are actually matched.
def collect_files(src)
  matches = Dir.glob(src, File::FNM_DOTMATCH, base: @file_context_path)
               .reject { |entry| entry == "." || entry == ".." }

  files = []
  matches.each do |match|
    add_match_files(files, match)
  end

  files.uniq.sort
end
|
|
920
|
+
|
|
921
|
+
# Appends the absolute paths selected by one glob +match+ (relative to the
# context directory) to +files+, recursing into real directories and
# skipping anything matching the ignore patterns.
#
# files - Array accumulator of absolute paths (mutated in place).
# match - context-relative path produced by Dir.glob in collect_files.
def add_match_files(files, match)
  full_path = File.join(@file_context_path, match)
  # Symlinks to directories are treated as plain entries: recorded, never
  # recursed into (avoids following links out of the context).
  directory = File.directory?(full_path) && !File.symlink?(full_path)
  return if ignored_path?(match, directory: directory)

  if directory
    files << full_path
    Dir.glob(File.join(full_path, "**", "*"), File::FNM_DOTMATCH).each do |child|
      next if [".", ".."].include?(File.basename(child))

      relative = Pathname.new(child).relative_path_from(Pathname.new(@file_context_path)).to_s
      child_directory = File.directory?(child) && !File.symlink?(child)
      # Ignore rules are re-checked per descendant so nested paths can be
      # excluded even when their parent directory was kept.
      next if ignored_path?(relative, directory: child_directory)

      files << child
    end
  else
    files << full_path
  end
end
|
|
941
|
+
|
|
942
|
+
# Returns true when +relative_path+ matches any configured ignore pattern
# (explicit patterns plus .dockerignore entries).
#
# relative_path - path relative to the context directory.
# directory     - whether the path is a (non-symlink) directory; directories
#                 additionally get a trailing-slash candidate so dir-style
#                 patterns like "tmp/" can match them.
def ignored_path?(relative_path, directory: false)
  normalized = normalize_ignore_path(relative_path)
  ignore_patterns.any? do |pattern|
    normalized_pattern = normalize_ignore_pattern(pattern)
    candidates = ignore_path_candidates(normalized, normalized_pattern, directory: directory)

    # Each pattern expands into equivalent glob variants (e.g. "dir/",
    # "dir", "dir/**"); the path is ignored if any variant matches any
    # candidate spelling of the path.
    ignore_pattern_variants(normalized_pattern).any? do |variant|
      candidates.any? do |candidate|
        File.fnmatch?(variant, candidate, File::FNM_PATHNAME | File::FNM_DOTMATCH)
      end
    end
  end
end
|
|
955
|
+
|
|
956
|
+
# Memoized union of the explicitly-configured ignore patterns and the
# entries read from the context's .dockerignore file.
def ignore_patterns
  return @ignore_patterns if defined?(@ignore_patterns) && @ignore_patterns

  @ignore_patterns = @file_ignore_patterns + read_dockerignore
end
|
|
959
|
+
|
|
960
|
+
# Expands one normalized ignore pattern into the list of glob variants that
# should all be tried against a path:
#   "dir/"  -> ["dir/", "dir", "dir/**"]   (directory pattern also matches
#                                           the bare dir and its contents)
#   "a/**"  -> ["a/**", "a"]               (unanchored ** also matches "a")
#   "a"     -> ["a", "a/**"]               (bare name also matches contents)
#   "/a"    -> ["/a"]                      (anchored patterns stay as-is)
# Empty variants are dropped and duplicates removed, preserving order.
def ignore_pattern_variants(normalized)
  variants = [normalized]

  case
  when normalized.end_with?("/")
    stem = normalized.sub(%r{/+\z}, "")
    variants.push(stem, "#{stem}/**")
  when normalized.end_with?("/**")
    stem = normalized.sub(%r{/+\*\*\z}, "")
    variants.push(stem) unless normalized.start_with?("/")
  when !normalized.start_with?("/")
    variants.push("#{normalized}/**")
  end

  variants.reject(&:empty?).uniq
end
|
|
976
|
+
|
|
977
|
+
# Builds the candidate spellings of a path to try against one ignore
# pattern: the bare path, a trailing-slash form for directories, and —
# only for slash-anchored patterns — leading-slash forms so anchored
# patterns can match.
def ignore_path_candidates(normalized_path, normalized_pattern, directory:)
  candidates = [normalized_path]
  candidates << "#{normalized_path}/" if directory

  if normalized_pattern.start_with?("/")
    directory_glob = normalized_pattern.end_with?("/**")
    candidates << "/#{normalized_path}" unless directory && directory_glob
    candidates << "/#{normalized_path}/" if directory && !directory_glob
  end

  candidates
end
|
|
987
|
+
|
|
988
|
+
# Normalizes an ignore pattern: unifies separators to "/", drops a leading
# "./", and collapses any run of leading slashes to a single anchoring "/".
def normalize_ignore_pattern(pattern)
  normalized = pattern.to_s.tr(File::SEPARATOR, "/").sub(/\A\.\//, "")
  stripped = normalized.sub(%r{\A/+}, "")
  normalized.start_with?("/") ? "/#{stripped}" : stripped
end
|
|
994
|
+
|
|
995
|
+
# Normalizes a filesystem path for ignore matching: unifies separators to
# "/", drops a leading "./", and strips all leading slashes (paths are
# always matched relative to the context directory).
def normalize_ignore_path(path)
  path
    .to_s
    .tr(File::SEPARATOR, "/")
    .sub(/\A\.\//, "")
    .sub(%r{\A/+}, "")
end
|
|
998
|
+
|
|
999
|
+
# Collects the caller source locations for every build step, in build
# order: base declaration, each instruction, then the finalization call.
# Missing (nil) locations are dropped.
def build_step_origins
  step_locations = @instructions.map { |instruction| instruction[:sourceLocation] }
  [@base_source_location, *step_locations, @finalization_source_location].compact
end
|
|
1005
|
+
|
|
1006
|
+
# Reads ignore patterns from the context directory's .dockerignore file.
# Returns [] when the file does not exist; otherwise the stripped,
# non-empty, non-comment lines in file order.
#
# NOTE(review): "!" negation entries are returned verbatim — presumably
# they are not specially handled downstream; confirm against ignored_path?.
def read_dockerignore
  path = File.join(@file_context_path, ".dockerignore")
  return [] unless File.exist?(path)

  File.readlines(path, chomp: true).filter_map do |raw|
    line = raw.strip
    line unless line.empty? || line.start_with?("#")
  end
end
|
|
1014
|
+
|
|
1015
|
+
# Resolves GCP service-account credentials: a String is treated as a file
# path relative to the context directory and its contents returned; any
# other value (e.g. a Hash of credentials) is serialized to JSON.
def read_gcp_service_account_json(path_or_content)
  if path_or_content.is_a?(String)
    File.read(File.join(@file_context_path, path_or_content))
  else
    JSON.generate(path_or_content)
  end
end
|
|
1020
|
+
|
|
1021
|
+
# Infers the default file-context directory: the directory of the first
# caller frame outside the SDK itself (so COPY sources resolve relative to
# the user's script), falling back to the current working directory.
def default_file_context_path
  frame = caller_locations(2, 20).find do |entry|
    path = entry.absolute_path || entry.path
    path ? !path.include?("/lib/e2b/") : false
  end

  frame ? File.dirname(frame.absolute_path || frame.path) : Dir.pwd
end
|
|
1033
|
+
|
|
1034
|
+
# Captures the "file:line" of the first caller frame outside the SDK, used
# to point build errors at the user's own code. Returns nil when no such
# frame is found.
def capture_source_location
  frame = caller_locations(2, 20).find do |entry|
    path = entry.absolute_path || entry.path
    path ? !path.include?("/lib/e2b/") : false
  end

  frame && frame.to_s
end
|
|
1044
|
+
|
|
1045
|
+
# Instance-level convenience that forwards to the class-level build-error
# factory (private on the class, hence the send). Returns the error object
# for the caller to raise.
def build_error(message, step: nil, source_location: nil, status_code: nil, headers: {})
  self.class.send(:build_error, message,
                  step: step,
                  source_location: source_location,
                  status_code: status_code,
                  headers: headers)
end
|
|
1055
|
+
|
|
1056
|
+
# Instance-level convenience that forwards to the class-level template-error
# factory (private on the class, hence the send). Returns the error object
# for the caller to raise.
def template_error(message, source_location: nil, status_code: nil, headers: {})
  self.class.send(:template_error, message,
                  source_location: source_location,
                  status_code: status_code,
                  headers: headers)
end
|
|
1065
|
+
|
|
1066
|
+
# Coerces a readiness command to its String form: ReadyCmd-like objects
# answer #get_cmd; anything else is stringified.
def normalize_ready_cmd(ready_cmd)
  if ready_cmd.respond_to?(:get_cmd)
    ready_cmd.get_cmd
  else
    ready_cmd.to_s
  end
end
|
|
1071
|
+
|
|
1072
|
+
# Fetches +key+ from a copy_items entry, accepting symbol or string keys.
# Raises KeyError when the value is missing and required; otherwise returns
# the value (possibly nil).
def copy_item_value(item, key, required: true)
  value = item[key]
  value = item[key.to_s] if value.nil?
  if value.nil? && required
    raise KeyError, "Missing copy_items value for #{key}"
  end

  value
end
|
|
1079
|
+
|
|
1080
|
+
# Guards devcontainer-only operations: returns nil when this template is
# based on the "devcontainer" template, otherwise raises a build error.
#
# Fix: corrected the grammar of the error message ("can only used" ->
# "can only be used").
def ensure_devcontainer_template!
  return if @base_template == "devcontainer"

  raise build_error(
    "Devcontainers can only be used in the devcontainer template",
    source_location: capture_source_location
  )
end
|
|
1088
|
+
end
|
|
1089
|
+
end
|