@vm0/runner 2.6.0 → 2.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +468 -400
  2. package/package.json +6 -1
package/index.js CHANGED
@@ -37,6 +37,42 @@ var runnerConfigSchema = z.object({
  port: z.number().int().min(1024).max(65535).default(8080)
  }).default({})
  });
+ var debugConfigSchema = z.object({
+ name: z.string().default("debug-runner"),
+ group: z.string().default("debug/local"),
+ server: z.object({
+ url: z.string().url().default("http://localhost:3000"),
+ token: z.string().default("debug-token")
+ }).default({}),
+ sandbox: z.object({
+ max_concurrent: z.number().int().min(1).default(1),
+ vcpu: z.number().int().min(1).default(2),
+ memory_mb: z.number().int().min(128).default(2048),
+ poll_interval_ms: z.number().int().min(1e3).default(5e3)
+ }).default({}),
+ firecracker: z.object({
+ binary: z.string().min(1, "Firecracker binary path is required"),
+ kernel: z.string().min(1, "Kernel path is required"),
+ rootfs: z.string().min(1, "Rootfs path is required")
+ }),
+ proxy: z.object({
+ port: z.number().int().min(1024).max(65535).default(8080)
+ }).default({})
+ });
+ function loadDebugConfig(configPath) {
+ if (!fs.existsSync(configPath)) {
+ throw new Error(`Config file not found: ${configPath}`);
+ }
+ const content = fs.readFileSync(configPath, "utf-8");
+ const raw = yaml.parse(content);
+ const result = debugConfigSchema.safeParse(raw);
+ if (!result.success) {
+ const errors = result.error.errors.map((e) => ` - ${e.path.join(".")}: ${e.message}`).join("\n");
+ throw new Error(`Invalid configuration:
+ ${errors}`);
+ }
+ return result.data;
+ }
  function loadConfig(configPath) {
  if (!fs.existsSync(configPath)) {
  throw new Error(`runner.yaml not found: ${configPath}`);
@@ -142,7 +178,7 @@ import path4 from "path";
  import fs6 from "fs";

  // src/lib/firecracker/vm.ts
- import { spawn } from "child_process";
+ import { execSync as execSync2, spawn } from "child_process";
  import fs2 from "fs";
  import path from "path";
  import readline from "readline";
@@ -545,13 +581,13 @@ var FirecrackerVM = class {
  state = "created";
  workDir;
  socketPath;
- vmRootfsPath;
- // Per-VM copy of rootfs
+ vmOverlayPath;
+ // Per-VM sparse overlay for writes
  constructor(config) {
  this.config = config;
  this.workDir = config.workDir || `/tmp/vm0-vm-${config.vmId}`;
  this.socketPath = path.join(this.workDir, "firecracker.sock");
- this.vmRootfsPath = path.join(this.workDir, "rootfs.ext4");
+ this.vmOverlayPath = path.join(this.workDir, "overlay.ext4");
  }
  /**
  * Get current VM state
@@ -590,8 +626,12 @@ var FirecrackerVM = class {
  if (fs2.existsSync(this.socketPath)) {
  fs2.unlinkSync(this.socketPath);
  }
- console.log(`[VM ${this.config.vmId}] Copying rootfs for isolation...`);
- fs2.copyFileSync(this.config.rootfsPath, this.vmRootfsPath);
+ console.log(`[VM ${this.config.vmId}] Creating sparse overlay file...`);
+ const overlaySize = 2 * 1024 * 1024 * 1024;
+ const fd = fs2.openSync(this.vmOverlayPath, "w");
+ fs2.ftruncateSync(fd, overlaySize);
+ fs2.closeSync(fd);
+ execSync2(`mkfs.ext4 -F -q "${this.vmOverlayPath}"`, { stdio: "ignore" });
  console.log(`[VM ${this.config.vmId}] Setting up network...`);
  this.networkConfig = await createTapDevice(this.config.vmId);
  console.log(`[VM ${this.config.vmId}] Starting Firecracker...`);
@@ -668,19 +708,27 @@ var FirecrackerVM = class {
  mem_size_mib: this.config.memoryMb
  });
  const networkBootArgs = generateNetworkBootArgs(this.networkConfig);
- const bootArgs = `console=ttyS0 reboot=k panic=1 pci=off ${networkBootArgs}`;
+ const bootArgs = `console=ttyS0 reboot=k panic=1 pci=off init=/sbin/overlay-init ${networkBootArgs}`;
  console.log(`[VM ${this.config.vmId}] Boot args: ${bootArgs}`);
  await this.client.setBootSource({
  kernel_image_path: this.config.kernelPath,
  boot_args: bootArgs
  });
- console.log(`[VM ${this.config.vmId}] Rootfs: ${this.vmRootfsPath}`);
+ console.log(
+ `[VM ${this.config.vmId}] Base rootfs: ${this.config.rootfsPath}`
+ );
  await this.client.setDrive({
  drive_id: "rootfs",
- path_on_host: this.vmRootfsPath,
+ path_on_host: this.config.rootfsPath,
  is_root_device: true,
+ is_read_only: true
+ });
+ console.log(`[VM ${this.config.vmId}] Overlay: ${this.vmOverlayPath}`);
+ await this.client.setDrive({
+ drive_id: "overlay",
+ path_on_host: this.vmOverlayPath,
+ is_root_device: false,
  is_read_only: false
- // Need write access for agent execution
  });
  console.log(
  `[VM ${this.config.vmId}] Network: ${this.networkConfig.tapDevice}`
@@ -770,7 +818,7 @@ var FirecrackerVM = class {
  };

  // src/lib/firecracker/guest.ts
- import { exec as exec2, execSync as execSync2 } from "child_process";
+ import { exec as exec2, execSync as execSync3 } from "child_process";
  import { promisify as promisify2 } from "util";
  import fs3 from "fs";
  import path2 from "path";
@@ -4891,9 +4939,7 @@ var storedExecutionContextSchema = z4.object({
  encryptedSecrets: z4.string().nullable(),
  // AES-256-GCM encrypted secrets
  cliAgentType: z4.string(),
- experimentalFirewall: experimentalFirewallSchema.optional(),
- postCreateCommand: z4.string().nullable().optional()
- // Lifecycle hook
+ experimentalFirewall: experimentalFirewallSchema.optional()
  });
  var executionContextSchema = z4.object({
  runId: z4.string().uuid(),
@@ -4911,9 +4957,7 @@ var executionContextSchema = z4.object({
  secretValues: z4.array(z4.string()).nullable(),
  cliAgentType: z4.string(),
  // Experimental firewall configuration
- experimentalFirewall: experimentalFirewallSchema.optional(),
- // Lifecycle hook - command to run after working dir creation
- postCreateCommand: z4.string().nullable().optional()
+ experimentalFirewall: experimentalFirewallSchema.optional()
  });
  var runnersJobClaimContract = c.router({
  claim: {
@@ -6297,19 +6341,11 @@ var agentVersionSchema = z16.object({
  id: z16.string(),
  agent_id: z16.string(),
  version_number: z16.number(),
- config: z16.unknown(),
- // Agent YAML configuration
  created_at: timestampSchema
  });
- var publicAgentDetailSchema = publicAgentSchema.extend({
- config: z16.unknown().optional()
- });
+ var publicAgentDetailSchema = publicAgentSchema;
  var paginatedAgentsSchema = createPaginatedResponseSchema(publicAgentSchema);
  var paginatedAgentVersionsSchema = createPaginatedResponseSchema(agentVersionSchema);
- var updateAgentRequestSchema = z16.object({
- config: z16.unknown()
- // New agent configuration (creates new version)
- });
  var agentListQuerySchema = listQuerySchema.extend({
  name: z16.string().optional()
  });
@@ -6342,23 +6378,6 @@ var publicAgentByIdContract = c11.router({
  },
  summary: "Get agent",
  description: "Get agent details by ID"
- },
- update: {
- method: "PUT",
- path: "/v1/agents/:id",
- pathParams: z16.object({
- id: z16.string().min(1, "Agent ID is required")
- }),
- body: updateAgentRequestSchema,
- responses: {
- 200: publicAgentDetailSchema,
- 400: publicApiErrorSchema,
- 401: publicApiErrorSchema,
- 404: publicApiErrorSchema,
- 500: publicApiErrorSchema
- },
- summary: "Update agent",
- description: "Update agent configuration. Creates a new version if config changes."
  }
  });
  var publicAgentVersionsContract = c11.router({
@@ -6402,12 +6421,12 @@ var publicRunSchema = z17.object({
  completed_at: timestampSchema.nullable()
  });
  var publicRunDetailSchema = publicRunSchema.extend({
- output: z17.string().nullable(),
  error: z17.string().nullable(),
  execution_time_ms: z17.number().nullable(),
  checkpoint_id: z17.string().nullable(),
  session_id: z17.string().nullable(),
- artifacts: z17.record(z17.string(), z17.string()).optional(),
+ artifact_name: z17.string().nullable(),
+ artifact_version: z17.string().nullable(),
  volumes: z17.record(z17.string(), z17.string()).optional()
  });
  var paginatedRunsSchema = createPaginatedResponseSchema(publicRunSchema);
@@ -6428,8 +6447,10 @@ var createRunRequestSchema = z17.object({
  // Optional configuration
  variables: z17.record(z17.string(), z17.string()).optional(),
  secrets: z17.record(z17.string(), z17.string()).optional(),
- artifacts: z17.record(z17.string(), z17.string()).optional(),
- // artifact_name -> version
+ artifact_name: z17.string().optional(),
+ // Artifact name to mount
+ artifact_version: z17.string().optional(),
+ // Artifact version (defaults to latest)
  volumes: z17.record(z17.string(), z17.string()).optional()
  // volume_name -> version
  });
@@ -6643,51 +6664,6 @@ var paginatedArtifactsSchema = createPaginatedResponseSchema(publicArtifactSchem
  var paginatedArtifactVersionsSchema = createPaginatedResponseSchema(
  artifactVersionSchema
  );
- var createArtifactRequestSchema = z18.object({
- name: z18.string().min(1).max(100).regex(
- /^[a-z0-9][a-z0-9-]*[a-z0-9]$|^[a-z0-9]$/,
- "Name must be lowercase alphanumeric with hyphens, not starting or ending with hyphen"
- )
- });
- var fileEntrySchema = z18.object({
- path: z18.string(),
- size: z18.number(),
- hash: z18.string().optional()
- // SHA-256 hash of file content
- });
- var prepareUploadRequestSchema = z18.object({
- files: z18.array(fileEntrySchema),
- message: z18.string().optional()
- // Optional commit message
- });
- var presignedUploadSchema2 = z18.object({
- path: z18.string(),
- upload_url: z18.string(),
- // Presigned S3 URL
- upload_id: z18.string()
- // For multi-part uploads
- });
- var prepareUploadResponseSchema = z18.object({
- upload_session_id: z18.string(),
- files: z18.array(presignedUploadSchema2),
- expires_at: timestampSchema
- });
- var commitUploadRequestSchema = z18.object({
- upload_session_id: z18.string(),
- message: z18.string().optional()
- });
- var downloadResponseSchema = z18.object({
- version_id: z18.string(),
- files: z18.array(
- z18.object({
- path: z18.string(),
- size: z18.number(),
- download_url: z18.string()
- // Presigned S3 URL
- })
- ),
- expires_at: timestampSchema
- });
  var publicArtifactsListContract = c13.router({
  list: {
  method: "GET",
@@ -6700,20 +6676,6 @@ var publicArtifactsListContract = c13.router({
  },
  summary: "List artifacts",
  description: "List all artifacts in the current scope with pagination"
- },
- create: {
- method: "POST",
- path: "/v1/artifacts",
- body: createArtifactRequestSchema,
- responses: {
- 201: publicArtifactDetailSchema,
- 400: publicApiErrorSchema,
- 401: publicApiErrorSchema,
- 409: publicApiErrorSchema,
- 500: publicApiErrorSchema
- },
- summary: "Create artifact",
- description: "Create a new empty artifact container"
  }
  });
  var publicArtifactByIdContract = c13.router({
@@ -6751,44 +6713,6 @@ var publicArtifactVersionsContract = c13.router({
  description: "List all versions of an artifact with pagination"
  }
  });
- var publicArtifactUploadContract = c13.router({
- prepareUpload: {
- method: "POST",
- path: "/v1/artifacts/:id/upload",
- pathParams: z18.object({
- id: z18.string().min(1, "Artifact ID is required")
- }),
- body: prepareUploadRequestSchema,
- responses: {
- 200: prepareUploadResponseSchema,
- 400: publicApiErrorSchema,
- 401: publicApiErrorSchema,
- 404: publicApiErrorSchema,
- 500: publicApiErrorSchema
- },
- summary: "Prepare artifact upload",
- description: "Get presigned URLs for direct S3 upload. Returns upload URLs for each file."
- }
- });
- var publicArtifactCommitContract = c13.router({
- commitUpload: {
- method: "POST",
- path: "/v1/artifacts/:id/commit",
- pathParams: z18.object({
- id: z18.string().min(1, "Artifact ID is required")
- }),
- body: commitUploadRequestSchema,
- responses: {
- 200: artifactVersionSchema,
- 400: publicApiErrorSchema,
- 401: publicApiErrorSchema,
- 404: publicApiErrorSchema,
- 500: publicApiErrorSchema
- },
- summary: "Commit artifact upload",
- description: "Finalize an upload session and create a new artifact version."
- }
- });
  var publicArtifactDownloadContract = c13.router({
  download: {
  method: "GET",
@@ -6801,13 +6725,14 @@ var publicArtifactDownloadContract = c13.router({
  // Defaults to current version
  }),
  responses: {
- 200: downloadResponseSchema,
+ 302: z18.undefined(),
+ // Redirect to presigned URL
  401: publicApiErrorSchema,
  404: publicApiErrorSchema,
  500: publicApiErrorSchema
  },
  summary: "Download artifact",
- description: "Get presigned URLs for downloading artifact files. Defaults to current version."
+ description: "Redirect to presigned URL for downloading artifact as tar.gz archive. Defaults to current version."
  }
  });

@@ -6841,51 +6766,6 @@ var publicVolumeDetailSchema = publicVolumeSchema.extend({
  });
  var paginatedVolumesSchema = createPaginatedResponseSchema(publicVolumeSchema);
  var paginatedVolumeVersionsSchema = createPaginatedResponseSchema(volumeVersionSchema);
- var createVolumeRequestSchema = z19.object({
- name: z19.string().min(1).max(100).regex(
- /^[a-z0-9][a-z0-9-]*[a-z0-9]$|^[a-z0-9]$/,
- "Name must be lowercase alphanumeric with hyphens, not starting or ending with hyphen"
- )
- });
- var fileEntrySchema2 = z19.object({
- path: z19.string(),
- size: z19.number(),
- hash: z19.string().optional()
- // SHA-256 hash of file content
- });
- var prepareUploadRequestSchema2 = z19.object({
- files: z19.array(fileEntrySchema2),
- message: z19.string().optional()
- // Optional commit message
- });
- var presignedUploadSchema3 = z19.object({
- path: z19.string(),
- upload_url: z19.string(),
- // Presigned S3 URL
- upload_id: z19.string()
- // For multi-part uploads
- });
- var prepareUploadResponseSchema2 = z19.object({
- upload_session_id: z19.string(),
- files: z19.array(presignedUploadSchema3),
- expires_at: timestampSchema
- });
- var commitUploadRequestSchema2 = z19.object({
- upload_session_id: z19.string(),
- message: z19.string().optional()
- });
- var downloadResponseSchema2 = z19.object({
- version_id: z19.string(),
- files: z19.array(
- z19.object({
- path: z19.string(),
- size: z19.number(),
- download_url: z19.string()
- // Presigned S3 URL
- })
- ),
- expires_at: timestampSchema
- });
  var publicVolumesListContract = c14.router({
  list: {
  method: "GET",
@@ -6898,20 +6778,6 @@ var publicVolumesListContract = c14.router({
  },
  summary: "List volumes",
  description: "List all volumes in the current scope with pagination"
- },
- create: {
- method: "POST",
- path: "/v1/volumes",
- body: createVolumeRequestSchema,
- responses: {
- 201: publicVolumeDetailSchema,
- 400: publicApiErrorSchema,
- 401: publicApiErrorSchema,
- 409: publicApiErrorSchema,
- 500: publicApiErrorSchema
- },
- summary: "Create volume",
- description: "Create a new empty volume container"
  }
  });
  var publicVolumeByIdContract = c14.router({
@@ -6949,44 +6815,6 @@ var publicVolumeVersionsContract = c14.router({
  description: "List all versions of a volume with pagination"
  }
  });
- var publicVolumeUploadContract = c14.router({
- prepareUpload: {
- method: "POST",
- path: "/v1/volumes/:id/upload",
- pathParams: z19.object({
- id: z19.string().min(1, "Volume ID is required")
- }),
- body: prepareUploadRequestSchema2,
- responses: {
- 200: prepareUploadResponseSchema2,
- 400: publicApiErrorSchema,
- 401: publicApiErrorSchema,
- 404: publicApiErrorSchema,
- 500: publicApiErrorSchema
- },
- summary: "Prepare volume upload",
- description: "Get presigned URLs for direct S3 upload. Returns upload URLs for each file."
- }
- });
- var publicVolumeCommitContract = c14.router({
- commitUpload: {
- method: "POST",
- path: "/v1/volumes/:id/commit",
- pathParams: z19.object({
- id: z19.string().min(1, "Volume ID is required")
- }),
- body: commitUploadRequestSchema2,
- responses: {
- 200: volumeVersionSchema,
- 400: publicApiErrorSchema,
- 401: publicApiErrorSchema,
- 404: publicApiErrorSchema,
- 500: publicApiErrorSchema
- },
- summary: "Commit volume upload",
- description: "Finalize an upload session and create a new volume version."
- }
- });
  var publicVolumeDownloadContract = c14.router({
  download: {
  method: "GET",
@@ -6999,93 +6827,14 @@ var publicVolumeDownloadContract = c14.router({
  // Defaults to current version
  }),
  responses: {
- 200: downloadResponseSchema2,
+ 302: z19.undefined(),
+ // Redirect to presigned URL
  401: publicApiErrorSchema,
  404: publicApiErrorSchema,
  500: publicApiErrorSchema
  },
  summary: "Download volume",
- description: "Get presigned URLs for downloading volume files. Defaults to current version."
- }
- });
-
- // ../../packages/core/src/contracts/public/tokens.ts
- import { z as z20 } from "zod";
- var c15 = initContract();
- var publicTokenSchema = z20.object({
- id: z20.string(),
- name: z20.string(),
- token_prefix: z20.string(),
- // First 12 chars for identification (e.g., "vm0_live_abc")
- last_used_at: timestampSchema.nullable(),
- expires_at: timestampSchema,
- created_at: timestampSchema
- });
- var publicTokenDetailSchema = publicTokenSchema.extend({
- token: z20.string().optional()
- // Full token value, only returned on creation
- });
- var paginatedTokensSchema = createPaginatedResponseSchema(publicTokenSchema);
- var createTokenRequestSchema = z20.object({
- name: z20.string().min(1, "Name is required").max(100, "Name too long"),
- expires_in_days: z20.number().min(1).max(365).optional()
- // null for no expiry (default 90 days)
- });
- var publicTokensListContract = c15.router({
- list: {
- method: "GET",
- path: "/v1/tokens",
- query: listQuerySchema,
- responses: {
- 200: paginatedTokensSchema,
- 401: publicApiErrorSchema
- },
- summary: "List API tokens",
- description: "List all API tokens for the authenticated user"
- },
- create: {
- method: "POST",
- path: "/v1/tokens",
- body: createTokenRequestSchema,
- responses: {
- 201: publicTokenDetailSchema,
- // Includes full token value
- 400: publicApiErrorSchema,
- 401: publicApiErrorSchema
- },
- summary: "Create API token",
- description: "Create a new API token. The token value is only returned once on creation."
- }
- });
- var publicTokenByIdContract = c15.router({
- get: {
- method: "GET",
- path: "/v1/tokens/:id",
- pathParams: z20.object({
- id: z20.string()
- }),
- responses: {
- 200: publicTokenSchema,
- // Does NOT include token value
- 401: publicApiErrorSchema,
- 404: publicApiErrorSchema
- },
- summary: "Get API token",
- description: "Get details of an API token (does not include the token value)"
- },
- delete: {
- method: "DELETE",
- path: "/v1/tokens/:id",
- pathParams: z20.object({
- id: z20.string()
- }),
- responses: {
- 204: z20.undefined(),
- 401: publicApiErrorSchema,
- 404: publicApiErrorSchema
- },
- summary: "Revoke API token",
- description: "Permanently revoke an API token. This action cannot be undone."
+ description: "Redirect to presigned URL for downloading volume as tar.gz archive. Defaults to current version."
  }
  });

@@ -7129,9 +6878,6 @@ ARTIFACT_MOUNT_PATH = os.environ.get("VM0_ARTIFACT_MOUNT_PATH", "")
  ARTIFACT_VOLUME_NAME = os.environ.get("VM0_ARTIFACT_VOLUME_NAME", "")
  ARTIFACT_VERSION_ID = os.environ.get("VM0_ARTIFACT_VERSION_ID", "")

- # Lifecycle hook - command to execute after working directory creation
- POST_CREATE_COMMAND = os.environ.get("VM0_POST_CREATE_COMMAND", "")
-
  # Construct webhook endpoint URLs
  WEBHOOK_URL = f"{API_URL}/api/webhooks/agent/events"
  CHECKPOINT_URL = f"{API_URL}/api/webhooks/agent/checkpoints"
@@ -8993,7 +8739,7 @@ sys.path.insert(0, "/usr/local/bin/vm0-agent/lib")
  from common import (
  WORKING_DIR, PROMPT, RESUME_SESSION_ID, COMPLETE_URL, RUN_ID,
  EVENT_ERROR_FLAG, HEARTBEAT_URL, HEARTBEAT_INTERVAL, AGENT_LOG_FILE,
- CLI_AGENT_TYPE, OPENAI_MODEL, POST_CREATE_COMMAND, validate_config
+ CLI_AGENT_TYPE, OPENAI_MODEL, validate_config
  )
  from log import log_info, log_error, log_warn
  from events import send_event
@@ -9103,26 +8849,6 @@ def _run() -> tuple[int, str]:
  except OSError as e:
  raise RuntimeError(f"Failed to create/change to working directory: {WORKING_DIR} - {e}") from e

- # Execute postCreateCommand if specified (lifecycle hook)
- # This runs after working directory is created but before agent execution
- if POST_CREATE_COMMAND:
- log_info(f"Running postCreateCommand: {POST_CREATE_COMMAND}")
- try:
- result = subprocess.run(
- ["/bin/bash", "-c", POST_CREATE_COMMAND],
- cwd=WORKING_DIR,
- capture_output=True,
- text=True
- )
- if result.returncode != 0:
- stderr_output = result.stderr.strip() if result.stderr else "No error output"
- raise RuntimeError(f"postCreateCommand failed with exit code {result.returncode}: {stderr_output}")
- if result.stdout:
- log_info(f"postCreateCommand output: {result.stdout.strip()}")
- log_info("postCreateCommand completed successfully")
- except subprocess.SubprocessError as e:
- raise RuntimeError(f"Failed to execute postCreateCommand: {e}") from e
-
  # Set up Codex configuration if using Codex CLI
  # Claude Code uses ~/.claude by default (no configuration needed)
  if CLI_AGENT_TYPE == "codex":
@@ -10277,6 +10003,208 @@ function initProxyManager(config) {
  return globalProxyManager;
  }

+ // src/lib/metrics/provider.ts
+ import {
+ MeterProvider,
+ PeriodicExportingMetricReader
+ } from "@opentelemetry/sdk-metrics";
+ import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
+ import { Resource } from "@opentelemetry/resources";
+ import { ATTR_SERVICE_NAME } from "@opentelemetry/semantic-conventions";
+ import { metrics } from "@opentelemetry/api";
+ var meterProvider = null;
+ var initialized = false;
+ var enabled = false;
+ var _runnerLabel = "";
+ function initMetrics(config) {
+ if (initialized) return;
+ initialized = true;
+ _runnerLabel = config.runnerLabel;
+ if (!config.axiomToken) {
+ console.log("[metrics] AXIOM_TOKEN not configured, metrics disabled");
+ return;
+ }
+ const env = config.environment ?? "dev";
+ const exporter = new OTLPMetricExporter({
+ url: "https://api.axiom.co/v1/metrics",
+ headers: {
+ Authorization: `Bearer ${config.axiomToken}`,
+ "X-Axiom-Dataset": `runner-metrics-${env}`
+ }
+ });
+ meterProvider = new MeterProvider({
+ resource: new Resource({
+ [ATTR_SERVICE_NAME]: config.serviceName,
+ "deployment.environment": env,
+ "runner.label": config.runnerLabel
+ }),
+ readers: [
+ new PeriodicExportingMetricReader({
+ exporter,
+ exportIntervalMillis: config.exportIntervalMs ?? 3e4
+ })
+ ]
+ });
+ metrics.setGlobalMeterProvider(meterProvider);
+ enabled = true;
+ console.log(
+ `[metrics] initialized for ${config.serviceName} (${env}), runner: ${config.runnerLabel}`
+ );
+ }
+ function isMetricsEnabled() {
+ return enabled;
+ }
+ function getRunnerLabel() {
+ return _runnerLabel;
+ }
+ function getMeter(name) {
+ return metrics.getMeter(name);
+ }
+ async function flushMetrics() {
+ if (meterProvider) {
+ await meterProvider.forceFlush();
+ }
+ }
+ async function shutdownMetrics() {
+ if (meterProvider) {
+ await meterProvider.shutdown();
+ }
+ }
+
+ // src/lib/metrics/instruments.ts
+ var runnerOperationTotal = null;
+ var runnerOperationErrorsTotal = null;
+ var runnerOperationDuration = null;
+ var sandboxOperationTotal = null;
+ var sandboxOperationErrorsTotal = null;
+ var sandboxOperationDuration = null;
+ function getRunnerInstruments() {
+ if (!runnerOperationTotal) {
+ const meter = getMeter("vm0-runner");
+ runnerOperationTotal = meter.createCounter("runner_operation_total", {
+ description: "Total number of runner operations"
+ });
+ runnerOperationErrorsTotal = meter.createCounter(
+ "runner_operation_errors_total",
+ {
+ description: "Total number of runner operation errors"
+ }
+ );
+ runnerOperationDuration = meter.createHistogram(
+ "runner_operation_duration_ms",
+ {
+ description: "Runner operation duration in milliseconds",
+ unit: "ms"
+ }
+ );
+ }
+ return {
+ runnerOperationTotal,
+ runnerOperationErrorsTotal,
+ runnerOperationDuration
+ };
+ }
+ function getSandboxInstruments() {
+ if (!sandboxOperationTotal) {
+ const meter = getMeter("vm0-runner");
+ sandboxOperationTotal = meter.createCounter("sandbox_operation_total", {
+ description: "Total number of sandbox operations"
+ });
+ sandboxOperationErrorsTotal = meter.createCounter(
+ "sandbox_operation_errors_total",
+ {
+ description: "Total number of sandbox operation errors"
+ }
+ );
+ sandboxOperationDuration = meter.createHistogram(
+ "sandbox_operation_duration_ms",
+ {
+ description: "Sandbox operation duration in milliseconds",
+ unit: "ms"
+ }
+ );
+ }
+ return {
+ sandboxOperationTotal,
+ sandboxOperationErrorsTotal,
+ sandboxOperationDuration
+ };
+ }
+ function recordRunnerOperation(attrs) {
+ if (!isMetricsEnabled()) return;
+ const {
+ runnerOperationTotal: runnerOperationTotal2,
+ runnerOperationErrorsTotal: runnerOperationErrorsTotal2,
+ runnerOperationDuration: runnerOperationDuration2
+ } = getRunnerInstruments();
+ const labels = {
+ action_type: attrs.actionType,
+ runner_label: getRunnerLabel()
+ };
+ runnerOperationTotal2.add(1, labels);
+ if (!attrs.success) {
+ runnerOperationErrorsTotal2.add(1, labels);
+ }
+ runnerOperationDuration2.record(attrs.durationMs, {
+ ...labels,
+ success: String(attrs.success)
+ });
+ }
+ function recordSandboxOperation(attrs) {
+ if (!isMetricsEnabled()) return;
+ const {
+ sandboxOperationTotal: sandboxOperationTotal2,
+ sandboxOperationErrorsTotal: sandboxOperationErrorsTotal2,
+ sandboxOperationDuration: sandboxOperationDuration2
+ } = getSandboxInstruments();
+ const labels = {
+ sandbox_type: "runner",
+ action_type: attrs.actionType
+ };
+ sandboxOperationTotal2.add(1, labels);
+ if (!attrs.success) {
+ sandboxOperationErrorsTotal2.add(1, labels);
+ }
+ sandboxOperationDuration2.record(attrs.durationMs, {
+ ...labels,
+ success: String(attrs.success)
+ });
+ }
+
+ // src/lib/metrics/timing.ts
+ async function withRunnerTiming(actionType, fn) {
+ const startTime = Date.now();
+ let success = true;
+ try {
+ return await fn();
+ } catch (error) {
+ success = false;
+ throw error;
+ } finally {
+ recordRunnerOperation({
+ actionType,
+ durationMs: Date.now() - startTime,
+ success
+ });
+ }
+ }
+ async function withSandboxTiming(actionType, fn) {
+ const startTime = Date.now();
+ let success = true;
+ try {
+ return await fn();
+ } catch (error) {
+ success = false;
+ throw error;
+ } finally {
+ recordSandboxOperation({
+ actionType,
+ durationMs: Date.now() - startTime,
+ success
+ });
+ }
+ }
+
  // src/lib/executor.ts
  function getVmIdFromRunId(runId) {
  return runId.split("-")[0] || runId.substring(0, 8);
@@ -10305,9 +10233,6 @@ function buildEnvironmentVariables(context, apiUrl) {
  envVars.VM0_ARTIFACT_VOLUME_NAME = artifact.vasStorageName;
  envVars.VM0_ARTIFACT_VERSION_ID = artifact.vasVersionId;
  }
- if (context.postCreateCommand) {
- envVars.VM0_POST_CREATE_COMMAND = context.postCreateCommand;
- }
  if (context.resumeSession) {
  envVars.VM0_RESUME_SESSION_ID = context.resumeSession.sessionId;
  }
@@ -10467,11 +10392,12 @@ nameserver 1.1.1.1`;
  `sudo sh -c 'rm -f /etc/resolv.conf && echo "${dnsConfig}" > /etc/resolv.conf'`
  );
  }
- async function executeJob(context, config) {
+ async function executeJob(context, config, options = {}) {
  const vmId = getVmIdFromRunId(context.runId);
  let vm = null;
  let guestIp = null;
- console.log(`[Executor] Starting job ${context.runId} in VM ${vmId}`);
+ const log = options.logger ?? ((msg) => console.log(msg));
+ log(`[Executor] Starting job ${context.runId} in VM ${vmId}`);
  try {
  const workspacesDir = path4.join(process.cwd(), "workspaces");
  const vmConfig = {
@@ -10483,24 +10409,27 @@ async function executeJob(context, config) {
  firecrackerBinary: config.firecracker.binary,
  workDir: path4.join(workspacesDir, `vm0-${vmId}`)
  };
- console.log(`[Executor] Creating VM ${vmId}...`);
+ log(`[Executor] Creating VM ${vmId}...`);
  vm = new FirecrackerVM(vmConfig);
- await vm.start();
+ await withSandboxTiming("vm_create", () => vm.start());
  guestIp = vm.getGuestIp();
  if (!guestIp) {
  throw new Error("VM started but no IP address available");
  }
- console.log(`[Executor] VM ${vmId} started, guest IP: ${guestIp}`);
+ log(`[Executor] VM ${vmId} started, guest IP: ${guestIp}`);
  const privateKeyPath = getRunnerSSHKeyPath();
  const ssh = createVMSSHClient(guestIp, "user", privateKeyPath || void 0);
- console.log(`[Executor] Waiting for SSH on ${guestIp}...`);
- await ssh.waitUntilReachable(12e4, 2e3);
- console.log(`[Executor] SSH ready on ${guestIp}`);
+ log(`[Executor] Waiting for SSH on ${guestIp}...`);
+ await withSandboxTiming(
+ "ssh_wait",
+ () => ssh.waitUntilReachable(12e4, 2e3)
+ );
+ log(`[Executor] SSH ready on ${guestIp}`);
  const firewallConfig = context.experimentalFirewall;
  if (firewallConfig?.enabled) {
  const mitmEnabled = firewallConfig.experimental_mitm ?? false;
  const sealSecretsEnabled = firewallConfig.experimental_seal_secrets ?? false;
- console.log(
+ log(
  `[Executor] Setting up network security for VM ${guestIp} (mitm=${mitmEnabled}, sealSecrets=${sealSecretsEnabled})`
  );
  await setupVMProxyRules(guestIp, config.proxy.port);
@@ -10513,36 +10442,50 @@ async function executeJob(context, config) {
  await installProxyCA(ssh);
  }
  }
- console.log(`[Executor] Configuring DNS...`);
+ log(`[Executor] Configuring DNS...`);
  await configureDNS(ssh);
- console.log(`[Executor] Uploading scripts...`);
- await uploadScripts(ssh);
- console.log(`[Executor] Scripts uploaded to ${SCRIPT_PATHS.baseDir}`);
+ log(`[Executor] Uploading scripts...`);
+ await withSandboxTiming("script_upload", () => uploadScripts(ssh));
+ log(`[Executor] Scripts uploaded to ${SCRIPT_PATHS.baseDir}`);
  if (context.storageManifest) {
- await downloadStorages(ssh, context.storageManifest);
+ await withSandboxTiming(
+ "storage_download",
+ () => downloadStorages(ssh, context.storageManifest)
+ );
  }
  if (context.resumeSession) {
- await restoreSessionHistory(
- ssh,
- context.resumeSession,
- context.workingDir,
- context.cliAgentType || "claude-code"
+ await withSandboxTiming(
+ "session_restore",
+ () => restoreSessionHistory(
+ ssh,
+ context.resumeSession,
+ context.workingDir,
+ context.cliAgentType || "claude-code"
+ )
  );
  }
  const envVars = buildEnvironmentVariables(context, config.server.url);
  const envJson = JSON.stringify(envVars);
- console.log(
+ log(
  `[Executor] Writing env JSON (${envJson.length} bytes) to ${ENV_JSON_PATH}`
  );
  await ssh.writeFile(ENV_JSON_PATH, envJson);
  const systemLogFile = `/tmp/vm0-main-${context.runId}.log`;
  const exitCodeFile = `/tmp/vm0-exit-${context.runId}`;
- console.log(`[Executor] Running agent via env-loader (background)...`);
  const startTime = Date.now();
- await ssh.exec(
- `nohup sh -c 'python3 -u ${ENV_LOADER_PATH}; echo $? > ${exitCodeFile}' > ${systemLogFile} 2>&1 &`
- );
- console.log(`[Executor] Agent started in background`);
+ if (options.benchmarkMode) {
+ log(`[Executor] Running command directly (benchmark mode)...`);
+ await ssh.exec(
+ `nohup sh -c '${context.prompt}; echo $? > ${exitCodeFile}' > ${systemLogFile} 2>&1 &`
+ );
+ log(`[Executor] Command started in background`);
+ } else {
+ log(`[Executor] Running agent via env-loader (background)...`);
+ await ssh.exec(
+ `nohup sh -c 'python3 -u ${ENV_LOADER_PATH}; echo $? > ${exitCodeFile}' > ${systemLogFile} 2>&1 &`
+ );
+ log(`[Executor] Agent started in background`);
+ }
  const pollIntervalMs = 2e3;
  const maxWaitMs = 24 * 60 * 60 * 1e3;
  let exitCode = 1;
@@ -10551,25 +10494,35 @@ async function executeJob(context, config) {
  await new Promise((resolve) => setTimeout(resolve, pollIntervalMs));
  const checkResult = await ssh.exec(`cat ${exitCodeFile} 2>/dev/null`);
  if (checkResult.exitCode === 0 && checkResult.stdout.trim()) {
- exitCode = parseInt(checkResult.stdout.trim(), 10) || 1;
+ const parsed = parseInt(checkResult.stdout.trim(), 10);
+ exitCode = Number.isNaN(parsed) ? 1 : parsed;
  completed = true;
  break;
  }
  }
- const duration = Math.round((Date.now() - startTime) / 1e3);
+ const durationMs = Date.now() - startTime;
+ const duration = Math.round(durationMs / 1e3);
  if (!completed) {
- console.log(`[Executor] Agent timed out after ${duration}s`);
+ log(`[Executor] Agent timed out after ${duration}s`);
+ recordRunnerOperation({
+ actionType: "agent_execute",
+ durationMs,
+ success: false
+ });
  return {
  exitCode: 1,
  error: `Agent execution timed out after ${duration}s`
  };
  }
- console.log(
- `[Executor] Agent finished in ${duration}s with exit code ${exitCode}`
- );
+ recordRunnerOperation({
+ actionType: "agent_execute",
+ durationMs,
+ success: exitCode === 0
+ });
+ log(`[Executor] Agent finished in ${duration}s with exit code ${exitCode}`);
  const logResult = await ssh.exec(`tail -100 ${systemLogFile} 2>/dev/null`);
  if (logResult.stdout) {
- console.log(
+ log(
  `[Executor] Log output (${logResult.stdout.length} chars): ${logResult.stdout.substring(0, 500)}`
  );
  }
@@ -10586,7 +10539,7 @@ async function executeJob(context, config) {
  };
  } finally {
  if (context.experimentalFirewall?.enabled && guestIp) {
- console.log(`[Executor] Cleaning up network security for VM ${guestIp}`);
+ log(`[Executor] Cleaning up network security for VM ${guestIp}`);
  try {
  await removeVMProxyRules(guestIp, config.proxy.port);
  } catch (err) {
@@ -10595,21 +10548,23 @@ async function executeJob(context, config) {
  );
  }
  getVMRegistry().unregister(guestIp);
- try {
- await uploadNetworkLogs(
- config.server.url,
- context.sandboxToken,
- context.runId
- );
- } catch (err) {
- console.error(
- `[Executor] Failed to upload network logs: ${err instanceof Error ? err.message : "Unknown error"}`
- );
+ if (!options.benchmarkMode) {
+ try {
+ await uploadNetworkLogs(
+ config.server.url,
+ context.sandboxToken,
+ context.runId
+ );
+ } catch (err) {
+ console.error(
+ `[Executor] Failed to upload network logs: ${err instanceof Error ? err.message : "Unknown error"}`
+ );
+ }
  }
  }
  if (vm) {
- console.log(`[Executor] Cleaning up VM ${vmId}...`);
- await vm.kill();
+ log(`[Executor] Cleaning up VM ${vmId}...`);
+ await withSandboxTiming("cleanup", () => vm.kill());
  }
  }
  }
@@ -10656,6 +10611,18 @@ var startCommand = new Command("start").description("Start the runner").option("
  const config = loadConfig(options.config);
  validateFirecrackerPaths(config.firecracker);
  console.log("Config valid");
+ const datasetSuffix = process.env.AXIOM_DATASET_SUFFIX;
+ if (!datasetSuffix) {
+ throw new Error(
+ "AXIOM_DATASET_SUFFIX is required. Set to 'dev' or 'prod'."
+ );
+ }
+ initMetrics({
+ serviceName: "vm0-runner",
+ runnerLabel: config.name,
+ axiomToken: process.env.AXIOM_TOKEN,
+ environment: datasetSuffix
+ });
  const networkCheck = checkNetworkPrerequisites();
  if (!networkCheck.ok) {
  console.error("Network prerequisites not met:");
@@ -10744,7 +10711,10 @@ var startCommand = new Command("start").description("Start the runner").option("
  continue;
  }
  try {
- const job = await pollForJob(config.server, config.group);
+ const job = await withRunnerTiming(
+ "poll",
+ () => pollForJob(config.server, config.group)
+ );
  if (!job) {
  await new Promise(
  (resolve) => setTimeout(resolve, config.sandbox.poll_interval_ms)
@@ -10753,7 +10723,10 @@ var startCommand = new Command("start").description("Start the runner").option("
  }
  console.log(`Found job: ${job.runId}`);
  try {
- const context = await claimJob(config.server, job.runId);
+ const context = await withRunnerTiming(
+ "claim",
+ () => claimJob(config.server, job.runId)
+ );
  console.log(`Claimed job: ${context.runId}`);
  activeJobs.add(context.runId);
  updateStatus();
@@ -10792,6 +10765,9 @@ var startCommand = new Command("start").description("Start the runner").option("
  console.log("Stopping network proxy...");
  await getProxyManager().stop();
  }
+ console.log("Flushing metrics...");
+ await flushMetrics();
+ await shutdownMetrics();
  state.mode = "stopped";
  updateStatus();
  console.log("Runner stopped");
@@ -10830,10 +10806,102 @@ var statusCommand = new Command2("status").description("Check runner connectivit
  }
  });

+ // src/commands/benchmark.ts
+ import { Command as Command3 } from "commander";
+ import crypto from "crypto";
+
+ // src/lib/timing.ts
+ var Timer = class {
+ startTime;
+ constructor() {
+ this.startTime = Date.now();
+ }
+ /**
+ * Get elapsed time formatted as [MM:SS.s]
+ */
+ elapsed() {
+ const ms = Date.now() - this.startTime;
+ const totalSeconds = ms / 1e3;
+ const minutes = Math.floor(totalSeconds / 60);
+ const seconds = (totalSeconds % 60).toFixed(1);
+ return `[${String(minutes).padStart(2, "0")}:${seconds.padStart(4, "0")}]`;
+ }
+ /**
+ * Log message with timestamp
+ */
+ log(message) {
+ console.log(`${this.elapsed()} ${message}`);
+ }
+ /**
+ * Get total elapsed time in seconds
+ */
+ totalSeconds() {
+ return (Date.now() - this.startTime) / 1e3;
+ }
+ };
+
+ // src/commands/benchmark.ts
+ function createBenchmarkContext(prompt, options) {
+ return {
+ runId: crypto.randomUUID(),
+ prompt,
+ agentComposeVersionId: "benchmark-local",
+ vars: null,
+ secretNames: null,
+ checkpointId: null,
+ sandboxToken: "benchmark-token-not-used",
+ workingDir: options.workingDir,
+ storageManifest: null,
+ environment: null,
+ resumeSession: null,
+ secretValues: null,
+ cliAgentType: options.agentType
+ };
+ }
+ var benchmarkCommand = new Command3("benchmark").description(
+ "Run a VM performance benchmark (executes bash command directly)"
+ ).argument("<prompt>", "The bash command to execute in the VM").option("--config <path>", "Config file path", "./runner.yaml").option("--working-dir <path>", "Working directory in VM", "/home/user").option("--agent-type <type>", "Agent type", "claude-code").action(async (prompt, options) => {
+ const timer = new Timer();
+ try {
+ timer.log("Loading configuration...");
+ const config = loadDebugConfig(options.config);
+ validateFirecrackerPaths(config.firecracker);
+ timer.log("Checking network prerequisites...");
+ const networkCheck = checkNetworkPrerequisites();
+ if (!networkCheck.ok) {
+ console.error("Network prerequisites not met:");
+ for (const error of networkCheck.errors) {
+ console.error(` - ${error}`);
+ }
+ process.exit(1);
+ }
+ timer.log("Setting up network bridge...");
+ await setupBridge();
+ timer.log(`Executing command: ${prompt}`);
+ const context = createBenchmarkContext(prompt, options);
+ const result = await executeJob(context, config, {
+ benchmarkMode: true,
+ logger: timer.log.bind(timer)
+ });
+ timer.log(`Exit code: ${result.exitCode}`);
+ if (result.error) {
+ timer.log(`Error: ${result.error}`);
+ }
+ timer.log(`Total time: ${timer.totalSeconds().toFixed(1)}s`);
+ process.exit(result.exitCode);
+ } catch (error) {
+ timer.log(
+ `Error: ${error instanceof Error ? error.message : "Unknown error"}`
+ );
+ process.exit(1);
+ }
+ });
+
  // src/index.ts
- var version = true ? "2.6.0" : "0.1.0";
+ var version = true ? "2.7.0" : "0.1.0";
  program.name("vm0-runner").version(version).description("Self-hosted runner for VM0 agents");
  program.addCommand(startCommand);
  program.addCommand(statusCommand);
+ program.addCommand(benchmarkCommand);
  program.parse();
  //# sourceMappingURL=index.js.map
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@vm0/runner",
- "version": "2.6.0",
+ "version": "2.7.0",
  "description": "Self-hosted runner for VM0 agents",
  "repository": {
  "type": "git",
@@ -15,6 +15,11 @@
  "."
  ],
  "dependencies": {
+ "@opentelemetry/api": "^1.9.0",
+ "@opentelemetry/exporter-metrics-otlp-proto": "^0.52.0",
+ "@opentelemetry/resources": "^1.25.0",
+ "@opentelemetry/sdk-metrics": "^1.25.0",
+ "@opentelemetry/semantic-conventions": "^1.25.0",
  "commander": "^14.0.0",
  "yaml": "^2.3.4",
  "zod": "^3.25.64"