@vm0/cli 4.12.0 → 4.13.0

This diff compares the contents of publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Files changed (2)
  1. package/index.js +428 -398
  2. package/package.json +1 -1
package/index.js CHANGED
@@ -176,14 +176,22 @@ async function setupToken() {
  );
  process.exit(1);
  }
+ console.log(chalk.green("\u2713 Authentication token exported successfully!"));
+ console.log("");
+ console.log("Your token:");
+ console.log("");
  console.log(token);
+ console.log("");
+ console.log(
+ `Use this token by setting: ${chalk.cyan("export VM0_TOKEN=<token>")}`
+ );
  }

  // src/commands/compose.ts
  import { Command } from "commander";
  import chalk2 from "chalk";
  import { readFile as readFile3 } from "fs/promises";
- import { existsSync as existsSync2 } from "fs";
+ import { existsSync as existsSync3 } from "fs";
  import { dirname as dirname2 } from "path";
  import { parse as parseYaml } from "yaml";

@@ -446,7 +454,7 @@ var ApiClient = class {
  /**
  * Generic GET request
  */
- async get(path13) {
+ async get(path12) {
  const baseUrl = await this.getBaseUrl();
  const token = await getToken();
  if (!token) {
@@ -459,7 +467,7 @@ var ApiClient = class {
  if (bypassSecret) {
  headers["x-vercel-protection-bypass"] = bypassSecret;
  }
- return fetch(`${baseUrl}${path13}`, {
+ return fetch(`${baseUrl}${path12}`, {
  method: "GET",
  headers
  });
@@ -467,7 +475,7 @@ var ApiClient = class {
  /**
  * Generic POST request
  */
- async post(path13, options) {
+ async post(path12, options) {
  const baseUrl = await this.getBaseUrl();
  const token = await getToken();
  if (!token) {
@@ -483,7 +491,7 @@ var ApiClient = class {
  if (bypassSecret) {
  headers["x-vercel-protection-bypass"] = bypassSecret;
  }
- return fetch(`${baseUrl}${path13}`, {
+ return fetch(`${baseUrl}${path12}`, {
  method: "POST",
  headers,
  body: options?.body
@@ -492,7 +500,7 @@ var ApiClient = class {
  /**
  * Generic DELETE request
  */
- async delete(path13) {
+ async delete(path12) {
  const baseUrl = await this.getBaseUrl();
  const token = await getToken();
  if (!token) {
@@ -505,7 +513,7 @@ var ApiClient = class {
  if (bypassSecret) {
  headers["x-vercel-protection-bypass"] = bypassSecret;
  }
- return fetch(`${baseUrl}${path13}`, {
+ return fetch(`${baseUrl}${path12}`, {
  method: "DELETE",
  headers
  });
@@ -709,10 +717,9 @@ function validateAgentCompose(config2) {
  }

  // src/lib/system-storage.ts
- import * as fs2 from "fs/promises";
- import * as path2 from "path";
- import * as os2 from "os";
- import * as tar from "tar";
+ import * as fs4 from "fs/promises";
+ import * as path4 from "path";
+ import * as os3 from "os";

  // src/lib/github-skills.ts
  import * as fs from "fs/promises";
@@ -787,100 +794,330 @@ async function validateSkillDirectory(skillDir) {
  }
  }

+ // src/lib/direct-upload.ts
+ import { createHash } from "crypto";
+ import * as fs3 from "fs";
+ import * as path3 from "path";
+ import * as os2 from "os";
+ import * as tar2 from "tar";
+
+ // src/lib/file-utils.ts
+ import * as fs2 from "fs";
+ import * as path2 from "path";
+ import * as tar from "tar";
+ function excludeVm0Filter(filePath) {
+ const shouldExclude = filePath === ".vm0" || filePath.startsWith(".vm0/") || filePath.startsWith("./.vm0");
+ return !shouldExclude;
+ }
+ function listTarFiles(tarPath) {
+ return new Promise((resolve2, reject) => {
+ const files = [];
+ tar.list({
+ file: tarPath,
+ onReadEntry: (entry) => {
+ if (entry.type === "File") {
+ files.push(entry.path);
+ }
+ }
+ }).then(() => resolve2(files)).catch(reject);
+ });
+ }
+ async function listLocalFiles(dir, excludeDirs = [".vm0"]) {
+ const files = [];
+ async function walkDir(currentDir, relativePath = "") {
+ const entries = await fs2.promises.readdir(currentDir, {
+ withFileTypes: true
+ });
+ for (const entry of entries) {
+ const entryRelativePath = relativePath ? path2.join(relativePath, entry.name) : entry.name;
+ if (entry.isDirectory()) {
+ if (!excludeDirs.includes(entry.name)) {
+ await walkDir(path2.join(currentDir, entry.name), entryRelativePath);
+ }
+ } else {
+ files.push(entryRelativePath);
+ }
+ }
+ }
+ await walkDir(dir);
+ return files;
+ }
+ async function removeExtraFiles(dir, remoteFiles, excludeDirs = [".vm0"]) {
+ const localFiles = await listLocalFiles(dir, excludeDirs);
+ let removedCount = 0;
+ for (const localFile of localFiles) {
+ const normalizedPath = localFile.replace(/\\/g, "/");
+ if (!remoteFiles.has(normalizedPath)) {
+ const fullPath = path2.join(dir, localFile);
+ await fs2.promises.unlink(fullPath);
+ removedCount++;
+ }
+ }
+ await removeEmptyDirs(dir, excludeDirs);
+ return removedCount;
+ }
+ async function removeEmptyDirs(dir, excludeDirs = [".vm0"]) {
+ const entries = await fs2.promises.readdir(dir, { withFileTypes: true });
+ let isEmpty = true;
+ for (const entry of entries) {
+ const fullPath = path2.join(dir, entry.name);
+ if (entry.isDirectory()) {
+ if (excludeDirs.includes(entry.name)) {
+ isEmpty = false;
+ } else {
+ const subDirEmpty = await removeEmptyDirs(fullPath, excludeDirs);
+ if (subDirEmpty) {
+ await fs2.promises.rmdir(fullPath);
+ } else {
+ isEmpty = false;
+ }
+ }
+ } else {
+ isEmpty = false;
+ }
+ }
+ return isEmpty;
+ }
+
+ // src/lib/direct-upload.ts
+ async function hashFileStream(filePath) {
+ return new Promise((resolve2, reject) => {
+ const hash2 = createHash("sha256");
+ const stream = fs3.createReadStream(filePath);
+ stream.on("data", (chunk) => hash2.update(chunk));
+ stream.on("end", () => resolve2(hash2.digest("hex")));
+ stream.on("error", reject);
+ });
+ }
+ async function getAllFiles(dirPath, baseDir = dirPath) {
+ const files = [];
+ const entries = await fs3.promises.readdir(dirPath, { withFileTypes: true });
+ for (const entry of entries) {
+ const fullPath = path3.join(dirPath, entry.name);
+ const relativePath = path3.relative(baseDir, fullPath);
+ if (relativePath.startsWith(".vm0")) {
+ continue;
+ }
+ if (entry.isDirectory()) {
+ const subFiles = await getAllFiles(fullPath, baseDir);
+ files.push(...subFiles);
+ } else {
+ files.push(fullPath);
+ }
+ }
+ return files;
+ }
+ async function collectFileMetadata(cwd, files, onProgress) {
+ const fileEntries = [];
+ for (let i = 0; i < files.length; i++) {
+ const file2 = files[i];
+ const relativePath = path3.relative(cwd, file2);
+ const [hash2, stats] = await Promise.all([
+ hashFileStream(file2),
+ fs3.promises.stat(file2)
+ ]);
+ fileEntries.push({
+ path: relativePath,
+ hash: hash2,
+ size: stats.size
+ });
+ if (onProgress && (i + 1) % 100 === 0) {
+ onProgress(`Hashing files... ${i + 1}/${files.length}`);
+ }
+ }
+ return fileEntries;
+ }
+ async function createArchive(cwd, files) {
+ const tmpDir = fs3.mkdtempSync(path3.join(os2.tmpdir(), "vm0-"));
+ const tarPath = path3.join(tmpDir, "archive.tar.gz");
+ try {
+ const relativePaths = files.map((file2) => path3.relative(cwd, file2));
+ if (relativePaths.length > 0) {
+ await tar2.create(
+ {
+ gzip: true,
+ file: tarPath,
+ cwd
+ },
+ relativePaths
+ );
+ } else {
+ await tar2.create(
+ {
+ gzip: true,
+ file: tarPath,
+ cwd,
+ filter: excludeVm0Filter
+ },
+ ["."]
+ );
+ }
+ const tarBuffer = await fs3.promises.readFile(tarPath);
+ return tarBuffer;
+ } finally {
+ if (fs3.existsSync(tarPath)) {
+ await fs3.promises.unlink(tarPath);
+ }
+ await fs3.promises.rmdir(tmpDir);
+ }
+ }
+ function createManifest(files) {
+ const manifest = {
+ version: 1,
+ files,
+ createdAt: (/* @__PURE__ */ new Date()).toISOString()
+ };
+ return Buffer.from(JSON.stringify(manifest, null, 2));
+ }
+ function sleep(ms) {
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
+ }
+ async function uploadToPresignedUrl(presignedUrl, data, contentType, maxRetries = 3) {
+ let lastError = null;
+ for (let attempt = 1; attempt <= maxRetries; attempt++) {
+ try {
+ const response = await fetch(presignedUrl, {
+ method: "PUT",
+ body: data,
+ headers: {
+ "Content-Type": contentType
+ }
+ });
+ if (response.ok) {
+ return;
+ }
+ if (response.status >= 400 && response.status < 500) {
+ const text2 = await response.text();
+ throw new Error(`S3 upload failed: ${response.status} - ${text2}`);
+ }
+ const text = await response.text();
+ lastError = new Error(`S3 upload failed: ${response.status} - ${text}`);
+ } catch (error43) {
+ lastError = error43 instanceof Error ? error43 : new Error("Unknown upload error");
+ if (lastError.message.includes("400") || lastError.message.includes("403")) {
+ throw lastError;
+ }
+ }
+ if (attempt < maxRetries) {
+ const backoffMs = Math.pow(2, attempt - 1) * 1e3;
+ await sleep(backoffMs);
+ }
+ }
+ throw lastError || new Error("S3 upload failed after retries");
+ }
+ async function directUpload(storageName, storageType, cwd, options) {
+ const { onProgress, force } = options || {};
+ onProgress?.("Collecting files...");
+ const files = await getAllFiles(cwd);
+ onProgress?.("Computing file hashes...");
+ const fileEntries = await collectFileMetadata(cwd, files, onProgress);
+ const totalSize = fileEntries.reduce((sum, f) => sum + f.size, 0);
+ onProgress?.("Preparing upload...");
+ const prepareResponse = await apiClient.post("/api/storages/prepare", {
+ body: JSON.stringify({
+ storageName,
+ storageType,
+ files: fileEntries,
+ force
+ })
+ });
+ if (!prepareResponse.ok) {
+ const error43 = await prepareResponse.json();
+ throw new Error(error43.error?.message || "Prepare failed");
+ }
+ const prepareResult = await prepareResponse.json();
+ if (prepareResult.existing) {
+ return {
+ versionId: prepareResult.versionId,
+ size: totalSize,
+ fileCount: fileEntries.length,
+ deduplicated: true,
+ empty: fileEntries.length === 0
+ };
+ }
+ onProgress?.("Compressing files...");
+ const archiveBuffer = await createArchive(cwd, files);
+ onProgress?.("Uploading archive to S3...");
+ if (!prepareResult.uploads) {
+ throw new Error("No upload URLs received from prepare endpoint");
+ }
+ await uploadToPresignedUrl(
+ prepareResult.uploads.archive.presignedUrl,
+ archiveBuffer,
+ "application/gzip"
+ );
+ onProgress?.("Uploading manifest...");
+ const manifestBuffer = createManifest(fileEntries);
+ await uploadToPresignedUrl(
+ prepareResult.uploads.manifest.presignedUrl,
+ manifestBuffer,
+ "application/json"
+ );
+ onProgress?.("Committing...");
+ const commitResponse = await apiClient.post("/api/storages/commit", {
+ body: JSON.stringify({
+ storageName,
+ storageType,
+ versionId: prepareResult.versionId,
+ files: fileEntries
+ })
+ });
+ if (!commitResponse.ok) {
+ const error43 = await commitResponse.json();
+ throw new Error(error43.error?.message || "Commit failed");
+ }
+ const commitResult = await commitResponse.json();
+ return {
+ versionId: commitResult.versionId,
+ size: commitResult.size,
+ fileCount: commitResult.fileCount,
+ deduplicated: commitResult.deduplicated || false,
+ empty: commitResult.fileCount === 0
+ };
+ }
+
  // src/lib/system-storage.ts
  async function uploadSystemPrompt(agentName, promptFilePath, basePath) {
  const storageName = getSystemPromptStorageName(agentName);
- const absolutePath = path2.isAbsolute(promptFilePath) ? promptFilePath : path2.join(basePath, promptFilePath);
- const content = await fs2.readFile(absolutePath, "utf8");
- const tmpDir = await fs2.mkdtemp(path2.join(os2.tmpdir(), "vm0-prompt-"));
- const promptDir = path2.join(tmpDir, "prompt");
- await fs2.mkdir(promptDir);
- await fs2.writeFile(path2.join(promptDir, "CLAUDE.md"), content);
+ const absolutePath = path4.isAbsolute(promptFilePath) ? promptFilePath : path4.join(basePath, promptFilePath);
+ const content = await fs4.readFile(absolutePath, "utf8");
+ const tmpDir = await fs4.mkdtemp(path4.join(os3.tmpdir(), "vm0-prompt-"));
+ const promptDir = path4.join(tmpDir, "prompt");
+ await fs4.mkdir(promptDir);
+ await fs4.writeFile(path4.join(promptDir, "CLAUDE.md"), content);
  try {
- const tarPath = path2.join(tmpDir, "prompt.tar.gz");
- await tar.create(
- {
- gzip: true,
- file: tarPath,
- cwd: promptDir
- },
- ["."]
- );
- const tarBuffer = await fs2.readFile(tarPath);
- const formData = new FormData();
- formData.append("name", storageName);
- formData.append("type", "volume");
- formData.append(
- "file",
- new Blob([new Uint8Array(tarBuffer)], { type: "application/gzip" }),
- "volume.tar.gz"
- );
- const response = await apiClient.post("/api/storages", {
- body: formData
- });
- if (!response.ok) {
- const errorBody = await response.json();
- const errorMessage = typeof errorBody.error === "string" ? errorBody.error : errorBody.error?.message || "Upload failed";
- throw new Error(errorMessage);
- }
- const result = await response.json();
+ const result = await directUpload(storageName, "volume", promptDir);
  return {
  name: storageName,
  versionId: result.versionId,
  action: result.deduplicated ? "deduplicated" : "created"
  };
  } finally {
- await fs2.rm(tmpDir, { recursive: true, force: true });
+ await fs4.rm(tmpDir, { recursive: true, force: true });
  }
  }
  async function uploadSystemSkill(skillUrl) {
  const parsed = parseGitHubTreeUrl(skillUrl);
  const storageName = getSkillStorageName(parsed);
- const tmpDir = await fs2.mkdtemp(path2.join(os2.tmpdir(), "vm0-skill-"));
+ const tmpDir = await fs4.mkdtemp(path4.join(os3.tmpdir(), "vm0-skill-"));
  try {
  const skillDir = await downloadGitHubSkill(parsed, tmpDir);
  await validateSkillDirectory(skillDir);
- const tarPath = path2.join(tmpDir, "skill.tar.gz");
- await tar.create(
- {
- gzip: true,
- file: tarPath,
- cwd: skillDir
- },
- ["."]
- );
- const tarBuffer = await fs2.readFile(tarPath);
- const formData = new FormData();
- formData.append("name", storageName);
- formData.append("type", "volume");
- formData.append(
- "file",
- new Blob([new Uint8Array(tarBuffer)], { type: "application/gzip" }),
- "volume.tar.gz"
- );
- const response = await apiClient.post("/api/storages", {
- body: formData
- });
- if (!response.ok) {
- const errorBody = await response.json();
- const errorMessage = typeof errorBody.error === "string" ? errorBody.error : errorBody.error?.message || "Upload failed";
- throw new Error(errorMessage);
- }
- const result = await response.json();
+ const result = await directUpload(storageName, "volume", skillDir);
  return {
  name: storageName,
  versionId: result.versionId,
  action: result.deduplicated ? "deduplicated" : "created"
  };
  } finally {
- await fs2.rm(tmpDir, { recursive: true, force: true });
+ await fs4.rm(tmpDir, { recursive: true, force: true });
  }
  }

  // src/commands/compose.ts
  var composeCommand = new Command().name("compose").description("Create or update agent compose").argument("<config-file>", "Path to config YAML file").action(async (configFile) => {
  try {
- if (!existsSync2(configFile)) {
+ if (!existsSync3(configFile)) {
  console.error(chalk2.red(`\u2717 Config file not found: ${configFile}`));
  process.exit(1);
  }
@@ -1003,8 +1240,8 @@ var composeCommand = new Command().name("compose").description("Create or update
  // src/commands/run.ts
  import { Command as Command2 } from "commander";
  import chalk4 from "chalk";
- import * as fs3 from "fs";
- import * as path3 from "path";
+ import * as fs5 from "fs";
+ import * as path5 from "path";
  import { config as dotenvConfig } from "dotenv";

  // src/lib/event-parser.ts
@@ -2088,15 +2325,15 @@ function mergeDefs(...defs) {
  function cloneDef(schema) {
  return mergeDefs(schema._zod.def);
  }
- function getElementAtPath(obj, path13) {
- if (!path13)
+ function getElementAtPath(obj, path12) {
+ if (!path12)
  return obj;
- return path13.reduce((acc, key) => acc?.[key], obj);
+ return path12.reduce((acc, key) => acc?.[key], obj);
  }
  function promiseAllObject(promisesObj) {
  const keys = Object.keys(promisesObj);
- const promises6 = keys.map((key) => promisesObj[key]);
- return Promise.all(promises6).then((results) => {
+ const promises5 = keys.map((key) => promisesObj[key]);
+ return Promise.all(promises5).then((results) => {
  const resolvedObj = {};
  for (let i = 0; i < keys.length; i++) {
  resolvedObj[keys[i]] = results[i];
@@ -2450,11 +2687,11 @@ function aborted(x, startIndex = 0) {
  }
  return false;
  }
- function prefixIssues(path13, issues) {
+ function prefixIssues(path12, issues) {
  return issues.map((iss) => {
  var _a;
  (_a = iss).path ?? (_a.path = []);
- iss.path.unshift(path13);
+ iss.path.unshift(path12);
  return iss;
  });
  }
@@ -2622,7 +2859,7 @@ function treeifyError(error43, _mapper) {
  return issue2.message;
  };
  const result = { errors: [] };
- const processError = (error44, path13 = []) => {
+ const processError = (error44, path12 = []) => {
  var _a, _b;
  for (const issue2 of error44.issues) {
  if (issue2.code === "invalid_union" && issue2.errors.length) {
@@ -2632,7 +2869,7 @@ function treeifyError(error43, _mapper) {
  } else if (issue2.code === "invalid_element") {
  processError({ issues: issue2.issues }, issue2.path);
  } else {
- const fullpath = [...path13, ...issue2.path];
+ const fullpath = [...path12, ...issue2.path];
  if (fullpath.length === 0) {
  result.errors.push(mapper(issue2));
  continue;
@@ -2664,8 +2901,8 @@ function treeifyError(error43, _mapper) {
  }
  function toDotPath(_path) {
  const segs = [];
- const path13 = _path.map((seg) => typeof seg === "object" ? seg.key : seg);
- for (const seg of path13) {
+ const path12 = _path.map((seg) => typeof seg === "object" ? seg.key : seg);
+ for (const seg of path12) {
  if (typeof seg === "number")
  segs.push(`[${seg}]`);
  else if (typeof seg === "symbol")
@@ -14536,9 +14773,9 @@ function loadValues(cliValues, configNames) {
  const result = { ...cliValues };
  const missingNames = configNames.filter((name) => !(name in result));
  if (missingNames.length > 0) {
- const envFilePath = path3.resolve(process.cwd(), ".env");
+ const envFilePath = path5.resolve(process.cwd(), ".env");
  let dotenvValues = {};
- if (fs3.existsSync(envFilePath)) {
+ if (fs5.existsSync(envFilePath)) {
  const dotenvResult = dotenvConfig({ path: envFilePath });
  if (dotenvResult.parsed) {
  dotenvValues = Object.fromEntries(
@@ -15021,13 +15258,13 @@ import { Command as Command6 } from "commander";
  // src/commands/volume/init.ts
  import { Command as Command3 } from "commander";
  import chalk5 from "chalk";
- import path5 from "path";
+ import path7 from "path";

  // src/lib/storage-utils.ts
  import { readFile as readFile4, writeFile as writeFile4, mkdir as mkdir4 } from "fs/promises";
- import { existsSync as existsSync4 } from "fs";
+ import { existsSync as existsSync5 } from "fs";
  import { parse as parseYaml2, stringify as stringifyYaml } from "yaml";
- import path4 from "path";
+ import path6 from "path";
  var CONFIG_DIR2 = ".vm0";
  var CONFIG_FILE2 = "storage.yaml";
  function isValidStorageName(name) {
@@ -15038,12 +15275,12 @@ function isValidStorageName(name) {
  return pattern.test(name) && !name.includes("--");
  }
  async function readStorageConfig(basePath = process.cwd()) {
- const configPath = path4.join(basePath, CONFIG_DIR2, CONFIG_FILE2);
- const legacyConfigPath = path4.join(basePath, CONFIG_DIR2, "volume.yaml");
+ const configPath = path6.join(basePath, CONFIG_DIR2, CONFIG_FILE2);
+ const legacyConfigPath = path6.join(basePath, CONFIG_DIR2, "volume.yaml");
  let actualPath = null;
- if (existsSync4(configPath)) {
+ if (existsSync5(configPath)) {
  actualPath = configPath;
- } else if (existsSync4(legacyConfigPath)) {
+ } else if (existsSync5(legacyConfigPath)) {
  actualPath = legacyConfigPath;
  }
  if (!actualPath) {
@@ -15057,9 +15294,9 @@ async function readStorageConfig(basePath = process.cwd()) {
  return config2;
  }
  async function writeStorageConfig(storageName, basePath = process.cwd(), type = "volume") {
- const configDir = path4.join(basePath, CONFIG_DIR2);
- const configPath = path4.join(configDir, CONFIG_FILE2);
- if (!existsSync4(configDir)) {
+ const configDir = path6.join(basePath, CONFIG_DIR2);
+ const configPath = path6.join(configDir, CONFIG_FILE2);
+ if (!existsSync5(configDir)) {
  await mkdir4(configDir, { recursive: true });
  }
  const config2 = {
@@ -15074,14 +15311,14 @@ async function writeStorageConfig(storageName, basePath = process.cwd(), type =
  var initCommand = new Command3().name("init").description("Initialize a volume in the current directory").action(async () => {
  try {
  const cwd = process.cwd();
- const dirName = path5.basename(cwd);
+ const dirName = path7.basename(cwd);
  const existingConfig = await readStorageConfig(cwd);
  if (existingConfig) {
  console.log(
  chalk5.yellow(`Volume already initialized: ${existingConfig.name}`)
  );
  console.log(
- chalk5.gray(`Config file: ${path5.join(cwd, ".vm0", "storage.yaml")}`)
+ chalk5.gray(`Config file: ${path7.join(cwd, ".vm0", "storage.yaml")}`)
  );
  return;
  }
@@ -15102,7 +15339,7 @@ var initCommand = new Command3().name("init").description("Initialize a volume i
  console.log(chalk5.green(`\u2713 Initialized volume: ${volumeName}`));
  console.log(
  chalk5.gray(
- `\u2713 Config saved to ${path5.join(cwd, ".vm0", "storage.yaml")}`
+ `\u2713 Config saved to ${path7.join(cwd, ".vm0", "storage.yaml")}`
  )
  );
  } catch (error43) {
@@ -15117,108 +15354,6 @@ var initCommand = new Command3().name("init").description("Initialize a volume i
  // src/commands/volume/push.ts
  import { Command as Command4 } from "commander";
  import chalk6 from "chalk";
- import path7 from "path";
- import * as fs5 from "fs";
- import * as os3 from "os";
- import * as tar3 from "tar";
-
- // src/lib/file-utils.ts
- import * as fs4 from "fs";
- import * as path6 from "path";
- import * as tar2 from "tar";
- function excludeVm0Filter(filePath) {
- const shouldExclude = filePath === ".vm0" || filePath.startsWith(".vm0/") || filePath.startsWith("./.vm0");
- return !shouldExclude;
- }
- function listTarFiles(tarPath) {
- return new Promise((resolve2, reject) => {
- const files = [];
- tar2.list({
- file: tarPath,
- onReadEntry: (entry) => {
- if (entry.type === "File") {
- files.push(entry.path);
- }
- }
- }).then(() => resolve2(files)).catch(reject);
- });
- }
- async function listLocalFiles(dir, excludeDirs = [".vm0"]) {
- const files = [];
- async function walkDir(currentDir, relativePath = "") {
- const entries = await fs4.promises.readdir(currentDir, {
- withFileTypes: true
- });
- for (const entry of entries) {
- const entryRelativePath = relativePath ? path6.join(relativePath, entry.name) : entry.name;
- if (entry.isDirectory()) {
- if (!excludeDirs.includes(entry.name)) {
- await walkDir(path6.join(currentDir, entry.name), entryRelativePath);
- }
- } else {
- files.push(entryRelativePath);
- }
- }
- }
- await walkDir(dir);
- return files;
- }
- async function removeExtraFiles(dir, remoteFiles, excludeDirs = [".vm0"]) {
- const localFiles = await listLocalFiles(dir, excludeDirs);
- let removedCount = 0;
- for (const localFile of localFiles) {
- const normalizedPath = localFile.replace(/\\/g, "/");
- if (!remoteFiles.has(normalizedPath)) {
- const fullPath = path6.join(dir, localFile);
- await fs4.promises.unlink(fullPath);
- removedCount++;
- }
- }
- await removeEmptyDirs(dir, excludeDirs);
- return removedCount;
- }
- async function removeEmptyDirs(dir, excludeDirs = [".vm0"]) {
- const entries = await fs4.promises.readdir(dir, { withFileTypes: true });
- let isEmpty = true;
- for (const entry of entries) {
- const fullPath = path6.join(dir, entry.name);
- if (entry.isDirectory()) {
- if (excludeDirs.includes(entry.name)) {
- isEmpty = false;
- } else {
- const subDirEmpty = await removeEmptyDirs(fullPath, excludeDirs);
- if (subDirEmpty) {
- await fs4.promises.rmdir(fullPath);
- } else {
- isEmpty = false;
- }
- }
- } else {
- isEmpty = false;
- }
- }
- return isEmpty;
- }
-
- // src/commands/volume/push.ts
- async function getAllFiles(dirPath, baseDir = dirPath) {
- const files = [];
- const entries = await fs5.promises.readdir(dirPath, { withFileTypes: true });
- for (const entry of entries) {
- const fullPath = path7.join(dirPath, entry.name);
- const relativePath = path7.relative(baseDir, fullPath);
- if (relativePath.startsWith(".vm0")) {
- continue;
- }
- if (entry.isDirectory()) {
- const subFiles = await getAllFiles(fullPath, baseDir);
- files.push(...subFiles);
- } else {
- files.push(fullPath);
- }
- }
- return files;
- }
  function formatBytes(bytes) {
  if (bytes === 0) return "0 B";
  const k = 1024;
@@ -15239,73 +15374,16 @@ var pushCommand = new Command4().name("push").description("Push local files to c
  process.exit(1);
  }
  console.log(chalk6.cyan(`Pushing volume: ${config2.name}`));
- console.log(chalk6.gray("Collecting files..."));
- const files = await getAllFiles(cwd);
- let totalSize = 0;
- for (const file2 of files) {
- const stats = await fs5.promises.stat(file2);
- totalSize += stats.size;
- }
- if (files.length === 0) {
- console.log(chalk6.gray("No files found (empty volume)"));
- } else {
- console.log(
- chalk6.gray(`Found ${files.length} files (${formatBytes(totalSize)})`)
- );
- }
- console.log(chalk6.gray("Compressing files..."));
- const tmpDir = fs5.mkdtempSync(path7.join(os3.tmpdir(), "vm0-"));
- const tarPath = path7.join(tmpDir, "volume.tar.gz");
- const relativePaths = files.map((file2) => path7.relative(cwd, file2));
- if (relativePaths.length > 0) {
- await tar3.create(
- {
- gzip: true,
- file: tarPath,
- cwd
- },
- relativePaths
- );
- } else {
- await tar3.create(
- {
- gzip: true,
- file: tarPath,
- cwd,
- filter: excludeVm0Filter
- },
- ["."]
- );
- }
- const tarBuffer = await fs5.promises.readFile(tarPath);
- await fs5.promises.unlink(tarPath);
- await fs5.promises.rmdir(tmpDir);
- console.log(
- chalk6.green(`\u2713 Compressed to ${formatBytes(tarBuffer.length)}`)
- );
- console.log(chalk6.gray("Uploading..."));
- const formData = new FormData();
- formData.append("name", config2.name);
- formData.append("type", "volume");
- if (options.force) {
- formData.append("force", "true");
- }
- formData.append(
- "file",
- new Blob([tarBuffer], { type: "application/gzip" }),
- "volume.tar.gz"
- );
- const response = await apiClient.post("/api/storages", {
- body: formData
+ const result = await directUpload(config2.name, "volume", cwd, {
+ onProgress: (message) => {
+ console.log(chalk6.gray(message));
+ },
+ force: options.force
  });
- if (!response.ok) {
- const error43 = await response.json();
- const message = error43.cause ? `${error43.error} (cause: ${error43.cause})` : error43.error || "Upload failed";
- throw new Error(message);
- }
- const result = await response.json();
  const shortVersion = result.versionId.slice(0, 8);
- if (result.deduplicated) {
+ if (result.empty) {
+ console.log(chalk6.yellow("No files found (empty volume)"));
+ } else if (result.deduplicated) {
  console.log(chalk6.green("\u2713 Content unchanged (deduplicated)"));
  } else {
  console.log(chalk6.green("\u2713 Upload complete"));
@@ -15328,7 +15406,7 @@ import chalk8 from "chalk";
  import path8 from "path";
  import * as fs6 from "fs";
  import * as os4 from "os";
- import * as tar4 from "tar";
+ import * as tar3 from "tar";

  // src/lib/pull-utils.ts
  import chalk7 from "chalk";
@@ -15366,8 +15444,8 @@ var pullCommand = new Command5().name("pull").description("Pull cloud files to l
  } else {
  console.log(chalk8.cyan(`Pulling volume: ${config2.name}`));
  }
- console.log(chalk8.gray("Downloading..."));
- let url2 = `/api/storages?name=${encodeURIComponent(config2.name)}&type=volume`;
+ console.log(chalk8.gray("Getting download URL..."));
+ let url2 = `/api/storages/download?name=${encodeURIComponent(config2.name)}&type=volume`;
  if (versionId) {
  url2 += `&version=${encodeURIComponent(versionId)}`;
  }
@@ -15389,11 +15467,20 @@ var pullCommand = new Command5().name("pull").description("Pull cloud files to l
  }
  process.exit(1);
  }
- if (response.status === 204) {
+ const downloadInfo = await response.json();
+ if (downloadInfo.empty) {
  await handleEmptyStorageResponse(cwd);
  return;
  }
- const arrayBuffer = await response.arrayBuffer();
+ if (!downloadInfo.url) {
+ throw new Error("No download URL returned");
+ }
+ console.log(chalk8.gray("Downloading from S3..."));
+ const s3Response = await fetch(downloadInfo.url);
+ if (!s3Response.ok) {
+ throw new Error(`S3 download failed: ${s3Response.status}`);
+ }
+ const arrayBuffer = await s3Response.arrayBuffer();
  const tarBuffer = Buffer.from(arrayBuffer);
  console.log(chalk8.green(`\u2713 Downloaded ${formatBytes2(tarBuffer.length)}`));
  const tmpDir = fs6.mkdtempSync(path8.join(os4.tmpdir(), "vm0-"));
@@ -15411,7 +15498,7 @@ var pullCommand = new Command5().name("pull").description("Pull cloud files to l
  );
  }
  console.log(chalk8.gray("Extracting files..."));
- await tar4.extract({
+ await tar3.extract({
  file: tarPath,
  cwd,
  gzip: true
@@ -15499,28 +15586,6 @@ var initCommand2 = new Command7().name("init").description("Initialize an artifa
  // src/commands/artifact/push.ts
  import { Command as Command8 } from "commander";
  import chalk10 from "chalk";
- import path10 from "path";
- import * as fs7 from "fs";
- import * as os5 from "os";
- import * as tar5 from "tar";
- async function getAllFiles2(dirPath, baseDir = dirPath) {
- const files = [];
- const entries = await fs7.promises.readdir(dirPath, { withFileTypes: true });
- for (const entry of entries) {
- const fullPath = path10.join(dirPath, entry.name);
- const relativePath = path10.relative(baseDir, fullPath);
- if (relativePath.startsWith(".vm0")) {
- continue;
- }
- if (entry.isDirectory()) {
- const subFiles = await getAllFiles2(fullPath, baseDir);
- files.push(...subFiles);
- } else {
- files.push(fullPath);
- }
- }
- return files;
- }
  function formatBytes3(bytes) {
  if (bytes === 0) return "0 B";
  const k = 1024;
@@ -15550,72 +15615,16 @@ var pushCommand2 = new Command8().name("push").description("Push local files to
  process.exit(1);
  }
  console.log(chalk10.cyan(`Pushing artifact: ${config2.name}`));
- console.log(chalk10.gray("Collecting files..."));
- const files = await getAllFiles2(cwd);
- let totalSize = 0;
- for (const file2 of files) {
- const stats = await fs7.promises.stat(file2);
- totalSize += stats.size;
- }
- if (files.length === 0) {
- console.log(chalk10.gray("No files found (empty artifact)"));
- } else {
- console.log(
- chalk10.gray(`Found ${files.length} files (${formatBytes3(totalSize)})`)
- );
- }
- console.log(chalk10.gray("Compressing files..."));
- const tmpDir = fs7.mkdtempSync(path10.join(os5.tmpdir(), "vm0-"));
- const tarPath = path10.join(tmpDir, "artifact.tar.gz");
- const relativePaths = files.map((file2) => path10.relative(cwd, file2));
- if (relativePaths.length > 0) {
- await tar5.create(
- {
- gzip: true,
- file: tarPath,
- cwd
- },
- relativePaths
- );
- } else {
- await tar5.create(
- {
- gzip: true,
- file: tarPath,
- cwd,
- filter: excludeVm0Filter
- },
- ["."]
- );
- }
- const tarBuffer = await fs7.promises.readFile(tarPath);
- await fs7.promises.unlink(tarPath);
- await fs7.promises.rmdir(tmpDir);
- console.log(
- chalk10.green(`\u2713 Compressed to ${formatBytes3(tarBuffer.length)}`)
- );
- console.log(chalk10.gray("Uploading..."));
- const formData = new FormData();
- formData.append("name", config2.name);
- formData.append("type", "artifact");
- if (options.force) {
- formData.append("force", "true");
- }
- formData.append(
- "file",
- new Blob([tarBuffer], { type: "application/gzip" }),
- "artifact.tar.gz"
- );
- const response = await apiClient.post("/api/storages", {
- body: formData
+ const result = await directUpload(config2.name, "artifact", cwd, {
+ onProgress: (message) => {
+ console.log(chalk10.gray(message));
+ },
+ force: options.force
  });
- if (!response.ok) {
- const error43 = await response.json();
- throw new Error(error43.error || "Upload failed");
- }
- const result = await response.json();
  const shortVersion = result.versionId.slice(0, 8);
- if (result.deduplicated) {
+ if (result.empty) {
+ console.log(chalk10.yellow("No files found (empty artifact)"));
+ } else if (result.deduplicated) {
  console.log(chalk10.green("\u2713 Content unchanged (deduplicated)"));
  } else {
  console.log(chalk10.green("\u2713 Upload complete"));
@@ -15635,10 +15644,10 @@ var pushCommand2 = new Command8().name("push").description("Push local files to
  // src/commands/artifact/pull.ts
  import { Command as Command9 } from "commander";
  import chalk11 from "chalk";
- import path11 from "path";
- import * as fs8 from "fs";
- import * as os6 from "os";
- import * as tar6 from "tar";
+ import path10 from "path";
+ import * as fs7 from "fs";
+ import * as os5 from "os";
+ import * as tar4 from "tar";
  function formatBytes4(bytes) {
  if (bytes === 0) return "0 B";
  const k = 1024;
@@ -15673,8 +15682,8 @@ var pullCommand2 = new Command9().name("pull").description("Pull cloud artifact
  } else {
  console.log(chalk11.cyan(`Pulling artifact: ${config2.name}`));
  }
- console.log(chalk11.gray("Downloading..."));
- let url2 = `/api/storages?name=${encodeURIComponent(config2.name)}&type=artifact`;
+ console.log(chalk11.gray("Getting download URL..."));
+ let url2 = `/api/storages/download?name=${encodeURIComponent(config2.name)}&type=artifact`;
  if (versionId) {
  url2 += `&version=${encodeURIComponent(versionId)}`;
  }
@@ -15696,16 +15705,25 @@ var pullCommand2 = new Command9().name("pull").description("Pull cloud artifact
  }
  process.exit(1);
  }
- if (response.status === 204) {
+ const downloadInfo = await response.json();
+ if (downloadInfo.empty) {
  await handleEmptyStorageResponse(cwd);
  return;
  }
- const arrayBuffer = await response.arrayBuffer();
+ if (!downloadInfo.url) {
+ throw new Error("No download URL returned");
+ }
+ console.log(chalk11.gray("Downloading from S3..."));
+ const s3Response = await fetch(downloadInfo.url);
+ if (!s3Response.ok) {
+ throw new Error(`S3 download failed: ${s3Response.status}`);
+ }
+ const arrayBuffer = await s3Response.arrayBuffer();
  const tarBuffer = Buffer.from(arrayBuffer);
  console.log(chalk11.green(`\u2713 Downloaded ${formatBytes4(tarBuffer.length)}`));
- const tmpDir = fs8.mkdtempSync(path11.join(os6.tmpdir(), "vm0-"));
- const tarPath = path11.join(tmpDir, "artifact.tar.gz");
- await fs8.promises.writeFile(tarPath, tarBuffer);
+ const tmpDir = fs7.mkdtempSync(path10.join(os5.tmpdir(), "vm0-"));
+ const tarPath = path10.join(tmpDir, "artifact.tar.gz");
+ await fs7.promises.writeFile(tarPath, tarBuffer);
  console.log(chalk11.gray("Syncing local files..."));
  const remoteFiles = await listTarFiles(tarPath);
  const remoteFilesSet = new Set(
@@ -15718,13 +15736,13 @@ var pullCommand2 = new Command9().name("pull").description("Pull cloud artifact
  );
  }
  console.log(chalk11.gray("Extracting files..."));
- await tar6.extract({
+ await tar4.extract({
  file: tarPath,
  cwd,
  gzip: true
  });
- await fs8.promises.unlink(tarPath);
- await fs8.promises.rmdir(tmpDir);
+ await fs7.promises.unlink(tarPath);
+ await fs7.promises.rmdir(tmpDir);
  console.log(chalk11.green(`\u2713 Extracted ${remoteFiles.length} files`));
  } catch (error43) {
  console.error(chalk11.red("\u2717 Pull failed"));
@@ -15742,8 +15760,8 @@ var artifactCommand = new Command10().name("artifact").description("Manage cloud
  import { Command as Command11 } from "commander";
  import chalk13 from "chalk";
  import { readFile as readFile5, mkdir as mkdir5, writeFile as writeFile5, appendFile } from "fs/promises";
- import { existsSync as existsSync5, readFileSync } from "fs";
- import path12 from "path";
+ import { existsSync as existsSync6, readFileSync } from "fs";
+ import path11 from "path";
  import { spawn as spawn2 } from "child_process";
  import { parse as parseYaml3 } from "yaml";
  import { config as dotenvConfig2 } from "dotenv";
@@ -15755,6 +15773,13 @@ import chalk12 from "chalk";
  var PACKAGE_NAME = "@vm0/cli";
  var NPM_REGISTRY_URL = `https://registry.npmjs.org/${encodeURIComponent(PACKAGE_NAME)}/latest`;
  var TIMEOUT_MS = 5e3;
+ function detectPackageManager() {
+ const execPath = process.argv[1] ?? "";
+ if (execPath.includes("pnpm")) {
+ return "pnpm";
+ }
+ return "npm";
+ }
  function escapeForShell(str) {
  return `"${str.replace(/"/g, '\\"')}"`;
  }
@@ -15789,12 +15814,14 @@ function getLatestVersion() {
  });
  });
  }
- function performUpgrade() {
+ function performUpgrade(packageManager) {
  return new Promise((resolve2) => {
- const npm = process.platform === "win32" ? "npm.cmd" : "npm";
- const child = spawn(npm, ["install", "-g", `${PACKAGE_NAME}@latest`], {
+ const isWindows = process.platform === "win32";
+ const command = isWindows ? `${packageManager}.cmd` : packageManager;
+ const args = packageManager === "pnpm" ? ["add", "-g", `${PACKAGE_NAME}@latest`] : ["install", "-g", `${PACKAGE_NAME}@latest`];
+ const child = spawn(command, args, {
  stdio: "inherit",
- shell: process.platform === "win32"
+ shell: isWindows
  });
  child.on("close", (code) => {
  resolve2(code === 0);
@@ -15826,8 +15853,9 @@ async function checkAndUpgrade(currentVersion, prompt) {
  )
  );
  console.log();
- console.log("Upgrading...");
- const success2 = await performUpgrade();
+ const packageManager = detectPackageManager();
+ console.log(`Upgrading via ${packageManager}...`);
+ const success2 = await performUpgrade(packageManager);
  if (success2) {
  console.log(chalk12.green(`Upgraded to ${latestVersion}`));
  console.log();
@@ -15838,6 +15866,8 @@ async function checkAndUpgrade(currentVersion, prompt) {
  console.log();
  console.log(chalk12.red("Upgrade failed. Please run manually:"));
  console.log(chalk12.cyan(` npm install -g ${PACKAGE_NAME}@latest`));
+ console.log(chalk12.gray(" # or"));
+ console.log(chalk12.cyan(` pnpm add -g ${PACKAGE_NAME}@latest`));
  console.log();
  console.log("Then re-run:");
  console.log(chalk12.cyan(` ${buildRerunCommand(prompt)}`));
@@ -15931,7 +15961,7 @@ function extractRequiredVarNames(config2) {
  }
  function checkMissingVariables(varNames, envFilePath) {
  let dotenvValues = {};
- if (existsSync5(envFilePath)) {
+ if (existsSync6(envFilePath)) {
  const result = dotenvConfig2({ path: envFilePath });
  if (result.parsed) {
  dotenvValues = result.parsed;
@@ -15949,7 +15979,7 @@ function checkMissingVariables(varNames, envFilePath) {
  }
  async function generateEnvPlaceholders(missingVars, envFilePath) {
  const placeholders = missingVars.map((name) => `${name}=`).join("\n");
- if (existsSync5(envFilePath)) {
+ if (existsSync6(envFilePath)) {
  const existingContent = readFileSync(envFilePath, "utf8");
  const needsNewline = existingContent.length > 0 && !existingContent.endsWith("\n");
  const prefix = needsNewline ? "\n" : "";
@@ -15961,13 +15991,13 @@ async function generateEnvPlaceholders(missingVars, envFilePath) {
  }
  }
  var cookCommand = new Command11().name("cook").description("One-click agent preparation and execution from vm0.yaml").argument("[prompt]", "Prompt for the agent").action(async (prompt) => {
- const shouldExit = await checkAndUpgrade("4.12.0", prompt);
+ const shouldExit = await checkAndUpgrade("4.13.0", prompt);
  if (shouldExit) {
  process.exit(0);
  }
  const cwd = process.cwd();
  console.log(chalk13.blue(`Reading config: ${CONFIG_FILE3}`));
- if (!existsSync5(CONFIG_FILE3)) {
+ if (!existsSync6(CONFIG_FILE3)) {
  console.error(chalk13.red(`\u2717 Config file not found: ${CONFIG_FILE3}`));
  process.exit(1);
  }
@@ -15995,7 +16025,7 @@ var cookCommand = new Command11().name("cook").description("One-click agent prep
  );
  const requiredVarNames = extractRequiredVarNames(config2);
  if (requiredVarNames.length > 0) {
- const envFilePath = path12.join(cwd, ".env");
+ const envFilePath = path11.join(cwd, ".env");
  const missingVars = checkMissingVariables(requiredVarNames, envFilePath);
  if (missingVars.length > 0) {
  await generateEnvPlaceholders(missingVars, envFilePath);
@@ -16015,9 +16045,9 @@ var cookCommand = new Command11().name("cook").description("One-click agent prep
  console.log();
  console.log(chalk13.blue("Processing volumes..."));
  for (const volumeConfig of Object.values(config2.volumes)) {
- const volumeDir = path12.join(cwd, volumeConfig.name);
+ const volumeDir = path11.join(cwd, volumeConfig.name);
  console.log(chalk13.gray(` ${volumeConfig.name}/`));
- if (!existsSync5(volumeDir)) {
+ if (!existsSync6(volumeDir)) {
  console.error(
  chalk13.red(
  ` \u2717 Directory not found. Create the directory and add files first.`
@@ -16050,10 +16080,10 @@ var cookCommand = new Command11().name("cook").description("One-click agent prep
  }
  console.log();
  console.log(chalk13.blue("Processing artifact..."));
- const artifactDir = path12.join(cwd, ARTIFACT_DIR);
+ const artifactDir = path11.join(cwd, ARTIFACT_DIR);
  console.log(chalk13.gray(` ${ARTIFACT_DIR}/`));
  try {
- if (!existsSync5(artifactDir)) {
+ if (!existsSync6(artifactDir)) {
  await mkdir5(artifactDir, { recursive: true });
  console.log(chalk13.green(` \u2713 Created directory`));
  }
@@ -16147,12 +16177,12 @@ import { Command as Command15 } from "commander";
  import { Command as Command12 } from "commander";
  import chalk14 from "chalk";
  import { readFile as readFile6 } from "fs/promises";
- import { existsSync as existsSync6 } from "fs";
- var sleep = (ms) => new Promise((resolve2) => setTimeout(resolve2, ms));
+ import { existsSync as existsSync7 } from "fs";
+ var sleep2 = (ms) => new Promise((resolve2) => setTimeout(resolve2, ms));
  var buildCommand = new Command12().name("build").description("Build a custom image from a Dockerfile").requiredOption("-f, --file <path>", "Path to Dockerfile").requiredOption("-n, --name <name>", "Name for the image").option("--delete-existing", "Delete existing image before building").action(
  async (options) => {
  const { file: file2, name, deleteExisting } = options;
- if (!existsSync6(file2)) {
+ if (!existsSync7(file2)) {
  console.error(chalk14.red(`\u2717 Dockerfile not found: ${file2}`));
  process.exit(1);
  }
@@ -16205,7 +16235,7 @@ var buildCommand = new Command12().name("build").description("Build a custom ima
  logsOffset = statusData.logsOffset;
  status = statusData.status;
  if (status === "building") {
- await sleep(2e3);
+ await sleep2(2e3);
  }
  }
  console.log();
@@ -16598,7 +16628,7 @@ function handleError(error43, runId) {

  // src/index.ts
  var program = new Command17();
- program.name("vm0").description("VM0 CLI - A modern build tool").version("4.12.0");
+ program.name("vm0").description("VM0 CLI - A modern build tool").version("4.13.0");
  program.command("info").description("Display environment information").action(async () => {
  console.log(chalk18.cyan("System Information:"));
  console.log(`Node Version: ${process.version}`);
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@vm0/cli",
- "version": "4.12.0",
+ "version": "4.13.0",
  "description": "CLI application",
  "repository": {
  "type": "git",