@lightprotocol/zk-compression-cli 0.27.1-alpha.1 → 0.27.1-alpha.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. package/accounts/address_merkle_tree_amt1Ayt45jfbdw5YSo7iz6WZxUmnZsQTYXy82hVwyC2.json +1 -1
  2. package/accounts/address_merkle_tree_queue_aq1S9z4reTSQAdgWHGD2zDaS39sjGrAxbR31vxJ2F4F.json +1 -1
  3. package/accounts/batch_address_merkle_tree_EzKE84aVTkCUhDHLELqyJaq1Y7UVVmqxXqZjVHwHY3rK.json +14 -1
  4. package/accounts/{batch_state_merkle_tree_2_2Yb3fGo2E9aWLjY8KuESaqurYpGGhEeJr7eynKrSgXwS.json → batch_address_merkle_tree_amt2kaJA14v3urZbZvnc5v2np8jqvc4Z8zDep5wbtzx.json} +1 -1
  5. package/accounts/{batch_state_merkle_tree_HLKs5NJ8FXkJg8BrzJt56adFYYuwg5etzDtBbQYTsixu.json → batch_state_merkle_tree_bmt1LryLZUMmF7ZtqESaw7wifBXLfXHQYoE4GAmrahU.json} +1 -1
  6. package/accounts/batch_state_merkle_tree_bmt2UxoBxB9xWev4BkLvkGdapsz6sZGkzViPNph7VFi.json +1 -0
  7. package/accounts/batch_state_merkle_tree_bmt3ccLd4bqSVZVeCJnH1F6C8jNygAhaDfxDwePyyGb.json +1 -0
  8. package/accounts/batch_state_merkle_tree_bmt4d3p1a4YQgk9PeZv5s4DBUmbF5NxqYpk9HGjQsd8.json +1 -0
  9. package/accounts/batch_state_merkle_tree_bmt5yU97jC88YXTuSukYHa8Z5Bi2ZDUtmzfkDTA2mG2.json +1 -0
  10. package/accounts/{batched_output_queue_2_12wJT3xYd46rtjeqDU6CrtT8unqLjPiheggzqhN9YsyB.json → batched_output_queue_oq1na8gojfdUhsfCpyjNt6h4JaDWtHf1yQj4koBWfto.json} +1 -1
  11. package/accounts/{batched_output_queue_6L7SzhYB3anwEQ9cphpJ1U7Scwj57bx2xueReg7R9cKU.json → batched_output_queue_oq2UkeMsJLfXt2QHzim242SUi3nvjJs8Pn7Eac9H9vg.json} +1 -1
  12. package/accounts/batched_output_queue_oq3AxjekBWgo64gpauB6QtuZNesuv19xrhaC1ZM1THQ.json +1 -0
  13. package/accounts/batched_output_queue_oq4ypwvVGzCUMoiKKHWh4S1SgZJ9vCvKpcz6RT6A8dq.json +1 -0
  14. package/accounts/batched_output_queue_oq5oh5ZR3yGomuQgFduNDzjtGvVWfDRGLuDVjv9a96P.json +1 -0
  15. package/accounts/compressible_config_pda_ACXg8a7VaqecBWrSbdu73W4Pg9gsqXJ3EXAqkHyhvVXg.json +1 -0
  16. package/accounts/config_counter_pda_8gH9tmziWsS8Wc4fnoN5ax3jsSumNYoRDuSBvmH2GMH8.json +1 -0
  17. package/accounts/{test_batched_cpi_context_7Hp52chxaew8bW1ApR4fck2bh6Y8qA1pu3qwH6N9zaLj.json → cpi_context_cpi15BoVPKgEPw5o8wc2T816GE7b378nMXnhH3Xbq4y.json} +1 -1
  18. package/accounts/cpi_context_cpi1uHzrEhBG733DoEJNgHCyRS3XmmyVNZx5fonubE4.json +1 -1
  19. package/accounts/cpi_context_cpi2cdhkH5roePvcudTgUL8ppEBfTay1desGh8G8QxK.json +1 -1
  20. package/accounts/{cpi_context_batched_2_HwtjxDvFEXiWnzeMeWkMBzpQN45A95rTJNZmz1Z3pe8R.json → cpi_context_cpi2yGapXUR3As5SjnHBAVvmApNiLsbeZpF3euWnW6B.json} +1 -14
  21. package/accounts/cpi_context_cpi3mbwMpSX8FAGMZVP85AwxqCaQMfEk9Em1v8QK9Rf.json +1 -0
  22. package/accounts/cpi_context_cpi4yyPDc4bCgHAnsenunGA8Y77j3XEDyjgfyCKgcoc.json +1 -0
  23. package/accounts/cpi_context_cpi5ZTjdgYpZ1Xr7B1cMLLUE81oTtJbNNAyKary2nV6.json +1 -0
  24. package/accounts/epoch_pda_34w7KcLBXabMkHuXE2fY368vFe6kP3v5EJn8nPvQ8SKn.json +1 -1
  25. package/accounts/forester_epoch_pda_3FBt1BPQHCQkS8k3wrUXMfB6JBhtMhEqQXueHRw2ojZV.json +1 -1
  26. package/accounts/governance_authority_pda_CuEtcKkkbTn6qy2qxqDswq5U2ADsqoipYDAYfRvxPjcp.json +1 -1
  27. package/accounts/group_pda_24rt4RgeyjUCWGS2eF7L7gyNMuz6JWdqYpAvb1KRoHxs.json +1 -1
  28. package/accounts/merkle_tree_pubkey_smt1NamzXdq4AMqS2fS2F1i5KTYPZRhoHgWx38d8WsT.json +1 -1
  29. package/accounts/merkle_tree_pubkey_smt2rJAFdyJJupwMKAqTNAJwvjhmiZ4JYGZmbVRw1Ho.json +1 -1
  30. package/accounts/nullifier_queue_pubkey_nfq1NvQDJ2GEgnS8zt9prAe8rjjpAW1zFkrvZoBR148.json +1 -1
  31. package/accounts/nullifier_queue_pubkey_nfq2hgS7NYemXsFaFUCe3EMXSDSfnZnAe27jC6aPP1X.json +1 -1
  32. package/accounts/registered_forester_pda_2KNqEh23Se8AHecuzR1UkxL26euq2qXSpQPTH1jH7VqU.json +1 -1
  33. package/accounts/rent_sponsor_pda_r18WwUxfG8kQ69bQPAB2jV6zGNKy3GosFGctjQoV4ti.json +1 -0
  34. package/bin/account_compression.so +0 -0
  35. package/bin/light_compressed_token.so +0 -0
  36. package/bin/light_registry.so +0 -0
  37. package/bin/light_system_program_pinocchio.so +0 -0
  38. package/dist/commands/approve-and-mint-to/index.js +26 -29
  39. package/dist/commands/balance/index.js +12 -11
  40. package/dist/commands/compress-sol/index.js +13 -13
  41. package/dist/commands/compress-spl/index.js +19 -19
  42. package/dist/commands/config/config.js +67 -33
  43. package/dist/commands/create-mint/index.js +19 -20
  44. package/dist/commands/create-token-pool/index.js +9 -9
  45. package/dist/commands/decompress-sol/index.js +13 -13
  46. package/dist/commands/decompress-spl/index.js +19 -19
  47. package/dist/commands/init/index.js +8 -8
  48. package/dist/commands/merge-token-accounts/index.js +13 -14
  49. package/dist/commands/mint-to/index.js +24 -25
  50. package/dist/commands/start-prover/index.d.ts +0 -3
  51. package/dist/commands/start-prover/index.js +13 -58
  52. package/dist/commands/test-validator/index.d.ts +4 -2
  53. package/dist/commands/test-validator/index.js +107 -109
  54. package/dist/commands/token-balance/index.js +15 -15
  55. package/dist/commands/transfer/index.js +23 -24
  56. package/dist/psp-utils/download.js +1 -1
  57. package/dist/utils/constants.d.ts +2 -2
  58. package/dist/utils/constants.js +2 -3
  59. package/dist/utils/downloadProverBinary.d.ts +7 -0
  60. package/dist/utils/downloadProverBinary.js +107 -0
  61. package/dist/utils/initTestEnv.d.ts +12 -5
  62. package/dist/utils/initTestEnv.js +89 -16
  63. package/dist/utils/process.d.ts +10 -0
  64. package/dist/utils/process.js +56 -6
  65. package/dist/utils/processPhotonIndexer.d.ts +1 -1
  66. package/dist/utils/processPhotonIndexer.js +7 -4
  67. package/dist/utils/processProverServer.d.ts +1 -2
  68. package/dist/utils/processProverServer.js +70 -86
  69. package/dist/utils/proverVersion.generated.d.ts +1 -0
  70. package/dist/utils/proverVersion.generated.js +5 -0
  71. package/dist/utils/utils.d.ts +4 -0
  72. package/dist/utils/utils.js +44 -3
  73. package/oclif.manifest.json +173 -221
  74. package/package.json +63 -64
  75. package/test_bin/dev +4 -7
  76. package/test_bin/run +2 -2
  77. package/bin/forester +0 -0
  78. package/bin/forester.toml +0 -15
  79. package/bin/light_system_program.so +0 -0
  80. package/bin/prover-darwin-arm64 +0 -0
  81. package/bin/prover-darwin-x64 +0 -0
  82. package/bin/prover-linux-arm64 +0 -0
  83. package/bin/prover-linux-x64 +0 -0
  84. package/bin/proving-keys/combined_26_1_1.key +0 -0
  85. package/bin/proving-keys/combined_26_1_2.key +0 -0
  86. package/bin/proving-keys/combined_26_2_1.key +0 -0
  87. package/bin/proving-keys/combined_32_40_1_1.key +0 -0
  88. package/bin/proving-keys/combined_32_40_1_2.key +0 -0
  89. package/bin/proving-keys/combined_32_40_2_1.key +0 -0
  90. package/bin/proving-keys/inclusion_32_1.key +0 -0
  91. package/bin/proving-keys/inclusion_32_2.key +0 -0
  92. package/bin/proving-keys/inclusion_32_3.key +0 -0
  93. package/bin/proving-keys/inclusion_32_4.key +0 -0
  94. package/bin/proving-keys/mainnet_inclusion_26_1.key +0 -0
  95. package/bin/proving-keys/mainnet_inclusion_26_2.key +0 -0
  96. package/bin/proving-keys/mainnet_inclusion_26_3.key +0 -0
  97. package/bin/proving-keys/mainnet_inclusion_26_4.key +0 -0
  98. package/bin/proving-keys/non-inclusion_26_1.key +0 -0
  99. package/bin/proving-keys/non-inclusion_26_2.key +0 -0
  100. package/bin/proving-keys/non-inclusion_40_1.key +0 -0
  101. package/bin/proving-keys/non-inclusion_40_2.key +0 -0
  102. package/bin/proving-keys/non-inclusion_40_3.key +0 -0
  103. package/bin/proving-keys/non-inclusion_40_4.key +0 -0
  104. package/test_bin/lut.json +0 -1
package/dist/utils/downloadProverBinary.js
@@ -0,0 +1,107 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.downloadProverBinary = downloadProverBinary;
+ exports.getProverVersion = getProverVersion;
+ const tslib_1 = require("tslib");
+ const fs_1 = tslib_1.__importDefault(require("fs"));
+ const path_1 = tslib_1.__importDefault(require("path"));
+ const https_1 = tslib_1.__importDefault(require("https"));
+ const http_1 = tslib_1.__importDefault(require("http"));
+ const promises_1 = require("stream/promises");
+ const proverVersion_generated_1 = require("./proverVersion.generated");
+ const GITHUB_RELEASES_BASE_URL = `https://github.com/Lightprotocol/light-protocol/releases/download/light-prover-v${proverVersion_generated_1.PROVER_VERSION}`;
+ const MAX_REDIRECTS = 10;
+ async function downloadProverBinary(binaryPath, binaryName, options = {}) {
+ const { maxRetries = 3, retryDelay = 2000 } = options;
+ const url = `${GITHUB_RELEASES_BASE_URL}/${binaryName}`;
+ console.log(`\nDownloading prover binary: ${binaryName}`);
+ console.log(` From: ${url}`);
+ console.log(` To: ${binaryPath}\n`);
+ const dir = path_1.default.dirname(binaryPath);
+ if (!fs_1.default.existsSync(dir)) {
+ fs_1.default.mkdirSync(dir, { recursive: true });
+ }
+ let lastError = null;
+ for (let attempt = 1; attempt <= maxRetries; attempt++) {
+ try {
+ await downloadFile(url, binaryPath);
+ if (process.platform !== "win32") {
+ fs_1.default.chmodSync(binaryPath, 0o755);
+ }
+ console.log("\nProver binary downloaded.\n");
+ return;
+ }
+ catch (error) {
+ lastError = error;
+ console.error(`\nDownload attempt ${attempt}/${maxRetries} failed: ${lastError.message}`);
+ if (attempt < maxRetries) {
+ console.log(` Retrying in ${retryDelay / 1000}s...\n`);
+ await new Promise((resolve) => setTimeout(resolve, retryDelay));
+ }
+ }
+ }
+ throw new Error(`Failed to download prover binary after ${maxRetries} attempts: ${lastError?.message}`);
+ }
+ async function downloadFile(url, outputPath, redirectDepth = 0) {
+ return new Promise((resolve, reject) => {
+ const protocol = url.startsWith("https") ? https_1.default : http_1.default;
+ const request = protocol.get(url, (response) => {
+ if (response.statusCode === 301 ||
+ response.statusCode === 302 ||
+ response.statusCode === 307 ||
+ response.statusCode === 308) {
+ const redirectUrl = response.headers.location;
+ if (!redirectUrl) {
+ return reject(new Error("Redirect without location header"));
+ }
+ if (redirectDepth >= MAX_REDIRECTS) {
+ return reject(new Error(`Too many redirects: exceeded maximum of ${MAX_REDIRECTS} redirects`));
+ }
+ return downloadFile(redirectUrl, outputPath, redirectDepth + 1).then(resolve, reject);
+ }
+ if (response.statusCode !== 200) {
+ return reject(new Error(`HTTP ${response.statusCode}: ${response.statusMessage}`));
+ }
+ const totalBytes = parseInt(response.headers["content-length"] || "0", 10);
+ let downloadedBytes = 0;
+ let lastProgress = 0;
+ const fileStream = fs_1.default.createWriteStream(outputPath);
+ response.on("data", (chunk) => {
+ downloadedBytes += chunk.length;
+ if (totalBytes > 0) {
+ const progress = Math.floor((downloadedBytes / totalBytes) * 100);
+ if (progress >= lastProgress + 5) {
+ lastProgress = progress;
+ const mb = (downloadedBytes / 1024 / 1024).toFixed(1);
+ const totalMb = (totalBytes / 1024 / 1024).toFixed(1);
+ process.stdout.write(`\r Progress: ${progress}% (${mb}MB / ${totalMb}MB)`);
+ }
+ }
+ });
+ (0, promises_1.pipeline)(response, fileStream)
+ .then(() => {
+ if (totalBytes > 0) {
+ process.stdout.write("\r Progress: 100% - Download complete\n");
+ }
+ resolve();
+ })
+ .catch((error) => {
+ fs_1.default.unlinkSync(outputPath);
+ reject(error);
+ });
+ });
+ request.on("error", (error) => {
+ if (fs_1.default.existsSync(outputPath)) {
+ fs_1.default.unlinkSync(outputPath);
+ }
+ reject(error);
+ });
+ request.setTimeout(60000, () => {
+ request.destroy();
+ reject(new Error("Download timeout"));
+ });
+ });
+ }
+ function getProverVersion() {
+ return proverVersion_generated_1.PROVER_VERSION;
+ }
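The new dist/utils/downloadProverBinary.js fetches the prover binary from the pinned GitHub release, with retries and redirect handling. A minimal usage sketch based on the exports shown above; the destination path and binary name here are illustrative, not taken from the CLI:

    import os from "os";
    import path from "path";
    // Illustrative relative import; the module is emitted as dist/utils/downloadProverBinary.js.
    import { downloadProverBinary, getProverVersion } from "./downloadProverBinary";

    async function fetchProver(): Promise<void> {
        // Example name for an Apple Silicon host, following the prover-<platform>-<arch>
        // naming of the binaries previously bundled under package/bin.
        const binaryName = "prover-darwin-arm64";
        // Assumed cache location; any directory the caller controls works.
        const binaryPath = path.join(os.homedir(), ".lightprotocol", "bin", binaryName);
        console.log(`Pinned prover release: ${getProverVersion()}`);
        // maxRetries/retryDelay mirror the defaults in the implementation above.
        await downloadProverBinary(binaryPath, binaryName, { maxRetries: 3, retryDelay: 2000 });
    }

    fetchProver().catch(console.error);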
package/dist/utils/initTestEnv.d.ts
@@ -9,7 +9,7 @@ export declare function stopTestEnv(options: {
  indexer: boolean;
  prover: boolean;
  }): Promise<void>;
- export declare function initTestEnv({ additionalPrograms, skipSystemAccounts, indexer, prover, rpcPort, indexerPort, proverPort, gossipHost, checkPhotonVersion, photonDatabaseUrl, limitLedgerSize, proverRunMode, circuits, geyserConfig, validatorArgs, }: {
+ export declare function initTestEnv({ additionalPrograms, skipSystemAccounts, indexer, prover, rpcPort, indexerPort, proverPort, gossipHost, checkPhotonVersion, photonDatabaseUrl, limitLedgerSize, geyserConfig, validatorArgs, cloneNetwork, verbose, skipReset, }: {
  additionalPrograms?: {
  address: string;
  path: string;
@@ -24,10 +24,11 @@ export declare function initTestEnv({ additionalPrograms, skipSystemAccounts, in
  checkPhotonVersion?: boolean;
  photonDatabaseUrl?: string;
  limitLedgerSize?: number;
- proverRunMode?: "local-rpc" | "inclusion" | "non-inclusion" | "forester" | "forester-test" | "rpc" | "full" | "full-test";
- circuits?: string[];
  validatorArgs?: string;
  geyserConfig?: string;
+ cloneNetwork?: "devnet" | "mainnet";
+ verbose?: boolean;
+ skipReset?: boolean;
  }): Promise<void>;
  export declare function initTestEnvIfNeeded({ additionalPrograms, skipSystemAccounts, indexer, prover, geyserConfig, validatorArgs, }?: {
  additionalPrograms?: {
@@ -42,7 +43,7 @@ export declare function initTestEnvIfNeeded({ additionalPrograms, skipSystemAcco
  }): Promise<void>;
  export declare function programsDirPath(): string;
  export declare function programFilePath(programName: string): string;
- export declare function getSolanaArgs({ additionalPrograms, skipSystemAccounts, limitLedgerSize, rpcPort, gossipHost, downloadBinaries, }: {
+ export declare function getSolanaArgs({ additionalPrograms, skipSystemAccounts, limitLedgerSize, rpcPort, gossipHost, downloadBinaries, cloneNetwork, verbose, skipReset, }: {
  additionalPrograms?: {
  address: string;
  path: string;
@@ -52,8 +53,11 @@ export declare function getSolanaArgs({ additionalPrograms, skipSystemAccounts,
  rpcPort?: number;
  gossipHost?: string;
  downloadBinaries?: boolean;
+ cloneNetwork?: "devnet" | "mainnet";
+ verbose?: boolean;
+ skipReset?: boolean;
  }): Promise<Array<string>>;
- export declare function startTestValidator({ additionalPrograms, skipSystemAccounts, limitLedgerSize, rpcPort, gossipHost, validatorArgs, geyserConfig, }: {
+ export declare function startTestValidator({ additionalPrograms, skipSystemAccounts, limitLedgerSize, rpcPort, gossipHost, validatorArgs, geyserConfig, cloneNetwork, verbose, skipReset, }: {
  additionalPrograms?: {
  address: string;
  path: string;
@@ -64,6 +68,9 @@ export declare function startTestValidator({ additionalPrograms, skipSystemAccou
  gossipHost?: string;
  validatorArgs?: string;
  geyserConfig?: string;
+ cloneNetwork?: "devnet" | "mainnet";
+ verbose?: boolean;
+ skipReset?: boolean;
  }): Promise<void>;
  export declare function killTestValidator(): Promise<void>;
  export {};
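Based on this updated declaration, a call that exercises the new cloneNetwork, verbose, and skipReset options could look like the sketch below (ports are the defaults used in the implementation that follows; the import path is illustrative):

    import { initTestEnv } from "./initTestEnv";

    async function main(): Promise<void> {
        await initTestEnv({
            indexer: true,
            prover: true,
            rpcPort: 8899,
            indexerPort: 8784,
            proverPort: 3001,
            cloneNetwork: "devnet", // clone the core Light programs and accounts from devnet
            verbose: true,          // print account-cloning progress
            skipReset: false,       // keep the validator's --reset behavior
        });
    }

    main().catch(console.error);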
package/dist/utils/initTestEnv.js
@@ -17,6 +17,7 @@ const psp_utils_1 = require("../psp-utils");
  const process_1 = require("./process");
  const processProverServer_1 = require("./processProverServer");
  const processPhotonIndexer_1 = require("./processPhotonIndexer");
+ const web3_js_1 = require("@solana/web3.js");
  exports.SYSTEM_PROGRAMS = [
  {
  id: "noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV",
@@ -44,6 +45,38 @@ exports.SYSTEM_PROGRAMS = [
  tag: constants_1.LIGHT_REGISTRY_TAG,
  },
  ];
+ // Programs to clone from devnet/mainnet (the three core Light programs)
+ const PROGRAMS_TO_CLONE = [
+ "Lighton6oQpVkeewmo2mcPTQQp7kYHr4fWpAgJyEmDX", // Light Registry
+ "SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7", // Light System Program
+ "compr6CUsB5m2jS4Y3831ztGSTnDpnKJTKS95d64XVq", // Account Compression
+ ];
+ // Known Light Registry accounts to clone (excludes forester/epoch accounts)
+ // These are the core config accounts needed for protocol operation
+ const REGISTRY_ACCOUNTS_TO_CLONE = [
+ "CuEtcKkkbTn6qy2qxqDswq5U2ADsqoipYDAYfRvxPjcp", // governance_authority_pda (ProtocolConfigPda)
+ "8gH9tmziWsS8Wc4fnoN5ax3jsSumNYoRDuSBvmH2GMH8", // config_counter_pda
+ "35hkDgaAKwMCaxRz2ocSZ6NaUrtKkyNqU6c4RV3tYJRh", // registered_program_pda
+ "DumMsyvkaGJG4QnQ1BhTgvoRMXsgGxfpKDUCr22Xqu4w", // registered_registry_program_pda
+ "24rt4RgeyjUCWGS2eF7L7gyNMuz6JWdqYpAvb1KRoHxs", // group_pda
+ ];
+ /**
+ * Fetches account public keys owned by a program from a given cluster.
+ * For Light Registry, returns known config accounts (skips forester/epoch accounts).
+ */
+ async function getProgramOwnedAccounts(programId, rpcUrl) {
+ const isRegistry = programId === "Lighton6oQpVkeewmo2mcPTQQp7kYHr4fWpAgJyEmDX";
+ if (isRegistry) {
+ // Return known registry accounts instead of fetching all (too slow due to 88k+ forester accounts)
+ return REGISTRY_ACCOUNTS_TO_CLONE;
+ }
+ else {
+ // For other programs, fetch all accounts
+ const connection = new web3_js_1.Connection(rpcUrl);
+ const accounts = await connection.getProgramAccounts(new web3_js_1.PublicKey(programId), { dataSlice: { offset: 0, length: 0 } });
+ return accounts.map((acc) => acc.pubkey.toBase58());
+ }
+ }
  async function stopTestEnv(options) {
  const processesToKill = [
  { name: "photon", condition: options.indexer, killFunction: processPhotonIndexer_1.killIndexer },
@@ -70,7 +103,7 @@ async function stopTestEnv(options) {
  await Promise.all(killPromises);
  console.log("All specified processes and validator stopped.");
  }
- async function initTestEnv({ additionalPrograms, skipSystemAccounts, indexer = true, prover = true, rpcPort = 8899, indexerPort = 8784, proverPort = 3001, gossipHost = "127.0.0.1", checkPhotonVersion = true, photonDatabaseUrl, limitLedgerSize, proverRunMode, circuits, geyserConfig, validatorArgs, }) {
+ async function initTestEnv({ additionalPrograms, skipSystemAccounts, indexer = true, prover = true, rpcPort = 8899, indexerPort = 8784, proverPort = 3001, gossipHost = "127.0.0.1", checkPhotonVersion = true, photonDatabaseUrl, limitLedgerSize, geyserConfig, validatorArgs, cloneNetwork, verbose, skipReset, }) {
  // We cannot await this promise directly because it will hang the process
  startTestValidator({
  additionalPrograms,
@@ -80,14 +113,21 @@ async function initTestEnv({ additionalPrograms, skipSystemAccounts, indexer = t
  gossipHost,
  validatorArgs,
  geyserConfig,
+ cloneNetwork,
+ verbose,
+ skipReset,
  });
  await (0, process_1.waitForServers)([{ port: rpcPort, path: "/health" }]);
  await (0, process_1.confirmServerStability)(`http://127.0.0.1:${rpcPort}/health`);
+ await (0, process_1.confirmRpcReadiness)(`http://127.0.0.1:${rpcPort}`);
  if (indexer) {
  const config = (0, utils_1.getConfig)();
  config.indexerUrl = `http://127.0.0.1:${indexerPort}`;
  (0, utils_1.setConfig)(config);
- await (0, processPhotonIndexer_1.startIndexer)(`http://127.0.0.1:${rpcPort}`, indexerPort, checkPhotonVersion, photonDatabaseUrl);
+ const proverUrlForIndexer = prover
+ ? `http://127.0.0.1:${proverPort}`
+ : undefined;
+ await (0, processPhotonIndexer_1.startIndexer)(`http://127.0.0.1:${rpcPort}`, indexerPort, checkPhotonVersion, photonDatabaseUrl, proverUrlForIndexer);
  }
  if (prover) {
  const config = (0, utils_1.getConfig)();
@@ -95,7 +135,7 @@ async function initTestEnv({ additionalPrograms, skipSystemAccounts, indexer = t
  (0, utils_1.setConfig)(config);
  try {
  // TODO: check if using redisUrl is better here.
- await (0, processProverServer_1.startProver)(proverPort, proverRunMode, circuits);
+ await (0, processProverServer_1.startProver)(proverPort);
  }
  catch (error) {
  console.error("Failed to start prover:", error);
@@ -154,26 +194,31 @@ function programFilePath(programName) {
  }
  return path_1.default.resolve(__dirname, path_1.default.join(constants_1.BASE_PATH, programName));
  }
- async function getSolanaArgs({ additionalPrograms, skipSystemAccounts, limitLedgerSize, rpcPort, gossipHost, downloadBinaries = true, }) {
- // TODO: adjust program tags
- const programs = [...exports.SYSTEM_PROGRAMS];
- if (additionalPrograms)
- additionalPrograms.forEach((program) => {
- programs.push({ id: program.address, path: program.path });
- });
+ async function getSolanaArgs({ additionalPrograms, skipSystemAccounts, limitLedgerSize, rpcPort, gossipHost, downloadBinaries = true, cloneNetwork, verbose = false, skipReset = false, }) {
  const dirPath = programsDirPath();
  const solanaArgs = [
- "--reset",
  `--limit-ledger-size=${limitLedgerSize}`,
  `--rpc-port=${rpcPort}`,
  `--gossip-host=${gossipHost}`,
  "--quiet",
  ];
- for (const program of programs) {
- if (program.path) {
- solanaArgs.push("--bpf-program", program.id, program.path);
+ if (!skipReset) {
+ solanaArgs.unshift("--reset");
+ }
+ // Add cluster URL if cloning from a network
+ if (cloneNetwork) {
+ const clusterUrl = cloneNetwork === "devnet" ? "devnet" : "mainnet-beta";
+ solanaArgs.push("--url", clusterUrl);
+ }
+ // Process system programs
+ for (const program of exports.SYSTEM_PROGRAMS) {
+ const shouldClone = cloneNetwork && PROGRAMS_TO_CLONE.includes(program.id);
+ if (shouldClone) {
+ // Clone program from network
+ solanaArgs.push("--clone-upgradeable-program", program.id);
  }
  else {
+ // Load program from local binary
  const localFilePath = programFilePath(program.name);
  if (program.name === "spl_noop.so" || downloadBinaries) {
  await (0, psp_utils_1.downloadBinIfNotExists)({
@@ -188,14 +233,39 @@ async function getSolanaArgs({ additionalPrograms, skipSystemAccounts, limitLedg
  solanaArgs.push("--bpf-program", program.id, localFilePath);
  }
  }
- if (!skipSystemAccounts) {
+ // Clone all accounts owned by the programs being cloned
+ if (cloneNetwork) {
+ const rpcUrl = cloneNetwork === "devnet"
+ ? "https://api.devnet.solana.com"
+ : "https://api.mainnet-beta.solana.com";
+ for (const programId of PROGRAMS_TO_CLONE) {
+ if (verbose) {
+ console.log(`Fetching accounts owned by ${programId}...`);
+ }
+ const accounts = await getProgramOwnedAccounts(programId, rpcUrl);
+ if (verbose) {
+ console.log(`Found ${accounts.length} accounts`);
+ }
+ for (const account of accounts) {
+ solanaArgs.push("--maybe-clone", account);
+ }
+ }
+ }
+ // Add additional user-provided programs (always loaded locally)
+ if (additionalPrograms) {
+ for (const program of additionalPrograms) {
+ solanaArgs.push("--bpf-program", program.address, program.path);
+ }
+ }
+ // Load local system accounts only if not cloning from network
+ if (!skipSystemAccounts && !cloneNetwork) {
  const accountsRelPath = "../../accounts";
  const accountsPath = path_1.default.resolve(__dirname, accountsRelPath);
  solanaArgs.push("--account-dir", accountsPath);
  }
  return solanaArgs;
  }
- async function startTestValidator({ additionalPrograms, skipSystemAccounts, limitLedgerSize, rpcPort, gossipHost, validatorArgs, geyserConfig, }) {
+ async function startTestValidator({ additionalPrograms, skipSystemAccounts, limitLedgerSize, rpcPort, gossipHost, validatorArgs, geyserConfig, cloneNetwork, verbose, skipReset, }) {
  const command = "solana-test-validator";
  const solanaArgs = await getSolanaArgs({
  additionalPrograms,
@@ -203,6 +273,9 @@ async function startTestValidator({ additionalPrograms, skipSystemAccounts, limi
  limitLedgerSize,
  rpcPort,
  gossipHost,
+ cloneNetwork,
+ verbose,
+ skipReset,
  });
  await killTestValidator();
  await new Promise((r) => setTimeout(r, 1000));
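To make the new flag composition concrete, here is a sketch of roughly what getSolanaArgs assembles when cloneNetwork is set, derived from the branches above (the limit-ledger-size value is illustrative and the cloned-account list is resolved at runtime):

    // Inside an async context:
    const args = await getSolanaArgs({
        limitLedgerSize: 5000,
        rpcPort: 8899,
        gossipHost: "127.0.0.1",
        cloneNetwork: "devnet",
    });
    // args is approximately:
    // ["--reset", "--limit-ledger-size=5000", "--rpc-port=8899", "--gossip-host=127.0.0.1", "--quiet",
    //  "--url", "devnet",
    //  "--bpf-program", "<id>", "<path>",              // for system programs that are not cloned, e.g. spl_noop
    //  "--clone-upgradeable-program", "<program id>",  // for each of the three core Light programs
    //  "--maybe-clone", "<pubkey>", ...]               // one pair per program-owned account
    // Note: --account-dir is omitted whenever cloneNetwork is set.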
package/dist/utils/process.d.ts
@@ -33,3 +33,13 @@ export declare function waitForServers(servers: {
  path: string;
  }[]): Promise<void>;
  export declare function confirmServerStability(url: string, attempts?: number): Promise<void>;
+ /**
+ * Confirms that the Solana RPC is fully ready to process requests.
+ * This goes beyond HTTP availability and verifies the RPC can handle actual Solana requests.
+ *
+ * @param rpcUrl - The RPC endpoint URL
+ * @param maxAttempts - Maximum number of attempts (default: 30)
+ * @param delayMs - Delay between attempts in milliseconds (default: 500ms)
+ * @throws Error if RPC doesn't become ready within maxAttempts
+ */
+ export declare function confirmRpcReadiness(rpcUrl: string, maxAttempts?: number, delayMs?: number): Promise<void>;
package/dist/utils/process.js
@@ -8,6 +8,7 @@ exports.execute = execute;
  exports.spawnBinary = spawnBinary;
  exports.waitForServers = waitForServers;
  exports.confirmServerStability = confirmServerStability;
+ exports.confirmRpcReadiness = confirmRpcReadiness;
  const tslib_1 = require("tslib");
  const child_process_1 = require("child_process");
  const path_1 = tslib_1.__importDefault(require("path"));
@@ -16,14 +17,14 @@ const find_process_1 = tslib_1.__importDefault(require("find-process"));
  const node_child_process_1 = require("node:child_process");
  const util_1 = require("util");
  const axios_1 = tslib_1.__importDefault(require("axios"));
- const waitOn = require("wait-on");
+ const wait_on_1 = tslib_1.__importDefault(require("wait-on"));
  const readdir = (0, util_1.promisify)(fs_1.default.readdir);
  const readFile = (0, util_1.promisify)(fs_1.default.readFile);
  /**
  * Logs the contents of prover log files in test-ledger dir.
  */
  async function logProverFileContents() {
- const testLedgerDir = path_1.default.join(__dirname, "../..", "test-ledger");
+ const testLedgerDir = path_1.default.join(process.cwd(), "test-ledger");
  try {
  if (!fs_1.default.existsSync(testLedgerDir)) {
  console.log("test-ledger directory does not exist");
@@ -167,9 +168,7 @@ async function execute(command) {
  function spawnBinary(command, args = []) {
  const logDir = "test-ledger";
  const binaryName = path_1.default.basename(command);
- console.log("command", command);
- console.log("args", args);
- const dir = path_1.default.join(__dirname, "../..", logDir);
+ const dir = path_1.default.join(process.cwd(), logDir);
  try {
  if (!fs_1.default.existsSync(dir)) {
  fs_1.default.mkdirSync(dir, { recursive: true });
@@ -181,6 +180,10 @@ function spawnBinary(command, args = []) {
  stdio: ["ignore", out, err],
  shell: false,
  detached: true,
+ env: {
+ ...process.env,
+ RUST_LOG: process.env.RUST_LOG || "debug",
+ },
  });
  spawnedProcess.on("close", async (code) => {
  console.log(`${binaryName} process exited with code ${code}`);
@@ -213,7 +216,7 @@ async function waitForServers(servers) {
  },
  };
  try {
- await waitOn(opts);
+ await (0, wait_on_1.default)(opts);
  servers.forEach((server) => {
  console.log(`${server.port} is up!`);
  });
@@ -240,3 +243,50 @@ async function confirmServerStability(url, attempts = 20) {
  throw error;
  }
  }
+ /**
+ * Confirms that the Solana RPC is fully ready to process requests.
+ * This goes beyond HTTP availability and verifies the RPC can handle actual Solana requests.
+ *
+ * @param rpcUrl - The RPC endpoint URL
+ * @param maxAttempts - Maximum number of attempts (default: 30)
+ * @param delayMs - Delay between attempts in milliseconds (default: 500ms)
+ * @throws Error if RPC doesn't become ready within maxAttempts
+ */
+ async function confirmRpcReadiness(rpcUrl, maxAttempts = 30, delayMs = 500) {
+ let lastError;
+ for (let attempt = 1; attempt <= maxAttempts; attempt++) {
+ try {
+ const response = await axios_1.default.post(rpcUrl, {
+ jsonrpc: "2.0",
+ id: 1,
+ method: "getHealth",
+ params: [],
+ }, {
+ headers: { "Content-Type": "application/json" },
+ timeout: 3000,
+ });
+ if (response.data?.result === "ok") {
+ console.log(`RPC is ready after ${attempt} attempt${attempt > 1 ? "s" : ""}.`);
+ return;
+ }
+ // Response received but not "ok"
+ lastError = new Error(`RPC returned unexpected result: ${JSON.stringify(response.data)}`);
+ }
+ catch (error) {
+ lastError = error;
+ // Log connection errors only on later attempts to reduce noise
+ if (attempt > 5 && attempt % 5 === 0) {
+ const errorMsg = error instanceof Error ? error.message : String(error);
+ console.log(`RPC not ready yet (attempt ${attempt}/${maxAttempts}): ${errorMsg}`);
+ }
+ }
+ // Don't sleep after the last attempt
+ if (attempt < maxAttempts) {
+ await new Promise((resolve) => setTimeout(resolve, delayMs));
+ }
+ }
+ // If we get here, all attempts failed
+ const errorMsg = lastError instanceof Error ? lastError.message : String(lastError);
+ const totalTime = Math.round((maxAttempts * delayMs) / 1000);
+ throw new Error(`RPC failed to become ready after ${maxAttempts} attempts (~${totalTime}s). Last error: ${errorMsg}`);
+ }
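The readiness check mirrors how initTestEnv now waits on the local validator; a usage sketch with the declared defaults (30 attempts, 500 ms apart):

    // Inside an async context, after spawning solana-test-validator on port 8899:
    await waitForServers([{ port: 8899, path: "/health" }]);
    await confirmServerStability("http://127.0.0.1:8899/health");
    // Sends JSON-RPC getHealth until the node answers "ok" or the attempts run out.
    await confirmRpcReadiness("http://127.0.0.1:8899", 30, 500);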
package/dist/utils/processPhotonIndexer.d.ts
@@ -1,2 +1,2 @@
- export declare function startIndexer(rpcUrl: string, indexerPort: number, checkPhotonVersion?: boolean, photonDatabaseUrl?: string): Promise<undefined>;
+ export declare function startIndexer(rpcUrl: string, indexerPort: number, checkPhotonVersion?: boolean, photonDatabaseUrl?: string, proverUrl?: string): Promise<undefined>;
  export declare function killIndexer(): Promise<void>;
package/dist/utils/processPhotonIndexer.js
@@ -23,16 +23,16 @@ async function isExpectedPhotonVersion(requiredVersion) {
  }
  function getPhotonInstallMessage() {
  if (constants_1.USE_PHOTON_FROM_GIT && constants_1.PHOTON_GIT_COMMIT) {
- return `\nLatest Photon indexer not found. Please install it by running: "cargo install --git ${constants_1.PHOTON_GIT_REPO} --rev ${constants_1.PHOTON_GIT_COMMIT} --locked"`;
+ return `\nPhoton indexer ${constants_1.PHOTON_VERSION} (commit ${constants_1.PHOTON_GIT_COMMIT}) not found. Please install it by running: "cargo install --git ${constants_1.PHOTON_GIT_REPO} --rev ${constants_1.PHOTON_GIT_COMMIT} --locked --force"`;
  }
  else if (constants_1.USE_PHOTON_FROM_GIT) {
- return `\nLatest Photon indexer not found. Please install it by running: "cargo install --git ${constants_1.PHOTON_GIT_REPO} --locked"`;
+ return `\nPhoton indexer ${constants_1.PHOTON_VERSION} not found. Please install it by running: "cargo install --git ${constants_1.PHOTON_GIT_REPO} --locked --force"`;
  }
  else {
- return `\nLatest Photon indexer not found. Please install it by running: "cargo install photon-indexer --version ${constants_1.PHOTON_VERSION} --locked"`;
+ return `\nPhoton indexer ${constants_1.PHOTON_VERSION} not found. Please install it by running: "cargo install photon-indexer --version ${constants_1.PHOTON_VERSION} --locked --force"`;
  }
  }
- async function startIndexer(rpcUrl, indexerPort, checkPhotonVersion = true, photonDatabaseUrl) {
+ async function startIndexer(rpcUrl, indexerPort, checkPhotonVersion = true, photonDatabaseUrl, proverUrl) {
  await killIndexer();
  const resolvedOrNull = which_1.default.sync("photon", { nothrow: true });
  if (resolvedOrNull === null ||
@@ -51,6 +51,9 @@ async function startIndexer(rpcUrl, indexerPort, checkPhotonVersion = true, phot
  if (photonDatabaseUrl) {
  args.push("--db-url", photonDatabaseUrl);
  }
+ if (proverUrl) {
+ args.push("--prover-url", proverUrl);
+ }
  (0, process_1.spawnBinary)(constants_1.INDEXER_PROCESS_NAME, args);
  await (0, process_1.waitForServers)([{ port: indexerPort, path: "/getIndexerHealth" }]);
  console.log("Indexer started successfully!");
package/dist/utils/processProverServer.d.ts
@@ -1,6 +1,5 @@
  export declare function killProver(): Promise<void>;
- export declare function isProverRunningWithFlags(runMode?: string, circuits?: string[], proverPort?: number, redisUrl?: string): Promise<boolean>;
- export declare function startProver(proverPort: number, runMode: string | undefined, circuits?: string[] | undefined, force?: boolean, redisUrl?: string): Promise<void>;
+ export declare function startProver(proverPort: number, redisUrl?: string): Promise<void>;
  export declare function getProverNameByArch(): string;
  export declare function getProverPathByArch(): string;
  export declare function healthCheck(port: number, retries?: number, timeout?: number): Promise<boolean>;
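With the runMode and circuits parameters gone, starting and checking the prover reduces to the sketch below (port 3001 is the default the CLI uses elsewhere; the optional redisUrl is omitted):

    // Inside an async context:
    await startProver(3001);
    const healthy = await healthCheck(3001); // retries/timeout fall back to their declared defaults
    console.log(`Prover healthy: ${healthy}`);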