@layerzerolabs/evm-sdks-build 2.1.3 → 2.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -103,8 +103,44 @@ async function populateErrors(callerRootFile, destPath = "src/errors", maxConcur
   console.log(`Generated abi for ${sortedErrors.length} errors`);
   await promises.writeFile(path2__default.default.join(errorDir, "errorSelectors.json"), JSON.stringify(sortedSelectors, null, 2));
 }
+var buildPaths = (node) => {
+  if (typeof node === "string")
+    return [node];
+  const paths = [];
+  if (Array.isArray(node)) {
+    for (const contract of node) {
+      paths.push(...buildPaths(contract));
+    }
+  } else {
+    for (const [parent, child] of Object.entries(node)) {
+      const children = buildPaths(child);
+      children.forEach((path4) => paths.push(`${parent}/${path4}`));
+    }
+  }
+  return paths;
+};
+async function copyPackageFiles(packageFiles, callerRootFile) {
+  const callerRootDir = path2__default.default.dirname(callerRootFile);
+  for (const [srcPackage, srcFiles] of Object.entries(packageFiles)) {
+    const allPaths = buildPaths(srcFiles);
+    const srcDir = path2__default.default.dirname(module$1.createRequire(callerRootFile).resolve(`${srcPackage}/package.json`));
+    let count = 0;
+    for (const uniquePath of allPaths) {
+      const files = await glob.glob(path2__default.default.join(srcDir, uniquePath));
+      for (const file of files) {
+        const relativePath = path2__default.default.relative(srcDir, file);
+        const destPath = path2__default.default.join(callerRootDir, relativePath);
+        await promises.mkdir(path2__default.default.dirname(destPath), { recursive: true });
+        await promises.copyFile(file, destPath);
+        count++;
+      }
+    }
+    console.log(`Copied ${count} files from ${srcPackage}`);
+  }
+}
 
 exports.Semaphore = Semaphore;
+exports.copyPackageFiles = copyPackageFiles;
 exports.populate = populate;
 exports.populateErrors = populateErrors;
 //# sourceMappingURL=out.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../src/populate.ts","../src/errors.ts","../src/semaphore.ts"],"names":["mkdir","path"],"mappings":";AAAA,SAAS,UAAU,aAAa;AAChC,SAAS,qBAAqB;AAC9B,OAAO,UAAU;AAEjB,SAAS,YAAY;AAMrB,eAAsB,SAAS,aAA0B,gBAAwB;AAC7E,QAAM,gBAAgB,KAAK,QAAQ,cAAc;AACjD,aAAW,CAAC,YAAY,QAAQ,KAAK,OAAO,QAAQ,WAAW,GAAG;AAC9D,UAAM,SAAS,KAAK,QAAQ,cAAc,cAAc,EAAE,QAAQ,GAAG,UAAU,eAAe,CAAC;AAC/F,eAAW,CAAC,UAAU,QAAQ,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACzD,YAAM,QAAQ,MAAM,KAAK,SAAS,IAAI,CAAC,MAAM,KAAK,KAAK,QAAQ,UAAU,CAAC,CAAC,CAAC;AAC5E,UAAI,QAAQ;AACZ,iBAAW,QAAQ,OAAO;AACtB,cAAM,eAAe,KAAK,SAAS,QAAQ,IAAI;AAC/C,cAAM,WAAW,KAAK,KAAK,eAAe,YAAY;AACtD,cAAM,MAAM,KAAK,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AACvD,cAAM,SAAS,MAAM,QAAQ;AAC7B;AAAA,MACJ;AACA,cAAQ,IAAI,UAAU,KAAK,IAAI,QAAQ,SAAS,UAAU,EAAE;AAAA,IAChE;AAAA,EACJ;AACJ;;;AC3BA,SAAS,SAAAA,QAAO,UAAU,SAAS,iBAAiB;AACpD,OAAOC,WAAU;AAEjB,SAAS,aAAa;;;ACCf,IAAM,YAAN,MAAgB;AAAA,EAGnB,YAAoB,KAAa;AAAb;AAFpB,SAAQ,UAAU;AAClB,SAAQ,QAAwB,CAAC;AAAA,EACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOlC,MAAa,UAAyB;AAClC,QAAI,KAAK,WAAW,KAAK,KAAK;AAC1B,YAAM,IAAI,QAAc,CAAC,YAAY,KAAK,MAAM,KAAK,OAAO,CAAC;AAAA,IACjE;AACA,SAAK;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKO,UAAgB;AACnB,QAAI,KAAK,WAAW;AAAG;AACvB,SAAK;AACL,UAAM,UAAU,KAAK,MAAM,MAAM,MAAM,MAAM;AAC7C,YAAQ;AAAA,EACZ;AACJ;;;ADvBA,IAAM,EAAE,eAAe,aAAa,UAAU,IAAI;AAOlD,eAAe,aAAa,UAAkB;AAC1C,QAAM,OAAO,MAAM,SAAS,UAAU,EAAE,UAAU,OAAO,CAAC;AAC1D,SAAO,KAAK,MAAM,IAAI;AAC1B;AAQA,eAAe,gBACX,UACA,WACA,QACF;AACE,QAAM,UAAU,QAAQ;AACxB,MAAI;AACA,UAAM,EAAE,IAAI,IAAI,MAAM,aAAa,QAAQ;AAC3C,QAAI,CAAC,OAAO,CAAC,MAAM,QAAQ,GAAG;AAAG;AACjC,QAAI,OAAO,CAAC,EAAE,KAAK,MAAM,SAAS,OAAO,EAAE,QAAQ,CAAC,QAAQ;AACxD,YAAM,OAAO,cAAc,KAAK,GAAG;AACnC,aAAO,KAAK,IAAI,KAAK,OAAO,YAAY,IAAI,CAAC;AAC7C,aAAO,SAAS,UAAU,WAAW,IAAI,CAAC,IAAI,KAAK,OAAO,YAAY,OAAO;AAAA,IACjF,CAAC;AAAA,EACL,UAAE;AACE,cAAU,QAAQ;AAAA,EACtB;AACJ;AAQA,eAAsB,eAAe,gBAAwB,WAAW,cAAc,gBAAgB,IAAI;AAEtG,QAAM,gBAAgBA,MAAK,QAAQ,cAAc;AACjD,QAAM,sBAAsBA,MAAK,KAAK,eAAe,WAAW;AAGhE,QAAM,QAAQ,MAAM,QAAQ,qBAAqB,EAAE,WAAW,KAAK,CAAC;AACpE,QAAM,YAAY,MAAM,OAAO,CAAC,SAASA,MAAK,QAAQ,IAAI,EAAE,YAAY,MAAM,OAAO;AAErF,QAAM,YAAY,IAAI,UAAU,aAAa;AAC7C,QAAM,SAAS,EAAE,MAAM,oBAAI,IAAY,GAAG,UAAU,CAAC,EAAE;AAGvD,QAAM,eAAe,UAAU;AAAA,IAAI,CAAC,SAChC,gBAAgBA,MAAK,KAAK,qBAAqB,IAAI,GAAG,WAAW,MAAM;AAAA,EAC3E;AACA,QAAM,QAAQ,IAAI,YAAY;AAG9B,QAAM,EAAE,MAAM,SAAS,IAAI;AAC3B,MAAI,SAAS,IAAI,KAAK,SAAS,MAAM,EAAE,WAAW,SAAS,SAAS,KAAK,CAAC;AAC1E,QAAM,eAAe,CAAC,GAAG,IAAI,EAAE,KAAK,OAAO,OAAO;AAClD,WAAS,IAAI,KAAK,SAAS,MAAM,EAAE,WAAW,SAAS,SAAS,MAAM,CAAC;AACvE,QAAM,kBAAkB,OAAO,YAAY,OAAO,QAAQ,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,OAAO,QAAQ,GAAG,CAAC,CAAC,CAAC;AAC5G,QAAM,WAAWA,MAAK,KAAK,eAAe,QAAQ;AAElD,QAAMD,OAAM,UAAU,EAAE,WAAW,KAAK,CAAC;AACzC,QAAM,UAAUC,MAAK,KAAK,UAAU,aAAa,GAAG,KAAK,UAAU,cAAc,MAAM,CAAC,CAAC;AACzF,UAAQ,IAAI,qBAAqB,aAAa,MAAM,SAAS;AAC7D,QAAM,UAAUA,MAAK,KAAK,UAAU,qBAAqB,GAAG,KAAK,UAAU,iBAAiB,MAAM,CAAC,CAAC;AACxG","sourcesContent":["import { copyFile, mkdir } from 'fs/promises'\nimport { createRequire } from 'module'\nimport path from 'path'\n\nimport { glob } from 'glob'\n\ntype AllowedFiles = 'artifacts' | 'deployments'\nexport type SrcFiles = { [key in AllowedFiles]?: readonly string[] }\nexport type CopyTargets = Record<string, SrcFiles>\n\nexport async function populate(copyTargets: CopyTargets, callerRootFile: string) {\n const callerRootDir = path.dirname(callerRootFile)\n for (const [srcPackage, srcFiles] of Object.entries(copyTargets)) {\n const srcDir = path.dirname(createRequire(callerRootFile).resolve(`${srcPackage}/package.json`))\n for (const [filePath, patterns] of 
Object.entries(srcFiles)) {\n const files = await glob(patterns.map((p) => path.join(srcDir, filePath, p)))\n let count = 0\n for (const file of files) {\n const relativePath = path.relative(srcDir, file)\n const destPath = path.join(callerRootDir, relativePath)\n await mkdir(path.dirname(destPath), { recursive: true })\n await copyFile(file, destPath)\n count++\n }\n console.log(`Copied ${count} ${filePath} from ${srcPackage}`)\n }\n }\n}\n","import { mkdir, readFile, readdir, writeFile } from 'fs/promises'\nimport path from 'path'\n\nimport { utils } from 'ethers'\n\nimport { Semaphore } from './semaphore'\n\nconst { ErrorFragment, FormatTypes, Interface } = utils\n\n/**\n * Asynchronously reads a JSON file from a given file path.\n * @param filePath - The path to the JSON file.\n * @returns A promise that resolves to the parsed JSON data.\n */\nasync function readJSONFile(filePath: string) {\n const data = await readFile(filePath, { encoding: 'utf8' })\n return JSON.parse(data)\n}\n\n/**\n * Processes ABI errors in parallel, controlled by a semaphore, and populates error collections.\n * @param filePath - The file path to read.\n * @param semaphore - Semaphore instance for concurrency control.\n * @param errors - Object to collect full errors and error selectors.\n */\nasync function parallelProcess(\n filePath: string,\n semaphore: Semaphore,\n errors: { full: Set<string>; selector: Record<string, string> }\n) {\n await semaphore.acquire()\n try {\n const { abi } = await readJSONFile(filePath)\n if (!abi || !Array.isArray(abi)) return\n abi.filter(({ type }) => type === 'error').forEach((obj) => {\n const frag = ErrorFragment.from(obj)\n errors.full.add(frag.format(FormatTypes.full))\n errors.selector[Interface.getSighash(frag)] = frag.format(FormatTypes.sighash)\n })\n } finally {\n semaphore.release()\n }\n}\n\n/**\n * Populates error information from ABI files in parallel.\n * @param callerRootFile - The root file path to start the search for ABI files.\n * @param destPath - The path relative to root to copy the files in.\n * @param maxConcurrent - Maximum number of concurrent file processing operations.\n */\nexport async function populateErrors(callerRootFile: string, destPath = 'src/errors', maxConcurrent = 50) {\n // Determining the directory paths from the caller's root file\n const callerRootDir = path.dirname(callerRootFile)\n const callerArtifactsPath = path.join(callerRootDir, 'artifacts')\n\n // Reading and filtering artifacts to process only JSON files\n const files = await readdir(callerArtifactsPath, { recursive: true })\n const jsonFiles = files.filter((file) => path.extname(file).toLowerCase() === '.json')\n\n const semaphore = new Semaphore(maxConcurrent)\n const errors = { full: new Set<string>(), selector: {} }\n\n // Parallel processing of files using the semaphore for concurrency control\n const filePromises = jsonFiles.map((file) =>\n parallelProcess(path.join(callerArtifactsPath, file), semaphore, errors)\n )\n await Promise.all(filePromises)\n\n // Sorting and organizing errors for output\n const { full, selector } = errors\n let sorter = new Intl.Collator('en', { caseFirst: 'upper', numeric: true })\n const sortedErrors = [...full].sort(sorter.compare)\n sorter = new Intl.Collator('en', { caseFirst: 'upper', numeric: false })\n const sortedSelectors = Object.fromEntries(Object.entries(selector).sort(([a], [b]) => sorter.compare(a, b)))\n const errorDir = path.join(callerRootDir, destPath)\n // Writing errors to files in the error directory\n await 
mkdir(errorDir, { recursive: true })\n await writeFile(path.join(errorDir, 'errors.json'), JSON.stringify(sortedErrors, null, 2))\n console.log(`Generated abi for ${sortedErrors.length} errors`)\n await writeFile(path.join(errorDir, 'errorSelectors.json'), JSON.stringify(sortedSelectors, null, 2))\n}\n","/**\n * Semaphore class for controlling access to a resource by multiple processes.\n * It maintains a counter and a queue for managing access.\n */\nexport class Semaphore {\n private counter = 0\n private queue: (() => void)[] = []\n constructor(private max: number) {}\n\n /**\n * Acquires a lock on the semaphore. If the semaphore is at its maximum,\n * the function will wait until it can acquire the lock.\n * @returns A promise that resolves when the lock has been acquired.\n */\n public async acquire(): Promise<void> {\n if (this.counter >= this.max) {\n await new Promise<void>((resolve) => this.queue.push(resolve))\n }\n this.counter++\n }\n\n /**\n * Releases a lock on the semaphore.\n */\n public release(): void {\n if (this.counter == 0) return\n this.counter--\n const resolve = this.queue.shift() ?? (() => null)\n resolve()\n }\n}\n"]}
+ {"version":3,"sources":["../src/populate.ts","../src/errors.ts","../src/semaphore.ts","../src/copyFiles.ts"],"names":["mkdir","path","copyFile","createRequire","glob"],"mappings":";AAAA,SAAS,UAAU,aAAa;AAChC,SAAS,qBAAqB;AAC9B,OAAO,UAAU;AAEjB,SAAS,YAAY;AAMrB,eAAsB,SAAS,aAA0B,gBAAwB;AAC7E,QAAM,gBAAgB,KAAK,QAAQ,cAAc;AACjD,aAAW,CAAC,YAAY,QAAQ,KAAK,OAAO,QAAQ,WAAW,GAAG;AAC9D,UAAM,SAAS,KAAK,QAAQ,cAAc,cAAc,EAAE,QAAQ,GAAG,UAAU,eAAe,CAAC;AAC/F,eAAW,CAAC,UAAU,QAAQ,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACzD,YAAM,QAAQ,MAAM,KAAK,SAAS,IAAI,CAAC,MAAM,KAAK,KAAK,QAAQ,UAAU,CAAC,CAAC,CAAC;AAC5E,UAAI,QAAQ;AACZ,iBAAW,QAAQ,OAAO;AACtB,cAAM,eAAe,KAAK,SAAS,QAAQ,IAAI;AAC/C,cAAM,WAAW,KAAK,KAAK,eAAe,YAAY;AACtD,cAAM,MAAM,KAAK,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AACvD,cAAM,SAAS,MAAM,QAAQ;AAC7B;AAAA,MACJ;AACA,cAAQ,IAAI,UAAU,KAAK,IAAI,QAAQ,SAAS,UAAU,EAAE;AAAA,IAChE;AAAA,EACJ;AACJ;;;AC3BA,SAAS,SAAAA,QAAO,UAAU,SAAS,iBAAiB;AACpD,OAAOC,WAAU;AAEjB,SAAS,aAAa;;;ACCf,IAAM,YAAN,MAAgB;AAAA,EAGnB,YAAoB,KAAa;AAAb;AAFpB,SAAQ,UAAU;AAClB,SAAQ,QAAwB,CAAC;AAAA,EACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOlC,MAAa,UAAyB;AAClC,QAAI,KAAK,WAAW,KAAK,KAAK;AAC1B,YAAM,IAAI,QAAc,CAAC,YAAY,KAAK,MAAM,KAAK,OAAO,CAAC;AAAA,IACjE;AACA,SAAK;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKO,UAAgB;AACnB,QAAI,KAAK,WAAW;AAAG;AACvB,SAAK;AACL,UAAM,UAAU,KAAK,MAAM,MAAM,MAAM,MAAM;AAC7C,YAAQ;AAAA,EACZ;AACJ;;;ADvBA,IAAM,EAAE,eAAe,aAAa,UAAU,IAAI;AAOlD,eAAe,aAAa,UAAkB;AAC1C,QAAM,OAAO,MAAM,SAAS,UAAU,EAAE,UAAU,OAAO,CAAC;AAC1D,SAAO,KAAK,MAAM,IAAI;AAC1B;AAQA,eAAe,gBACX,UACA,WACA,QACF;AACE,QAAM,UAAU,QAAQ;AACxB,MAAI;AACA,UAAM,EAAE,IAAI,IAAI,MAAM,aAAa,QAAQ;AAC3C,QAAI,CAAC,OAAO,CAAC,MAAM,QAAQ,GAAG;AAAG;AACjC,QAAI,OAAO,CAAC,EAAE,KAAK,MAAM,SAAS,OAAO,EAAE,QAAQ,CAAC,QAAQ;AACxD,YAAM,OAAO,cAAc,KAAK,GAAG;AACnC,aAAO,KAAK,IAAI,KAAK,OAAO,YAAY,IAAI,CAAC;AAC7C,aAAO,SAAS,UAAU,WAAW,IAAI,CAAC,IAAI,KAAK,OAAO,YAAY,OAAO;AAAA,IACjF,CAAC;AAAA,EACL,UAAE;AACE,cAAU,QAAQ;AAAA,EACtB;AACJ;AAQA,eAAsB,eAAe,gBAAwB,WAAW,cAAc,gBAAgB,IAAI;AAEtG,QAAM,gBAAgBA,MAAK,QAAQ,cAAc;AACjD,QAAM,sBAAsBA,MAAK,KAAK,eAAe,WAAW;AAGhE,QAAM,QAAQ,MAAM,QAAQ,qBAAqB,EAAE,WAAW,KAAK,CAAC;AACpE,QAAM,YAAY,MAAM,OAAO,CAAC,SAASA,MAAK,QAAQ,IAAI,EAAE,YAAY,MAAM,OAAO;AAErF,QAAM,YAAY,IAAI,UAAU,aAAa;AAC7C,QAAM,SAAS,EAAE,MAAM,oBAAI,IAAY,GAAG,UAAU,CAAC,EAAE;AAGvD,QAAM,eAAe,UAAU;AAAA,IAAI,CAAC,SAChC,gBAAgBA,MAAK,KAAK,qBAAqB,IAAI,GAAG,WAAW,MAAM;AAAA,EAC3E;AACA,QAAM,QAAQ,IAAI,YAAY;AAG9B,QAAM,EAAE,MAAM,SAAS,IAAI;AAC3B,MAAI,SAAS,IAAI,KAAK,SAAS,MAAM,EAAE,WAAW,SAAS,SAAS,KAAK,CAAC;AAC1E,QAAM,eAAe,CAAC,GAAG,IAAI,EAAE,KAAK,OAAO,OAAO;AAClD,WAAS,IAAI,KAAK,SAAS,MAAM,EAAE,WAAW,SAAS,SAAS,MAAM,CAAC;AACvE,QAAM,kBAAkB,OAAO,YAAY,OAAO,QAAQ,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,OAAO,QAAQ,GAAG,CAAC,CAAC,CAAC;AAC5G,QAAM,WAAWA,MAAK,KAAK,eAAe,QAAQ;AAElD,QAAMD,OAAM,UAAU,EAAE,WAAW,KAAK,CAAC;AACzC,QAAM,UAAUC,MAAK,KAAK,UAAU,aAAa,GAAG,KAAK,UAAU,cAAc,MAAM,CAAC,CAAC;AACzF,UAAQ,IAAI,qBAAqB,aAAa,MAAM,SAAS;AAC7D,QAAM,UAAUA,MAAK,KAAK,UAAU,qBAAqB,GAAG,KAAK,UAAU,iBAAiB,MAAM,CAAC,CAAC;AACxG;;;AEhFA,SAAS,YAAAC,WAAU,SAAAF,cAAa;AAChC,SAAS,iBAAAG,sBAAqB;AAC9B,OAAOF,WAAU;AAEjB,SAAS,QAAAG,aAAY;AAiBd,IAAM,aAAa,CAAC,SAAuD;AAE9E,MAAI,OAAO,SAAS;AAAU,WAAO,CAAC,IAAI;AAE1C,QAAM,QAAkB,CAAC;AAEzB,MAAI,MAAM,QAAQ,IAAI,GAAG;AACrB,eAAW,YAAY,MAAM;AACzB,YAAM,KAAK,GAAG,WAAW,QAAQ,CAAC;AAAA,IACtC;AAAA,EACJ,OAAO;AAEH,eAAW,CAAC,QAAQ,KAAK,KAAK,OAAO,QAAQ,IAAI,GAAG;AAChD,YAAM,WAAW,WAAW,KAAK;AACjC,eAAS,QAAQ,CAACH,UAAS,MAAM,KAAK,GAAG,MAAM,IAAIA,KAAI,EAAE,CAAC;AAAA,IAC9D;AAAA,EACJ;AACA,SAAO;AACX;AAWA,eAAsB,iBAAiB,cAA4B,gBAAwB;AACvF,QAAM,gBAAgBA,MAAK,QAAQ,cAAc;AACjD,aAAW,CAAC,YAAY,QAAQ,KAAK,OAAO,QAAQ,YAAY,GAAG;AAC/D,UAAM,WAAW,WAAW,QAAQ;AA
CpC,UAAM,SAASA,MAAK,QAAQE,eAAc,cAAc,EAAE,QAAQ,GAAG,UAAU,eAAe,CAAC;AAC/F,QAAI,QAAQ;AACZ,eAAW,cAAc,UAAU;AAC/B,YAAM,QAAQ,MAAMC,MAAKH,MAAK,KAAK,QAAQ,UAAU,CAAC;AACtD,iBAAW,QAAQ,OAAO;AACtB,cAAM,eAAeA,MAAK,SAAS,QAAQ,IAAI;AAC/C,cAAM,WAAWA,MAAK,KAAK,eAAe,YAAY;AACtD,cAAMD,OAAMC,MAAK,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AACvD,cAAMC,UAAS,MAAM,QAAQ;AAC7B;AAAA,MACJ;AAAA,IACJ;AACA,YAAQ,IAAI,UAAU,KAAK,eAAe,UAAU,EAAE;AAAA,EAC1D;AACJ","sourcesContent":["import { copyFile, mkdir } from 'fs/promises'\nimport { createRequire } from 'module'\nimport path from 'path'\n\nimport { glob } from 'glob'\n\ntype AllowedFiles = 'artifacts' | 'deployments'\nexport type SrcFiles = { [key in AllowedFiles]?: readonly string[] }\nexport type CopyTargets = Record<string, SrcFiles>\n\nexport async function populate(copyTargets: CopyTargets, callerRootFile: string) {\n const callerRootDir = path.dirname(callerRootFile)\n for (const [srcPackage, srcFiles] of Object.entries(copyTargets)) {\n const srcDir = path.dirname(createRequire(callerRootFile).resolve(`${srcPackage}/package.json`))\n for (const [filePath, patterns] of Object.entries(srcFiles)) {\n const files = await glob(patterns.map((p) => path.join(srcDir, filePath, p)))\n let count = 0\n for (const file of files) {\n const relativePath = path.relative(srcDir, file)\n const destPath = path.join(callerRootDir, relativePath)\n await mkdir(path.dirname(destPath), { recursive: true })\n await copyFile(file, destPath)\n count++\n }\n console.log(`Copied ${count} ${filePath} from ${srcPackage}`)\n }\n }\n}\n","import { mkdir, readFile, readdir, writeFile } from 'fs/promises'\nimport path from 'path'\n\nimport { utils } from 'ethers'\n\nimport { Semaphore } from './semaphore'\n\nconst { ErrorFragment, FormatTypes, Interface } = utils\n\n/**\n * Asynchronously reads a JSON file from a given file path.\n * @param filePath - The path to the JSON file.\n * @returns A promise that resolves to the parsed JSON data.\n */\nasync function readJSONFile(filePath: string) {\n const data = await readFile(filePath, { encoding: 'utf8' })\n return JSON.parse(data)\n}\n\n/**\n * Processes ABI errors in parallel, controlled by a semaphore, and populates error collections.\n * @param filePath - The file path to read.\n * @param semaphore - Semaphore instance for concurrency control.\n * @param errors - Object to collect full errors and error selectors.\n */\nasync function parallelProcess(\n filePath: string,\n semaphore: Semaphore,\n errors: { full: Set<string>; selector: Record<string, string> }\n) {\n await semaphore.acquire()\n try {\n const { abi } = await readJSONFile(filePath)\n if (!abi || !Array.isArray(abi)) return\n abi.filter(({ type }) => type === 'error').forEach((obj) => {\n const frag = ErrorFragment.from(obj)\n errors.full.add(frag.format(FormatTypes.full))\n errors.selector[Interface.getSighash(frag)] = frag.format(FormatTypes.sighash)\n })\n } finally {\n semaphore.release()\n }\n}\n\n/**\n * Populates error information from ABI files in parallel.\n * @param callerRootFile - The root file path to start the search for ABI files.\n * @param destPath - The path relative to root to copy the files in.\n * @param maxConcurrent - Maximum number of concurrent file processing operations.\n */\nexport async function populateErrors(callerRootFile: string, destPath = 'src/errors', maxConcurrent = 50) {\n // Determining the directory paths from the caller's root file\n const callerRootDir = path.dirname(callerRootFile)\n const callerArtifactsPath = path.join(callerRootDir, 'artifacts')\n\n // 
Reading and filtering artifacts to process only JSON files\n const files = await readdir(callerArtifactsPath, { recursive: true })\n const jsonFiles = files.filter((file) => path.extname(file).toLowerCase() === '.json')\n\n const semaphore = new Semaphore(maxConcurrent)\n const errors = { full: new Set<string>(), selector: {} }\n\n // Parallel processing of files using the semaphore for concurrency control\n const filePromises = jsonFiles.map((file) =>\n parallelProcess(path.join(callerArtifactsPath, file), semaphore, errors)\n )\n await Promise.all(filePromises)\n\n // Sorting and organizing errors for output\n const { full, selector } = errors\n let sorter = new Intl.Collator('en', { caseFirst: 'upper', numeric: true })\n const sortedErrors = [...full].sort(sorter.compare)\n sorter = new Intl.Collator('en', { caseFirst: 'upper', numeric: false })\n const sortedSelectors = Object.fromEntries(Object.entries(selector).sort(([a], [b]) => sorter.compare(a, b)))\n const errorDir = path.join(callerRootDir, destPath)\n // Writing errors to files in the error directory\n await mkdir(errorDir, { recursive: true })\n await writeFile(path.join(errorDir, 'errors.json'), JSON.stringify(sortedErrors, null, 2))\n console.log(`Generated abi for ${sortedErrors.length} errors`)\n await writeFile(path.join(errorDir, 'errorSelectors.json'), JSON.stringify(sortedSelectors, null, 2))\n}\n","/**\n * Semaphore class for controlling access to a resource by multiple processes.\n * It maintains a counter and a queue for managing access.\n */\nexport class Semaphore {\n private counter = 0\n private queue: (() => void)[] = []\n constructor(private max: number) {}\n\n /**\n * Acquires a lock on the semaphore. If the semaphore is at its maximum,\n * the function will wait until it can acquire the lock.\n * @returns A promise that resolves when the lock has been acquired.\n */\n public async acquire(): Promise<void> {\n if (this.counter >= this.max) {\n await new Promise<void>((resolve) => this.queue.push(resolve))\n }\n this.counter++\n }\n\n /**\n * Releases a lock on the semaphore.\n */\n public release(): void {\n if (this.counter == 0) return\n this.counter--\n const resolve = this.queue.shift() ?? (() => null)\n resolve()\n }\n}\n","import { copyFile, mkdir } from 'fs/promises'\nimport { createRequire } from 'module'\nimport path from 'path'\n\nimport { glob } from 'glob'\n\ntype Package = string\ntype File = string\ntype Directory = Array<File | NestedDirectory>\ninterface NestedDirectory {\n [key: string]: Directory | NestedDirectory\n}\nexport interface PackageFiles {\n [key: Package]: NestedDirectory | Directory\n}\n\n/**\n * Recursively traverses a directory structure to build paths to leaf nodes.\n * @param node - A directory structure represented as a string (for files), an array (for directories), or an object (for nested directories).\n * @returns An array of strings, each representing a path from the root to a leaf node.\n */\nexport const buildPaths = (node: NestedDirectory | Directory | File): string[] => {\n // base case, we're at a leaf node, return the filename\n if (typeof node === 'string') return [node]\n\n const paths: string[] = []\n // If the current node is an array, recursively process each file / nested dir\n if (Array.isArray(node)) {\n for (const contract of node) {\n paths.push(...buildPaths(contract))\n }\n } else {\n // Current node is a nested directory. 
Recursively process each entry.\n for (const [parent, child] of Object.entries(node)) {\n const children = buildPaths(child)\n children.forEach((path) => paths.push(`${parent}/${path}`))\n }\n }\n return paths\n}\n\n/**\n * Asynchronously copies files from source packages to a target root directory based on the provided directory structure.\n * This function iterates over each source package, resolves all file paths,\n * and copies each file to the corresponding location within the caller's root directory.\n *\n * @param packageFiles - An object mapping package names to their directory structures,\n * where each directory structure defines the files and nested directories to be copied.\n * @param callerRootFile - The absolute path to the root file of the caller, used to determine the root directory into which the files will be copied.\n */\nexport async function copyPackageFiles(packageFiles: PackageFiles, callerRootFile: string) {\n const callerRootDir = path.dirname(callerRootFile)\n for (const [srcPackage, srcFiles] of Object.entries(packageFiles)) {\n const allPaths = buildPaths(srcFiles)\n const srcDir = path.dirname(createRequire(callerRootFile).resolve(`${srcPackage}/package.json`))\n let count = 0\n for (const uniquePath of allPaths) {\n const files = await glob(path.join(srcDir, uniquePath))\n for (const file of files) {\n const relativePath = path.relative(srcDir, file)\n const destPath = path.join(callerRootDir, relativePath)\n await mkdir(path.dirname(destPath), { recursive: true })\n await copyFile(file, destPath)\n count++\n }\n }\n console.log(`Copied ${count} files from ${srcPackage}`)\n }\n}\n"]}
package/dist/index.d.mts CHANGED
@@ -34,4 +34,23 @@ declare class Semaphore {
     release(): void;
 }
 
-export { type CopyTargets, Semaphore, type SrcFiles, populate, populateErrors };
+type File = string;
+type Directory = Array<File | NestedDirectory>;
+interface NestedDirectory {
+    [key: string]: Directory | NestedDirectory;
+}
+interface PackageFiles {
+    [key: Package]: NestedDirectory | Directory;
+}
+/**
+ * Asynchronously copies files from source packages to a target root directory based on the provided directory structure.
+ * This function iterates over each source package, resolves all file paths,
+ * and copies each file to the corresponding location within the caller's root directory.
+ *
+ * @param packageFiles - An object mapping package names to their directory structures,
+ * where each directory structure defines the files and nested directories to be copied.
+ * @param callerRootFile - The absolute path to the root file of the caller, used to determine the root directory into which the files will be copied.
+ */
+declare function copyPackageFiles(packageFiles: PackageFiles, callerRootFile: string): Promise<void>;
+
+export { type CopyTargets, type PackageFiles, Semaphore, type SrcFiles, copyPackageFiles, populate, populateErrors };
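The declaration above is the new public surface for this feature. A minimal usage sketch, assuming a CommonJS consumer script; the dependency name and glob patterns in the copy spec are hypothetical.

```ts
import { copyPackageFiles } from '@layerzerolabs/evm-sdks-build';

// Hypothetical copy spec: keys are dependency package names, nested objects
// are directories, arrays list files or glob patterns beneath them.
const packageFiles = {
    '@example/contracts': {
        artifacts: ['**/*.json'],
        deployments: [{ sepolia: ['*.json'] }],
    },
};

// __filename is used both to resolve each dependency (via createRequire) and
// as the root under which the matched files are mirrored.
copyPackageFiles(packageFiles, __filename).catch((error) => {
    console.error(error);
    process.exit(1);
});
```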
package/dist/index.d.ts CHANGED
@@ -34,4 +34,23 @@ declare class Semaphore {
     release(): void;
 }
 
-export { type CopyTargets, Semaphore, type SrcFiles, populate, populateErrors };
+type File = string;
+type Directory = Array<File | NestedDirectory>;
+interface NestedDirectory {
+    [key: string]: Directory | NestedDirectory;
+}
+interface PackageFiles {
+    [key: Package]: NestedDirectory | Directory;
+}
+/**
+ * Asynchronously copies files from source packages to a target root directory based on the provided directory structure.
+ * This function iterates over each source package, resolves all file paths,
+ * and copies each file to the corresponding location within the caller's root directory.
+ *
+ * @param packageFiles - An object mapping package names to their directory structures,
+ * where each directory structure defines the files and nested directories to be copied.
+ * @param callerRootFile - The absolute path to the root file of the caller, used to determine the root directory into which the files will be copied.
+ */
+declare function copyPackageFiles(packageFiles: PackageFiles, callerRootFile: string): Promise<void>;
+
+export { type CopyTargets, type PackageFiles, Semaphore, type SrcFiles, copyPackageFiles, populate, populateErrors };
package/dist/index.mjs CHANGED
@@ -97,7 +97,42 @@ async function populateErrors(callerRootFile, destPath = "src/errors", maxConcur
   console.log(`Generated abi for ${sortedErrors.length} errors`);
   await writeFile(path2.join(errorDir, "errorSelectors.json"), JSON.stringify(sortedSelectors, null, 2));
 }
+var buildPaths = (node) => {
+  if (typeof node === "string")
+    return [node];
+  const paths = [];
+  if (Array.isArray(node)) {
+    for (const contract of node) {
+      paths.push(...buildPaths(contract));
+    }
+  } else {
+    for (const [parent, child] of Object.entries(node)) {
+      const children = buildPaths(child);
+      children.forEach((path4) => paths.push(`${parent}/${path4}`));
+    }
+  }
+  return paths;
+};
+async function copyPackageFiles(packageFiles, callerRootFile) {
+  const callerRootDir = path2.dirname(callerRootFile);
+  for (const [srcPackage, srcFiles] of Object.entries(packageFiles)) {
+    const allPaths = buildPaths(srcFiles);
+    const srcDir = path2.dirname(createRequire(callerRootFile).resolve(`${srcPackage}/package.json`));
+    let count = 0;
+    for (const uniquePath of allPaths) {
+      const files = await glob(path2.join(srcDir, uniquePath));
+      for (const file of files) {
+        const relativePath = path2.relative(srcDir, file);
+        const destPath = path2.join(callerRootDir, relativePath);
+        await mkdir(path2.dirname(destPath), { recursive: true });
+        await copyFile(file, destPath);
+        count++;
+      }
+    }
+    console.log(`Copied ${count} files from ${srcPackage}`);
+  }
+}
 
-export { Semaphore, populate, populateErrors };
+export { Semaphore, copyPackageFiles, populate, populateErrors };
 //# sourceMappingURL=out.js.map
 //# sourceMappingURL=index.mjs.map
@@ -1 +1 @@
- {"version":3,"sources":["../src/populate.ts","../src/errors.ts","../src/semaphore.ts"],"names":["mkdir","path"],"mappings":";AAAA,SAAS,UAAU,aAAa;AAChC,SAAS,qBAAqB;AAC9B,OAAO,UAAU;AAEjB,SAAS,YAAY;AAMrB,eAAsB,SAAS,aAA0B,gBAAwB;AAC7E,QAAM,gBAAgB,KAAK,QAAQ,cAAc;AACjD,aAAW,CAAC,YAAY,QAAQ,KAAK,OAAO,QAAQ,WAAW,GAAG;AAC9D,UAAM,SAAS,KAAK,QAAQ,cAAc,cAAc,EAAE,QAAQ,GAAG,UAAU,eAAe,CAAC;AAC/F,eAAW,CAAC,UAAU,QAAQ,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACzD,YAAM,QAAQ,MAAM,KAAK,SAAS,IAAI,CAAC,MAAM,KAAK,KAAK,QAAQ,UAAU,CAAC,CAAC,CAAC;AAC5E,UAAI,QAAQ;AACZ,iBAAW,QAAQ,OAAO;AACtB,cAAM,eAAe,KAAK,SAAS,QAAQ,IAAI;AAC/C,cAAM,WAAW,KAAK,KAAK,eAAe,YAAY;AACtD,cAAM,MAAM,KAAK,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AACvD,cAAM,SAAS,MAAM,QAAQ;AAC7B;AAAA,MACJ;AACA,cAAQ,IAAI,UAAU,KAAK,IAAI,QAAQ,SAAS,UAAU,EAAE;AAAA,IAChE;AAAA,EACJ;AACJ;;;AC3BA,SAAS,SAAAA,QAAO,UAAU,SAAS,iBAAiB;AACpD,OAAOC,WAAU;AAEjB,SAAS,aAAa;;;ACCf,IAAM,YAAN,MAAgB;AAAA,EAGnB,YAAoB,KAAa;AAAb;AAFpB,SAAQ,UAAU;AAClB,SAAQ,QAAwB,CAAC;AAAA,EACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOlC,MAAa,UAAyB;AAClC,QAAI,KAAK,WAAW,KAAK,KAAK;AAC1B,YAAM,IAAI,QAAc,CAAC,YAAY,KAAK,MAAM,KAAK,OAAO,CAAC;AAAA,IACjE;AACA,SAAK;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKO,UAAgB;AACnB,QAAI,KAAK,WAAW;AAAG;AACvB,SAAK;AACL,UAAM,UAAU,KAAK,MAAM,MAAM,MAAM,MAAM;AAC7C,YAAQ;AAAA,EACZ;AACJ;;;ADvBA,IAAM,EAAE,eAAe,aAAa,UAAU,IAAI;AAOlD,eAAe,aAAa,UAAkB;AAC1C,QAAM,OAAO,MAAM,SAAS,UAAU,EAAE,UAAU,OAAO,CAAC;AAC1D,SAAO,KAAK,MAAM,IAAI;AAC1B;AAQA,eAAe,gBACX,UACA,WACA,QACF;AACE,QAAM,UAAU,QAAQ;AACxB,MAAI;AACA,UAAM,EAAE,IAAI,IAAI,MAAM,aAAa,QAAQ;AAC3C,QAAI,CAAC,OAAO,CAAC,MAAM,QAAQ,GAAG;AAAG;AACjC,QAAI,OAAO,CAAC,EAAE,KAAK,MAAM,SAAS,OAAO,EAAE,QAAQ,CAAC,QAAQ;AACxD,YAAM,OAAO,cAAc,KAAK,GAAG;AACnC,aAAO,KAAK,IAAI,KAAK,OAAO,YAAY,IAAI,CAAC;AAC7C,aAAO,SAAS,UAAU,WAAW,IAAI,CAAC,IAAI,KAAK,OAAO,YAAY,OAAO;AAAA,IACjF,CAAC;AAAA,EACL,UAAE;AACE,cAAU,QAAQ;AAAA,EACtB;AACJ;AAQA,eAAsB,eAAe,gBAAwB,WAAW,cAAc,gBAAgB,IAAI;AAEtG,QAAM,gBAAgBA,MAAK,QAAQ,cAAc;AACjD,QAAM,sBAAsBA,MAAK,KAAK,eAAe,WAAW;AAGhE,QAAM,QAAQ,MAAM,QAAQ,qBAAqB,EAAE,WAAW,KAAK,CAAC;AACpE,QAAM,YAAY,MAAM,OAAO,CAAC,SAASA,MAAK,QAAQ,IAAI,EAAE,YAAY,MAAM,OAAO;AAErF,QAAM,YAAY,IAAI,UAAU,aAAa;AAC7C,QAAM,SAAS,EAAE,MAAM,oBAAI,IAAY,GAAG,UAAU,CAAC,EAAE;AAGvD,QAAM,eAAe,UAAU;AAAA,IAAI,CAAC,SAChC,gBAAgBA,MAAK,KAAK,qBAAqB,IAAI,GAAG,WAAW,MAAM;AAAA,EAC3E;AACA,QAAM,QAAQ,IAAI,YAAY;AAG9B,QAAM,EAAE,MAAM,SAAS,IAAI;AAC3B,MAAI,SAAS,IAAI,KAAK,SAAS,MAAM,EAAE,WAAW,SAAS,SAAS,KAAK,CAAC;AAC1E,QAAM,eAAe,CAAC,GAAG,IAAI,EAAE,KAAK,OAAO,OAAO;AAClD,WAAS,IAAI,KAAK,SAAS,MAAM,EAAE,WAAW,SAAS,SAAS,MAAM,CAAC;AACvE,QAAM,kBAAkB,OAAO,YAAY,OAAO,QAAQ,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,OAAO,QAAQ,GAAG,CAAC,CAAC,CAAC;AAC5G,QAAM,WAAWA,MAAK,KAAK,eAAe,QAAQ;AAElD,QAAMD,OAAM,UAAU,EAAE,WAAW,KAAK,CAAC;AACzC,QAAM,UAAUC,MAAK,KAAK,UAAU,aAAa,GAAG,KAAK,UAAU,cAAc,MAAM,CAAC,CAAC;AACzF,UAAQ,IAAI,qBAAqB,aAAa,MAAM,SAAS;AAC7D,QAAM,UAAUA,MAAK,KAAK,UAAU,qBAAqB,GAAG,KAAK,UAAU,iBAAiB,MAAM,CAAC,CAAC;AACxG","sourcesContent":["import { copyFile, mkdir } from 'fs/promises'\nimport { createRequire } from 'module'\nimport path from 'path'\n\nimport { glob } from 'glob'\n\ntype AllowedFiles = 'artifacts' | 'deployments'\nexport type SrcFiles = { [key in AllowedFiles]?: readonly string[] }\nexport type CopyTargets = Record<string, SrcFiles>\n\nexport async function populate(copyTargets: CopyTargets, callerRootFile: string) {\n const callerRootDir = path.dirname(callerRootFile)\n for (const [srcPackage, srcFiles] of Object.entries(copyTargets)) {\n const srcDir = path.dirname(createRequire(callerRootFile).resolve(`${srcPackage}/package.json`))\n for (const [filePath, patterns] of 
Object.entries(srcFiles)) {\n const files = await glob(patterns.map((p) => path.join(srcDir, filePath, p)))\n let count = 0\n for (const file of files) {\n const relativePath = path.relative(srcDir, file)\n const destPath = path.join(callerRootDir, relativePath)\n await mkdir(path.dirname(destPath), { recursive: true })\n await copyFile(file, destPath)\n count++\n }\n console.log(`Copied ${count} ${filePath} from ${srcPackage}`)\n }\n }\n}\n","import { mkdir, readFile, readdir, writeFile } from 'fs/promises'\nimport path from 'path'\n\nimport { utils } from 'ethers'\n\nimport { Semaphore } from './semaphore'\n\nconst { ErrorFragment, FormatTypes, Interface } = utils\n\n/**\n * Asynchronously reads a JSON file from a given file path.\n * @param filePath - The path to the JSON file.\n * @returns A promise that resolves to the parsed JSON data.\n */\nasync function readJSONFile(filePath: string) {\n const data = await readFile(filePath, { encoding: 'utf8' })\n return JSON.parse(data)\n}\n\n/**\n * Processes ABI errors in parallel, controlled by a semaphore, and populates error collections.\n * @param filePath - The file path to read.\n * @param semaphore - Semaphore instance for concurrency control.\n * @param errors - Object to collect full errors and error selectors.\n */\nasync function parallelProcess(\n filePath: string,\n semaphore: Semaphore,\n errors: { full: Set<string>; selector: Record<string, string> }\n) {\n await semaphore.acquire()\n try {\n const { abi } = await readJSONFile(filePath)\n if (!abi || !Array.isArray(abi)) return\n abi.filter(({ type }) => type === 'error').forEach((obj) => {\n const frag = ErrorFragment.from(obj)\n errors.full.add(frag.format(FormatTypes.full))\n errors.selector[Interface.getSighash(frag)] = frag.format(FormatTypes.sighash)\n })\n } finally {\n semaphore.release()\n }\n}\n\n/**\n * Populates error information from ABI files in parallel.\n * @param callerRootFile - The root file path to start the search for ABI files.\n * @param destPath - The path relative to root to copy the files in.\n * @param maxConcurrent - Maximum number of concurrent file processing operations.\n */\nexport async function populateErrors(callerRootFile: string, destPath = 'src/errors', maxConcurrent = 50) {\n // Determining the directory paths from the caller's root file\n const callerRootDir = path.dirname(callerRootFile)\n const callerArtifactsPath = path.join(callerRootDir, 'artifacts')\n\n // Reading and filtering artifacts to process only JSON files\n const files = await readdir(callerArtifactsPath, { recursive: true })\n const jsonFiles = files.filter((file) => path.extname(file).toLowerCase() === '.json')\n\n const semaphore = new Semaphore(maxConcurrent)\n const errors = { full: new Set<string>(), selector: {} }\n\n // Parallel processing of files using the semaphore for concurrency control\n const filePromises = jsonFiles.map((file) =>\n parallelProcess(path.join(callerArtifactsPath, file), semaphore, errors)\n )\n await Promise.all(filePromises)\n\n // Sorting and organizing errors for output\n const { full, selector } = errors\n let sorter = new Intl.Collator('en', { caseFirst: 'upper', numeric: true })\n const sortedErrors = [...full].sort(sorter.compare)\n sorter = new Intl.Collator('en', { caseFirst: 'upper', numeric: false })\n const sortedSelectors = Object.fromEntries(Object.entries(selector).sort(([a], [b]) => sorter.compare(a, b)))\n const errorDir = path.join(callerRootDir, destPath)\n // Writing errors to files in the error directory\n await 
mkdir(errorDir, { recursive: true })\n await writeFile(path.join(errorDir, 'errors.json'), JSON.stringify(sortedErrors, null, 2))\n console.log(`Generated abi for ${sortedErrors.length} errors`)\n await writeFile(path.join(errorDir, 'errorSelectors.json'), JSON.stringify(sortedSelectors, null, 2))\n}\n","/**\n * Semaphore class for controlling access to a resource by multiple processes.\n * It maintains a counter and a queue for managing access.\n */\nexport class Semaphore {\n private counter = 0\n private queue: (() => void)[] = []\n constructor(private max: number) {}\n\n /**\n * Acquires a lock on the semaphore. If the semaphore is at its maximum,\n * the function will wait until it can acquire the lock.\n * @returns A promise that resolves when the lock has been acquired.\n */\n public async acquire(): Promise<void> {\n if (this.counter >= this.max) {\n await new Promise<void>((resolve) => this.queue.push(resolve))\n }\n this.counter++\n }\n\n /**\n * Releases a lock on the semaphore.\n */\n public release(): void {\n if (this.counter == 0) return\n this.counter--\n const resolve = this.queue.shift() ?? (() => null)\n resolve()\n }\n}\n"]}
+ {"version":3,"sources":["../src/populate.ts","../src/errors.ts","../src/semaphore.ts","../src/copyFiles.ts"],"names":["mkdir","path","copyFile","createRequire","glob"],"mappings":";AAAA,SAAS,UAAU,aAAa;AAChC,SAAS,qBAAqB;AAC9B,OAAO,UAAU;AAEjB,SAAS,YAAY;AAMrB,eAAsB,SAAS,aAA0B,gBAAwB;AAC7E,QAAM,gBAAgB,KAAK,QAAQ,cAAc;AACjD,aAAW,CAAC,YAAY,QAAQ,KAAK,OAAO,QAAQ,WAAW,GAAG;AAC9D,UAAM,SAAS,KAAK,QAAQ,cAAc,cAAc,EAAE,QAAQ,GAAG,UAAU,eAAe,CAAC;AAC/F,eAAW,CAAC,UAAU,QAAQ,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACzD,YAAM,QAAQ,MAAM,KAAK,SAAS,IAAI,CAAC,MAAM,KAAK,KAAK,QAAQ,UAAU,CAAC,CAAC,CAAC;AAC5E,UAAI,QAAQ;AACZ,iBAAW,QAAQ,OAAO;AACtB,cAAM,eAAe,KAAK,SAAS,QAAQ,IAAI;AAC/C,cAAM,WAAW,KAAK,KAAK,eAAe,YAAY;AACtD,cAAM,MAAM,KAAK,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AACvD,cAAM,SAAS,MAAM,QAAQ;AAC7B;AAAA,MACJ;AACA,cAAQ,IAAI,UAAU,KAAK,IAAI,QAAQ,SAAS,UAAU,EAAE;AAAA,IAChE;AAAA,EACJ;AACJ;;;AC3BA,SAAS,SAAAA,QAAO,UAAU,SAAS,iBAAiB;AACpD,OAAOC,WAAU;AAEjB,SAAS,aAAa;;;ACCf,IAAM,YAAN,MAAgB;AAAA,EAGnB,YAAoB,KAAa;AAAb;AAFpB,SAAQ,UAAU;AAClB,SAAQ,QAAwB,CAAC;AAAA,EACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOlC,MAAa,UAAyB;AAClC,QAAI,KAAK,WAAW,KAAK,KAAK;AAC1B,YAAM,IAAI,QAAc,CAAC,YAAY,KAAK,MAAM,KAAK,OAAO,CAAC;AAAA,IACjE;AACA,SAAK;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKO,UAAgB;AACnB,QAAI,KAAK,WAAW;AAAG;AACvB,SAAK;AACL,UAAM,UAAU,KAAK,MAAM,MAAM,MAAM,MAAM;AAC7C,YAAQ;AAAA,EACZ;AACJ;;;ADvBA,IAAM,EAAE,eAAe,aAAa,UAAU,IAAI;AAOlD,eAAe,aAAa,UAAkB;AAC1C,QAAM,OAAO,MAAM,SAAS,UAAU,EAAE,UAAU,OAAO,CAAC;AAC1D,SAAO,KAAK,MAAM,IAAI;AAC1B;AAQA,eAAe,gBACX,UACA,WACA,QACF;AACE,QAAM,UAAU,QAAQ;AACxB,MAAI;AACA,UAAM,EAAE,IAAI,IAAI,MAAM,aAAa,QAAQ;AAC3C,QAAI,CAAC,OAAO,CAAC,MAAM,QAAQ,GAAG;AAAG;AACjC,QAAI,OAAO,CAAC,EAAE,KAAK,MAAM,SAAS,OAAO,EAAE,QAAQ,CAAC,QAAQ;AACxD,YAAM,OAAO,cAAc,KAAK,GAAG;AACnC,aAAO,KAAK,IAAI,KAAK,OAAO,YAAY,IAAI,CAAC;AAC7C,aAAO,SAAS,UAAU,WAAW,IAAI,CAAC,IAAI,KAAK,OAAO,YAAY,OAAO;AAAA,IACjF,CAAC;AAAA,EACL,UAAE;AACE,cAAU,QAAQ;AAAA,EACtB;AACJ;AAQA,eAAsB,eAAe,gBAAwB,WAAW,cAAc,gBAAgB,IAAI;AAEtG,QAAM,gBAAgBA,MAAK,QAAQ,cAAc;AACjD,QAAM,sBAAsBA,MAAK,KAAK,eAAe,WAAW;AAGhE,QAAM,QAAQ,MAAM,QAAQ,qBAAqB,EAAE,WAAW,KAAK,CAAC;AACpE,QAAM,YAAY,MAAM,OAAO,CAAC,SAASA,MAAK,QAAQ,IAAI,EAAE,YAAY,MAAM,OAAO;AAErF,QAAM,YAAY,IAAI,UAAU,aAAa;AAC7C,QAAM,SAAS,EAAE,MAAM,oBAAI,IAAY,GAAG,UAAU,CAAC,EAAE;AAGvD,QAAM,eAAe,UAAU;AAAA,IAAI,CAAC,SAChC,gBAAgBA,MAAK,KAAK,qBAAqB,IAAI,GAAG,WAAW,MAAM;AAAA,EAC3E;AACA,QAAM,QAAQ,IAAI,YAAY;AAG9B,QAAM,EAAE,MAAM,SAAS,IAAI;AAC3B,MAAI,SAAS,IAAI,KAAK,SAAS,MAAM,EAAE,WAAW,SAAS,SAAS,KAAK,CAAC;AAC1E,QAAM,eAAe,CAAC,GAAG,IAAI,EAAE,KAAK,OAAO,OAAO;AAClD,WAAS,IAAI,KAAK,SAAS,MAAM,EAAE,WAAW,SAAS,SAAS,MAAM,CAAC;AACvE,QAAM,kBAAkB,OAAO,YAAY,OAAO,QAAQ,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,OAAO,QAAQ,GAAG,CAAC,CAAC,CAAC;AAC5G,QAAM,WAAWA,MAAK,KAAK,eAAe,QAAQ;AAElD,QAAMD,OAAM,UAAU,EAAE,WAAW,KAAK,CAAC;AACzC,QAAM,UAAUC,MAAK,KAAK,UAAU,aAAa,GAAG,KAAK,UAAU,cAAc,MAAM,CAAC,CAAC;AACzF,UAAQ,IAAI,qBAAqB,aAAa,MAAM,SAAS;AAC7D,QAAM,UAAUA,MAAK,KAAK,UAAU,qBAAqB,GAAG,KAAK,UAAU,iBAAiB,MAAM,CAAC,CAAC;AACxG;;;AEhFA,SAAS,YAAAC,WAAU,SAAAF,cAAa;AAChC,SAAS,iBAAAG,sBAAqB;AAC9B,OAAOF,WAAU;AAEjB,SAAS,QAAAG,aAAY;AAiBd,IAAM,aAAa,CAAC,SAAuD;AAE9E,MAAI,OAAO,SAAS;AAAU,WAAO,CAAC,IAAI;AAE1C,QAAM,QAAkB,CAAC;AAEzB,MAAI,MAAM,QAAQ,IAAI,GAAG;AACrB,eAAW,YAAY,MAAM;AACzB,YAAM,KAAK,GAAG,WAAW,QAAQ,CAAC;AAAA,IACtC;AAAA,EACJ,OAAO;AAEH,eAAW,CAAC,QAAQ,KAAK,KAAK,OAAO,QAAQ,IAAI,GAAG;AAChD,YAAM,WAAW,WAAW,KAAK;AACjC,eAAS,QAAQ,CAACH,UAAS,MAAM,KAAK,GAAG,MAAM,IAAIA,KAAI,EAAE,CAAC;AAAA,IAC9D;AAAA,EACJ;AACA,SAAO;AACX;AAWA,eAAsB,iBAAiB,cAA4B,gBAAwB;AACvF,QAAM,gBAAgBA,MAAK,QAAQ,cAAc;AACjD,aAAW,CAAC,YAAY,QAAQ,KAAK,OAAO,QAAQ,YAAY,GAAG;AAC/D,UAAM,WAAW,WAAW,QAAQ;AA
CpC,UAAM,SAASA,MAAK,QAAQE,eAAc,cAAc,EAAE,QAAQ,GAAG,UAAU,eAAe,CAAC;AAC/F,QAAI,QAAQ;AACZ,eAAW,cAAc,UAAU;AAC/B,YAAM,QAAQ,MAAMC,MAAKH,MAAK,KAAK,QAAQ,UAAU,CAAC;AACtD,iBAAW,QAAQ,OAAO;AACtB,cAAM,eAAeA,MAAK,SAAS,QAAQ,IAAI;AAC/C,cAAM,WAAWA,MAAK,KAAK,eAAe,YAAY;AACtD,cAAMD,OAAMC,MAAK,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AACvD,cAAMC,UAAS,MAAM,QAAQ;AAC7B;AAAA,MACJ;AAAA,IACJ;AACA,YAAQ,IAAI,UAAU,KAAK,eAAe,UAAU,EAAE;AAAA,EAC1D;AACJ","sourcesContent":["import { copyFile, mkdir } from 'fs/promises'\nimport { createRequire } from 'module'\nimport path from 'path'\n\nimport { glob } from 'glob'\n\ntype AllowedFiles = 'artifacts' | 'deployments'\nexport type SrcFiles = { [key in AllowedFiles]?: readonly string[] }\nexport type CopyTargets = Record<string, SrcFiles>\n\nexport async function populate(copyTargets: CopyTargets, callerRootFile: string) {\n const callerRootDir = path.dirname(callerRootFile)\n for (const [srcPackage, srcFiles] of Object.entries(copyTargets)) {\n const srcDir = path.dirname(createRequire(callerRootFile).resolve(`${srcPackage}/package.json`))\n for (const [filePath, patterns] of Object.entries(srcFiles)) {\n const files = await glob(patterns.map((p) => path.join(srcDir, filePath, p)))\n let count = 0\n for (const file of files) {\n const relativePath = path.relative(srcDir, file)\n const destPath = path.join(callerRootDir, relativePath)\n await mkdir(path.dirname(destPath), { recursive: true })\n await copyFile(file, destPath)\n count++\n }\n console.log(`Copied ${count} ${filePath} from ${srcPackage}`)\n }\n }\n}\n","import { mkdir, readFile, readdir, writeFile } from 'fs/promises'\nimport path from 'path'\n\nimport { utils } from 'ethers'\n\nimport { Semaphore } from './semaphore'\n\nconst { ErrorFragment, FormatTypes, Interface } = utils\n\n/**\n * Asynchronously reads a JSON file from a given file path.\n * @param filePath - The path to the JSON file.\n * @returns A promise that resolves to the parsed JSON data.\n */\nasync function readJSONFile(filePath: string) {\n const data = await readFile(filePath, { encoding: 'utf8' })\n return JSON.parse(data)\n}\n\n/**\n * Processes ABI errors in parallel, controlled by a semaphore, and populates error collections.\n * @param filePath - The file path to read.\n * @param semaphore - Semaphore instance for concurrency control.\n * @param errors - Object to collect full errors and error selectors.\n */\nasync function parallelProcess(\n filePath: string,\n semaphore: Semaphore,\n errors: { full: Set<string>; selector: Record<string, string> }\n) {\n await semaphore.acquire()\n try {\n const { abi } = await readJSONFile(filePath)\n if (!abi || !Array.isArray(abi)) return\n abi.filter(({ type }) => type === 'error').forEach((obj) => {\n const frag = ErrorFragment.from(obj)\n errors.full.add(frag.format(FormatTypes.full))\n errors.selector[Interface.getSighash(frag)] = frag.format(FormatTypes.sighash)\n })\n } finally {\n semaphore.release()\n }\n}\n\n/**\n * Populates error information from ABI files in parallel.\n * @param callerRootFile - The root file path to start the search for ABI files.\n * @param destPath - The path relative to root to copy the files in.\n * @param maxConcurrent - Maximum number of concurrent file processing operations.\n */\nexport async function populateErrors(callerRootFile: string, destPath = 'src/errors', maxConcurrent = 50) {\n // Determining the directory paths from the caller's root file\n const callerRootDir = path.dirname(callerRootFile)\n const callerArtifactsPath = path.join(callerRootDir, 'artifacts')\n\n // 
Reading and filtering artifacts to process only JSON files\n const files = await readdir(callerArtifactsPath, { recursive: true })\n const jsonFiles = files.filter((file) => path.extname(file).toLowerCase() === '.json')\n\n const semaphore = new Semaphore(maxConcurrent)\n const errors = { full: new Set<string>(), selector: {} }\n\n // Parallel processing of files using the semaphore for concurrency control\n const filePromises = jsonFiles.map((file) =>\n parallelProcess(path.join(callerArtifactsPath, file), semaphore, errors)\n )\n await Promise.all(filePromises)\n\n // Sorting and organizing errors for output\n const { full, selector } = errors\n let sorter = new Intl.Collator('en', { caseFirst: 'upper', numeric: true })\n const sortedErrors = [...full].sort(sorter.compare)\n sorter = new Intl.Collator('en', { caseFirst: 'upper', numeric: false })\n const sortedSelectors = Object.fromEntries(Object.entries(selector).sort(([a], [b]) => sorter.compare(a, b)))\n const errorDir = path.join(callerRootDir, destPath)\n // Writing errors to files in the error directory\n await mkdir(errorDir, { recursive: true })\n await writeFile(path.join(errorDir, 'errors.json'), JSON.stringify(sortedErrors, null, 2))\n console.log(`Generated abi for ${sortedErrors.length} errors`)\n await writeFile(path.join(errorDir, 'errorSelectors.json'), JSON.stringify(sortedSelectors, null, 2))\n}\n","/**\n * Semaphore class for controlling access to a resource by multiple processes.\n * It maintains a counter and a queue for managing access.\n */\nexport class Semaphore {\n private counter = 0\n private queue: (() => void)[] = []\n constructor(private max: number) {}\n\n /**\n * Acquires a lock on the semaphore. If the semaphore is at its maximum,\n * the function will wait until it can acquire the lock.\n * @returns A promise that resolves when the lock has been acquired.\n */\n public async acquire(): Promise<void> {\n if (this.counter >= this.max) {\n await new Promise<void>((resolve) => this.queue.push(resolve))\n }\n this.counter++\n }\n\n /**\n * Releases a lock on the semaphore.\n */\n public release(): void {\n if (this.counter == 0) return\n this.counter--\n const resolve = this.queue.shift() ?? (() => null)\n resolve()\n }\n}\n","import { copyFile, mkdir } from 'fs/promises'\nimport { createRequire } from 'module'\nimport path from 'path'\n\nimport { glob } from 'glob'\n\ntype Package = string\ntype File = string\ntype Directory = Array<File | NestedDirectory>\ninterface NestedDirectory {\n [key: string]: Directory | NestedDirectory\n}\nexport interface PackageFiles {\n [key: Package]: NestedDirectory | Directory\n}\n\n/**\n * Recursively traverses a directory structure to build paths to leaf nodes.\n * @param node - A directory structure represented as a string (for files), an array (for directories), or an object (for nested directories).\n * @returns An array of strings, each representing a path from the root to a leaf node.\n */\nexport const buildPaths = (node: NestedDirectory | Directory | File): string[] => {\n // base case, we're at a leaf node, return the filename\n if (typeof node === 'string') return [node]\n\n const paths: string[] = []\n // If the current node is an array, recursively process each file / nested dir\n if (Array.isArray(node)) {\n for (const contract of node) {\n paths.push(...buildPaths(contract))\n }\n } else {\n // Current node is a nested directory. 
Recursively process each entry.\n for (const [parent, child] of Object.entries(node)) {\n const children = buildPaths(child)\n children.forEach((path) => paths.push(`${parent}/${path}`))\n }\n }\n return paths\n}\n\n/**\n * Asynchronously copies files from source packages to a target root directory based on the provided directory structure.\n * This function iterates over each source package, resolves all file paths,\n * and copies each file to the corresponding location within the caller's root directory.\n *\n * @param packageFiles - An object mapping package names to their directory structures,\n * where each directory structure defines the files and nested directories to be copied.\n * @param callerRootFile - The absolute path to the root file of the caller, used to determine the root directory into which the files will be copied.\n */\nexport async function copyPackageFiles(packageFiles: PackageFiles, callerRootFile: string) {\n const callerRootDir = path.dirname(callerRootFile)\n for (const [srcPackage, srcFiles] of Object.entries(packageFiles)) {\n const allPaths = buildPaths(srcFiles)\n const srcDir = path.dirname(createRequire(callerRootFile).resolve(`${srcPackage}/package.json`))\n let count = 0\n for (const uniquePath of allPaths) {\n const files = await glob(path.join(srcDir, uniquePath))\n for (const file of files) {\n const relativePath = path.relative(srcDir, file)\n const destPath = path.join(callerRootDir, relativePath)\n await mkdir(path.dirname(destPath), { recursive: true })\n await copyFile(file, destPath)\n count++\n }\n }\n console.log(`Copied ${count} files from ${srcPackage}`)\n }\n}\n"]}
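Both bundles now expose the same helper: the CJS build assigns it to `exports.copyPackageFiles`, while the ESM build adds it to the `export` list. A short ESM consumer sketch follows (the script path and dependency name are hypothetical); since `copyPackageFiles` derives its destination directory with `path.dirname(callerRootFile)`, an ESM caller should pass a filesystem path converted from `import.meta.url` rather than the raw URL string.

```ts
// Hypothetical ESM consumer script, e.g. scripts/copy-files.mts
import { fileURLToPath } from 'node:url';

import { copyPackageFiles } from '@layerzerolabs/evm-sdks-build';

// Mirror the dependency's JSON artifacts below this script's directory.
await copyPackageFiles(
    { '@example/contracts': { artifacts: ['**/*.json'] } },
    fileURLToPath(import.meta.url)
);
```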
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@layerzerolabs/evm-sdks-build",
-    "version": "2.1.3",
+    "version": "2.1.4",
     "license": "BUSL-1.1",
     "exports": {
         "types": "./dist/index.d.ts",
@@ -16,18 +16,22 @@
     "scripts": {
         "build": "$npm_execpath clean-prebuild && $npm_execpath tsup",
         "clean": "$npm_execpath clean-prebuild && rimraf .turbo",
-        "clean-prebuild": "rimraf dist"
+        "clean-prebuild": "rimraf dist",
+        "test": "jest"
     },
     "dependencies": {
         "ethers": "^5.7.2",
         "glob": "^10.3.10"
     },
     "devDependencies": {
-        "@layerzerolabs/tsup-config-next": "^2.1.3",
-        "@layerzerolabs/typescript-config-next": "^2.1.3",
+        "@layerzerolabs/tsup-config-next": "^2.1.4",
+        "@layerzerolabs/typescript-config-next": "^2.1.4",
         "@types/glob": "^8.1.0",
+        "@types/jest": "^29.5.10",
         "@types/node": "^20.10.5",
+        "jest": "^29.7.0",
         "rimraf": "^5.0.5",
+        "ts-jest": "^29.1.1",
         "tsup": "^8.0.1",
         "typescript": "~5.2.2"
     },
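The new `test` script and the jest, ts-jest, and @types/jest devDependencies wire up a TypeScript test runner, but the jest configuration itself is not part of this diff. A minimal configuration that would match these dependencies, stated purely as an assumption:

```js
// jest.config.js (assumed; not included in the published package diff)
/** @type {import('jest').Config} */
module.exports = {
    preset: 'ts-jest',
    testEnvironment: 'node',
};
```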