@xyo-network/bip39 2.110.10 → 2.110.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser/index.cjs +11 -36
- package/dist/browser/index.cjs.map +1 -1
- package/dist/browser/index.d.cts.map +1 -1
- package/dist/browser/index.d.mts.map +1 -1
- package/dist/browser/index.d.ts.map +1 -1
- package/dist/browser/index.js +11 -38
- package/dist/browser/index.js.map +1 -1
- package/dist/browser/wordlists/index.d.cts.map +1 -1
- package/dist/browser/wordlists/index.d.mts.map +1 -1
- package/dist/browser/wordlists/index.d.ts.map +1 -1
- package/dist/neutral/index.cjs +11 -36
- package/dist/neutral/index.cjs.map +1 -1
- package/dist/neutral/index.d.cts.map +1 -1
- package/dist/neutral/index.d.mts.map +1 -1
- package/dist/neutral/index.d.ts.map +1 -1
- package/dist/neutral/index.js +11 -38
- package/dist/neutral/index.js.map +1 -1
- package/dist/neutral/wordlists/index.d.cts.map +1 -1
- package/dist/neutral/wordlists/index.d.mts.map +1 -1
- package/dist/neutral/wordlists/index.d.ts.map +1 -1
- package/dist/node/index.cjs +11 -36
- package/dist/node/index.cjs.map +1 -1
- package/dist/node/index.d.cts.map +1 -1
- package/dist/node/index.d.mts.map +1 -1
- package/dist/node/index.d.ts.map +1 -1
- package/dist/node/index.js +11 -38
- package/dist/node/index.js.map +1 -1
- package/dist/node/wordlists/index.d.cts.map +1 -1
- package/dist/node/wordlists/index.d.mts.map +1 -1
- package/dist/node/wordlists/index.d.ts.map +1 -1
- package/package.json +3 -3
- package/src/index.ts +0 -1
- package/src/wordlists/index.ts +0 -1
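A minimal usage sketch of the deprecated API that this release rebuilds (the function names, wordlists keys, and the example mnemonic are taken from the JSDoc embedded in the source maps shown below; the package itself points users at @scure/bip39 instead):

import { entropyToMnemonic, mnemonicToEntropy, mnemonicToSeedSync, wordlists } from '@xyo-network/bip39'

const mnemonic = 'legal winner thank year wave sausage worth useful legal winner thank yellow'
const entropy = mnemonicToEntropy(mnemonic, wordlists.english)   // 16-byte Uint8Array
const roundTrip = entropyToMnemonic(entropy, wordlists.english)  // the same 12 words
const seed = mnemonicToSeedSync(mnemonic, 'password')            // 64-byte Uint8Array (PBKDF2-HMAC-SHA512, c: 2048, dkLen: 64)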
package/dist/browser/index.cjs
CHANGED
@@ -5,7 +5,6 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __getProtoOf = Object.getPrototypeOf;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
 var __export = (target, all) => {
   for (var name in all)
     __defProp(target, name, { get: all[name], enumerable: true });
@@ -67,73 +66,49 @@ var wordlists = {
 };
 
 // src/index.ts
-var isJapanese =
+var isJapanese = (wordlist) => wordlist[0] === "\u3042\u3044\u3053\u304F\u3057\u3093";
 function nfkd(str) {
   if (typeof str !== "string") throw new TypeError(`Invalid mnemonic type: ${typeof str}`);
   return str.normalize("NFKD");
 }
-__name(nfkd, "nfkd");
 function normalize(str) {
   const norm = nfkd(str);
   const words = norm.split(" ");
-  if (![
-    12,
-    15,
-    18,
-    21,
-    24
-  ].includes(words.length)) throw new Error("Invalid mnemonic");
-  return {
-    nfkd: norm,
-    words
-  };
+  if (![12, 15, 18, 21, 24].includes(words.length)) throw new Error("Invalid mnemonic");
+  return { nfkd: norm, words };
 }
-__name(normalize, "normalize");
 function assertEntropy(entropy) {
   import_assert.default.bytes(entropy, 16, 20, 24, 28, 32);
 }
-
-var calcChecksum = /* @__PURE__ */ __name((entropy) => {
+var calcChecksum = (entropy) => {
   const bitsLeft = 8 - entropy.length / 4;
-  return new Uint8Array([
-    (0, import_sha256.sha256)(entropy)[0] >> bitsLeft << bitsLeft
-  ]);
-}, "calcChecksum");
+  return new Uint8Array([(0, import_sha256.sha256)(entropy)[0] >> bitsLeft << bitsLeft]);
+};
 function getCoder(wordlist) {
-  if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== "string")
+  if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== "string")
+    throw new Error("Worlist: expected array of 2048 strings");
   for (const i of wordlist) {
     if (typeof i !== "string") throw new Error(`Wordlist: non-string element: ${i}`);
   }
   return import_base.utils.chain(import_base.utils.checksum(1, calcChecksum), import_base.utils.radix2(11, true), import_base.utils.alphabet(wordlist));
 }
-__name(getCoder, "getCoder");
 function mnemonicToEntropy(mnemonic, wordlist) {
   const { words } = normalize(mnemonic);
   const entropy = getCoder(wordlist).decode(words);
   assertEntropy(entropy);
   return entropy;
 }
-__name(mnemonicToEntropy, "mnemonicToEntropy");
 function entropyToMnemonic(entropy, wordlist) {
   assertEntropy(entropy);
   const words = getCoder(wordlist).encode(entropy);
   return words.join(isJapanese(wordlist) ? "\u3000" : " ");
 }
-
-var salt = /* @__PURE__ */ __name((passphrase) => nfkd(`mnemonic${passphrase}`), "salt");
+var salt = (passphrase) => nfkd(`mnemonic${passphrase}`);
 function mnemonicToSeed(mnemonic, passphrase = "") {
-  return (0, import_pbkdf2.pbkdf2Async)(import_sha512.sha512, normalize(mnemonic).nfkd, salt(passphrase), {
-    c: 2048,
-    dkLen: 64
-  });
+  return (0, import_pbkdf2.pbkdf2Async)(import_sha512.sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
 }
-__name(mnemonicToSeed, "mnemonicToSeed");
 function mnemonicToSeedSync(mnemonic, passphrase = "") {
-  return (0, import_pbkdf2.pbkdf2)(import_sha512.sha512, normalize(mnemonic).nfkd, salt(passphrase), {
-    c: 2048,
-    dkLen: 64
-  });
+  return (0, import_pbkdf2.pbkdf2)(import_sha512.sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
 }
-__name(mnemonicToSeedSync, "mnemonicToSeedSync");
 /*! scure-bip39 - MIT License (c) 2022 Patricio Palladino, Paul Miller (paulmillr.com) */
 //# sourceMappingURL=index.cjs.map
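The bulk of the removed lines above are the __name keep-names shim and its call sites; a small sketch of what that helper did (illustrative only, not part of the package — the first two lines are copied from the removed code):

var __defProp = Object.defineProperty;
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });

const salt = __name((passphrase) => `mnemonic${passphrase}`, "salt");
console.log(salt.name); // "salt", even if the binding itself gets renamed by a minifier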
package/dist/browser/index.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../src/index.ts","../../src/wordlists/index.ts"],"sourcesContent":["
+
{"version":3,"sources":["../../src/index.ts","../../src/wordlists/index.ts"],"sourcesContent":["/*! scure-bip39 - MIT License (c) 2022 Patricio Palladino, Paul Miller (paulmillr.com) */\nimport assert from '@noble/hashes/_assert'\nimport { pbkdf2, pbkdf2Async } from '@noble/hashes/pbkdf2'\nimport { sha256 } from '@noble/hashes/sha256'\nimport { sha512 } from '@noble/hashes/sha512'\nimport { utils as baseUtils } from '@scure/base'\n\nexport * from './wordlists/index.js'\n\n// Japanese wordlist\nconst isJapanese = (wordlist: string[]) => wordlist[0] === '\\u3042\\u3044\\u3053\\u304F\\u3057\\u3093'\n\n// Normalization replaces equivalent sequences of characters\n// so that any two texts that are equivalent will be reduced\n// to the same sequence of code points, called the normal form of the original text.\nfunction nfkd(str: string) {\n if (typeof str !== 'string') throw new TypeError(`Invalid mnemonic type: ${typeof str}`)\n return str.normalize('NFKD')\n}\n\nfunction normalize(str: string) {\n const norm = nfkd(str)\n const words = norm.split(' ')\n if (![12, 15, 18, 21, 24].includes(words.length)) throw new Error('Invalid mnemonic')\n return { nfkd: norm, words }\n}\n\nfunction assertEntropy(entropy: Uint8Array) {\n assert.bytes(entropy, 16, 20, 24, 28, 32)\n}\n\nconst calcChecksum = (entropy: Uint8Array) => {\n // Checksum is ent.length/4 bits long\n const bitsLeft = 8 - entropy.length / 4\n // Zero rightmost \"bitsLeft\" bits in byte\n // For example: bitsLeft=4 val=10111101 -> 10110000\n return new Uint8Array([(sha256(entropy)[0] >> bitsLeft) << bitsLeft])\n}\n\nfunction getCoder(wordlist: string[]) {\n if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== 'string')\n throw new Error('Worlist: expected array of 2048 strings')\n for (const i of wordlist) {\n if (typeof i !== 'string') throw new Error(`Wordlist: non-string element: ${i}`)\n }\n return baseUtils.chain(baseUtils.checksum(1, calcChecksum), baseUtils.radix2(11, true), baseUtils.alphabet(wordlist))\n}\n\n/**\n * Reversible: Converts mnemonic string to raw entropy in form of byte array.\n * @param mnemonic 12-24 words\n * @param wordlist imported wordlist for specific language\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * mnemonicToEntropy(mnem, wordlist)\n * // Produces\n * new Uint8Array([\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f\n * ])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToEntropy(mnemonic: string, wordlist: string[]): Uint8Array {\n const { words } = normalize(mnemonic)\n const entropy = getCoder(wordlist).decode(words)\n assertEntropy(entropy)\n return entropy\n}\n\n/**\n * Reversible: Converts raw entropy in form of byte array to mnemonic string.\n * @param entropy byte array\n * @param wordlist imported wordlist for specific language\n * @returns 12-24 words\n * @example\n * const ent = new Uint8Array([\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f\n * ]);\n * entropyToMnemonic(ent, wordlist);\n * // 'legal winner thank year wave sausage worth useful legal winner thank yellow'\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function entropyToMnemonic(entropy: Uint8Array, wordlist: string[]): string {\n assertEntropy(entropy)\n const words = getCoder(wordlist).encode(entropy)\n return words.join(isJapanese(wordlist) ? 
'\\u3000' : ' ')\n}\n\nconst salt = (passphrase: string) => nfkd(`mnemonic${passphrase}`)\n\n/**\n * Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.\n * @param mnemonic 12-24 words\n * @param passphrase string that will additionally protect the key\n * @returns 64 bytes of key data\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * await mnemonicToSeed(mnem, 'password');\n * // new Uint8Array([...64 bytes])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToSeed(mnemonic: string, passphrase = '') {\n return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 })\n}\n\n/**\n * Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.\n * @param mnemonic 12-24 words\n * @param passphrase string that will additionally protect the key\n * @returns 64 bytes of key data\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * mnemonicToSeedSync(mnem, 'password');\n * // new Uint8Array([...64 bytes])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToSeedSync(mnemonic: string, passphrase = '') {\n return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 })\n}\n","import czech from '@scure/bip39/wordlists/czech'\nimport english from '@scure/bip39/wordlists/english'\nimport french from '@scure/bip39/wordlists/french'\nimport italian from '@scure/bip39/wordlists/italian'\nimport japanese from '@scure/bip39/wordlists/japanese'\nimport korean from '@scure/bip39/wordlists/korean'\nimport simplifiedChinese from '@scure/bip39/wordlists/simplified-chinese'\nimport spanish from '@scure/bip39/wordlists/spanish'\nimport traditionalChinese from '@scure/bip39/wordlists/traditional-chinese'\n\n/** @deprecated use @scure/bip39 instead */\nexport const wordlists = {\n czech,\n english,\n french,\n italian,\n japanese,\n korean,\n simplifiedChinese,\n spanish,\n 
traditionalChinese,\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAmB;AACnB,oBAAoC;AACpC,oBAAuB;AACvB,oBAAuB;AACvB,kBAAmC;;;ACLnC,mBAAkB;AAClB,qBAAoB;AACpB,oBAAmB;AACnB,qBAAoB;AACpB,sBAAqB;AACrB,oBAAmB;AACnB,gCAA8B;AAC9B,qBAAoB;AACpB,iCAA+B;AAGxB,IAAM,YAAY;AAAA,EACvB,oBAAAA;AAAA,EACA,wBAAAC;AAAA,EACA,sBAAAC;AAAA,EACA,wBAAAC;AAAA,EACA,0BAAAC;AAAA,EACA,sBAAAC;AAAA,EACA,6CAAAC;AAAA,EACA,wBAAAC;AAAA,EACA,+CAAAC;AACF;;;ADXA,IAAM,aAAa,CAAC,aAAuB,SAAS,CAAC,MAAM;AAK3D,SAAS,KAAK,KAAa;AACzB,MAAI,OAAO,QAAQ,SAAU,OAAM,IAAI,UAAU,0BAA0B,OAAO,GAAG,EAAE;AACvF,SAAO,IAAI,UAAU,MAAM;AAC7B;AAEA,SAAS,UAAU,KAAa;AAC9B,QAAM,OAAO,KAAK,GAAG;AACrB,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,MAAI,CAAC,CAAC,IAAI,IAAI,IAAI,IAAI,EAAE,EAAE,SAAS,MAAM,MAAM,EAAG,OAAM,IAAI,MAAM,kBAAkB;AACpF,SAAO,EAAE,MAAM,MAAM,MAAM;AAC7B;AAEA,SAAS,cAAc,SAAqB;AAC1C,gBAAAC,QAAO,MAAM,SAAS,IAAI,IAAI,IAAI,IAAI,EAAE;AAC1C;AAEA,IAAM,eAAe,CAAC,YAAwB;AAE5C,QAAM,WAAW,IAAI,QAAQ,SAAS;AAGtC,SAAO,IAAI,WAAW,KAAE,sBAAO,OAAO,EAAE,CAAC,KAAK,YAAa,QAAQ,CAAC;AACtE;AAEA,SAAS,SAAS,UAAoB;AACpC,MAAI,CAAC,MAAM,QAAQ,QAAQ,KAAK,SAAS,WAAW,QAAQ,OAAO,SAAS,CAAC,MAAM;AACjF,UAAM,IAAI,MAAM,yCAAyC;AAC3D,aAAW,KAAK,UAAU;AACxB,QAAI,OAAO,MAAM,SAAU,OAAM,IAAI,MAAM,iCAAiC,CAAC,EAAE;AAAA,EACjF;AACA,SAAO,YAAAC,MAAU,MAAM,YAAAA,MAAU,SAAS,GAAG,YAAY,GAAG,YAAAA,MAAU,OAAO,IAAI,IAAI,GAAG,YAAAA,MAAU,SAAS,QAAQ,CAAC;AACtH;AAiBO,SAAS,kBAAkB,UAAkB,UAAgC;AAClF,QAAM,EAAE,MAAM,IAAI,UAAU,QAAQ;AACpC,QAAM,UAAU,SAAS,QAAQ,EAAE,OAAO,KAAK;AAC/C,gBAAc,OAAO;AACrB,SAAO;AACT;AAiBO,SAAS,kBAAkB,SAAqB,UAA4B;AACjF,gBAAc,OAAO;AACrB,QAAM,QAAQ,SAAS,QAAQ,EAAE,OAAO,OAAO;AAC/C,SAAO,MAAM,KAAK,WAAW,QAAQ,IAAI,WAAW,GAAG;AACzD;AAEA,IAAM,OAAO,CAAC,eAAuB,KAAK,WAAW,UAAU,EAAE;AAc1D,SAAS,eAAe,UAAkB,aAAa,IAAI;AAChE,aAAO,2BAAY,sBAAQ,UAAU,QAAQ,EAAE,MAAM,KAAK,UAAU,GAAG,EAAE,GAAG,MAAM,OAAO,GAAG,CAAC;AAC/F;AAcO,SAAS,mBAAmB,UAAkB,aAAa,IAAI;AACpE,aAAO,sBAAO,sBAAQ,UAAU,QAAQ,EAAE,MAAM,KAAK,UAAU,GAAG,EAAE,GAAG,MAAM,OAAO,GAAG,CAAC;AAC1F;","names":["czech","english","french","italian","japanese","korean","simplifiedChinese","spanish","traditionalChinese","assert","baseUtils"]}
package/dist/browser/index.d.cts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAOA,cAAc,sBAAsB,CAAA;AAwDpC,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,UAAU,CAKlF;AAiBD,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,MAAM,CAIjF;AAgBD,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,uBAE/D;AAcD,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,cAEnE"}
package/dist/browser/index.d.mts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAOA,cAAc,sBAAsB,CAAA;AAwDpC,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,UAAU,CAKlF;AAiBD,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,MAAM,CAIjF;AAgBD,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,uBAE/D;AAcD,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,cAEnE"}
package/dist/browser/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAOA,cAAc,sBAAsB,CAAA;AAwDpC,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,UAAU,CAKlF;AAiBD,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,MAAM,CAIjF;AAgBD,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,uBAE/D;AAcD,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,cAEnE"}
package/dist/browser/index.js
CHANGED
@@ -1,6 +1,3 @@
-var __defProp = Object.defineProperty;
-var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
-
 // src/index.ts
 import assert from "@noble/hashes/_assert";
 import { pbkdf2, pbkdf2Async } from "@noble/hashes/pbkdf2";
@@ -31,74 +28,50 @@ var wordlists = {
 };
 
 // src/index.ts
-var isJapanese =
+var isJapanese = (wordlist) => wordlist[0] === "\u3042\u3044\u3053\u304F\u3057\u3093";
 function nfkd(str) {
   if (typeof str !== "string") throw new TypeError(`Invalid mnemonic type: ${typeof str}`);
   return str.normalize("NFKD");
 }
-__name(nfkd, "nfkd");
 function normalize(str) {
   const norm = nfkd(str);
   const words = norm.split(" ");
-  if (![
-    12,
-    15,
-    18,
-    21,
-    24
-  ].includes(words.length)) throw new Error("Invalid mnemonic");
-  return {
-    nfkd: norm,
-    words
-  };
+  if (![12, 15, 18, 21, 24].includes(words.length)) throw new Error("Invalid mnemonic");
+  return { nfkd: norm, words };
 }
-__name(normalize, "normalize");
 function assertEntropy(entropy) {
   assert.bytes(entropy, 16, 20, 24, 28, 32);
 }
-
-var calcChecksum = /* @__PURE__ */ __name((entropy) => {
+var calcChecksum = (entropy) => {
   const bitsLeft = 8 - entropy.length / 4;
-  return new Uint8Array([
-    sha256(entropy)[0] >> bitsLeft << bitsLeft
-  ]);
-}, "calcChecksum");
+  return new Uint8Array([sha256(entropy)[0] >> bitsLeft << bitsLeft]);
+};
 function getCoder(wordlist) {
-  if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== "string")
+  if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== "string")
+    throw new Error("Worlist: expected array of 2048 strings");
   for (const i of wordlist) {
     if (typeof i !== "string") throw new Error(`Wordlist: non-string element: ${i}`);
   }
   return baseUtils.chain(baseUtils.checksum(1, calcChecksum), baseUtils.radix2(11, true), baseUtils.alphabet(wordlist));
 }
-__name(getCoder, "getCoder");
 function mnemonicToEntropy(mnemonic, wordlist) {
   const { words } = normalize(mnemonic);
   const entropy = getCoder(wordlist).decode(words);
   assertEntropy(entropy);
   return entropy;
 }
-__name(mnemonicToEntropy, "mnemonicToEntropy");
 function entropyToMnemonic(entropy, wordlist) {
   assertEntropy(entropy);
   const words = getCoder(wordlist).encode(entropy);
   return words.join(isJapanese(wordlist) ? "\u3000" : " ");
 }
-
-var salt = /* @__PURE__ */ __name((passphrase) => nfkd(`mnemonic${passphrase}`), "salt");
+var salt = (passphrase) => nfkd(`mnemonic${passphrase}`);
 function mnemonicToSeed(mnemonic, passphrase = "") {
-  return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), {
-    c: 2048,
-    dkLen: 64
-  });
+  return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
 }
-__name(mnemonicToSeed, "mnemonicToSeed");
 function mnemonicToSeedSync(mnemonic, passphrase = "") {
-  return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), {
-    c: 2048,
-    dkLen: 64
-  });
+  return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
 }
-__name(mnemonicToSeedSync, "mnemonicToSeedSync");
 export {
   entropyToMnemonic,
   mnemonicToEntropy,
package/dist/browser/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../src/index.ts","../../src/wordlists/index.ts"],"sourcesContent":["
+
{"version":3,"sources":["../../src/index.ts","../../src/wordlists/index.ts"],"sourcesContent":["/*! scure-bip39 - MIT License (c) 2022 Patricio Palladino, Paul Miller (paulmillr.com) */\nimport assert from '@noble/hashes/_assert'\nimport { pbkdf2, pbkdf2Async } from '@noble/hashes/pbkdf2'\nimport { sha256 } from '@noble/hashes/sha256'\nimport { sha512 } from '@noble/hashes/sha512'\nimport { utils as baseUtils } from '@scure/base'\n\nexport * from './wordlists/index.js'\n\n// Japanese wordlist\nconst isJapanese = (wordlist: string[]) => wordlist[0] === '\\u3042\\u3044\\u3053\\u304F\\u3057\\u3093'\n\n// Normalization replaces equivalent sequences of characters\n// so that any two texts that are equivalent will be reduced\n// to the same sequence of code points, called the normal form of the original text.\nfunction nfkd(str: string) {\n if (typeof str !== 'string') throw new TypeError(`Invalid mnemonic type: ${typeof str}`)\n return str.normalize('NFKD')\n}\n\nfunction normalize(str: string) {\n const norm = nfkd(str)\n const words = norm.split(' ')\n if (![12, 15, 18, 21, 24].includes(words.length)) throw new Error('Invalid mnemonic')\n return { nfkd: norm, words }\n}\n\nfunction assertEntropy(entropy: Uint8Array) {\n assert.bytes(entropy, 16, 20, 24, 28, 32)\n}\n\nconst calcChecksum = (entropy: Uint8Array) => {\n // Checksum is ent.length/4 bits long\n const bitsLeft = 8 - entropy.length / 4\n // Zero rightmost \"bitsLeft\" bits in byte\n // For example: bitsLeft=4 val=10111101 -> 10110000\n return new Uint8Array([(sha256(entropy)[0] >> bitsLeft) << bitsLeft])\n}\n\nfunction getCoder(wordlist: string[]) {\n if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== 'string')\n throw new Error('Worlist: expected array of 2048 strings')\n for (const i of wordlist) {\n if (typeof i !== 'string') throw new Error(`Wordlist: non-string element: ${i}`)\n }\n return baseUtils.chain(baseUtils.checksum(1, calcChecksum), baseUtils.radix2(11, true), baseUtils.alphabet(wordlist))\n}\n\n/**\n * Reversible: Converts mnemonic string to raw entropy in form of byte array.\n * @param mnemonic 12-24 words\n * @param wordlist imported wordlist for specific language\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * mnemonicToEntropy(mnem, wordlist)\n * // Produces\n * new Uint8Array([\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f\n * ])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToEntropy(mnemonic: string, wordlist: string[]): Uint8Array {\n const { words } = normalize(mnemonic)\n const entropy = getCoder(wordlist).decode(words)\n assertEntropy(entropy)\n return entropy\n}\n\n/**\n * Reversible: Converts raw entropy in form of byte array to mnemonic string.\n * @param entropy byte array\n * @param wordlist imported wordlist for specific language\n * @returns 12-24 words\n * @example\n * const ent = new Uint8Array([\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f\n * ]);\n * entropyToMnemonic(ent, wordlist);\n * // 'legal winner thank year wave sausage worth useful legal winner thank yellow'\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function entropyToMnemonic(entropy: Uint8Array, wordlist: string[]): string {\n assertEntropy(entropy)\n const words = getCoder(wordlist).encode(entropy)\n return words.join(isJapanese(wordlist) ? 
'\\u3000' : ' ')\n}\n\nconst salt = (passphrase: string) => nfkd(`mnemonic${passphrase}`)\n\n/**\n * Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.\n * @param mnemonic 12-24 words\n * @param passphrase string that will additionally protect the key\n * @returns 64 bytes of key data\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * await mnemonicToSeed(mnem, 'password');\n * // new Uint8Array([...64 bytes])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToSeed(mnemonic: string, passphrase = '') {\n return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 })\n}\n\n/**\n * Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.\n * @param mnemonic 12-24 words\n * @param passphrase string that will additionally protect the key\n * @returns 64 bytes of key data\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * mnemonicToSeedSync(mnem, 'password');\n * // new Uint8Array([...64 bytes])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToSeedSync(mnemonic: string, passphrase = '') {\n return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 })\n}\n","import czech from '@scure/bip39/wordlists/czech'\nimport english from '@scure/bip39/wordlists/english'\nimport french from '@scure/bip39/wordlists/french'\nimport italian from '@scure/bip39/wordlists/italian'\nimport japanese from '@scure/bip39/wordlists/japanese'\nimport korean from '@scure/bip39/wordlists/korean'\nimport simplifiedChinese from '@scure/bip39/wordlists/simplified-chinese'\nimport spanish from '@scure/bip39/wordlists/spanish'\nimport traditionalChinese from '@scure/bip39/wordlists/traditional-chinese'\n\n/** @deprecated use @scure/bip39 instead */\nexport const wordlists = {\n czech,\n english,\n french,\n italian,\n japanese,\n korean,\n simplifiedChinese,\n spanish,\n 
traditionalChinese,\n}\n"],"mappings":";AACA,OAAO,YAAY;AACnB,SAAS,QAAQ,mBAAmB;AACpC,SAAS,cAAc;AACvB,SAAS,cAAc;AACvB,SAAS,SAAS,iBAAiB;;;ACLnC,OAAO,WAAW;AAClB,OAAO,aAAa;AACpB,OAAO,YAAY;AACnB,OAAO,aAAa;AACpB,OAAO,cAAc;AACrB,OAAO,YAAY;AACnB,OAAO,uBAAuB;AAC9B,OAAO,aAAa;AACpB,OAAO,wBAAwB;AAGxB,IAAM,YAAY;AAAA,EACvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ADXA,IAAM,aAAa,CAAC,aAAuB,SAAS,CAAC,MAAM;AAK3D,SAAS,KAAK,KAAa;AACzB,MAAI,OAAO,QAAQ,SAAU,OAAM,IAAI,UAAU,0BAA0B,OAAO,GAAG,EAAE;AACvF,SAAO,IAAI,UAAU,MAAM;AAC7B;AAEA,SAAS,UAAU,KAAa;AAC9B,QAAM,OAAO,KAAK,GAAG;AACrB,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,MAAI,CAAC,CAAC,IAAI,IAAI,IAAI,IAAI,EAAE,EAAE,SAAS,MAAM,MAAM,EAAG,OAAM,IAAI,MAAM,kBAAkB;AACpF,SAAO,EAAE,MAAM,MAAM,MAAM;AAC7B;AAEA,SAAS,cAAc,SAAqB;AAC1C,SAAO,MAAM,SAAS,IAAI,IAAI,IAAI,IAAI,EAAE;AAC1C;AAEA,IAAM,eAAe,CAAC,YAAwB;AAE5C,QAAM,WAAW,IAAI,QAAQ,SAAS;AAGtC,SAAO,IAAI,WAAW,CAAE,OAAO,OAAO,EAAE,CAAC,KAAK,YAAa,QAAQ,CAAC;AACtE;AAEA,SAAS,SAAS,UAAoB;AACpC,MAAI,CAAC,MAAM,QAAQ,QAAQ,KAAK,SAAS,WAAW,QAAQ,OAAO,SAAS,CAAC,MAAM;AACjF,UAAM,IAAI,MAAM,yCAAyC;AAC3D,aAAW,KAAK,UAAU;AACxB,QAAI,OAAO,MAAM,SAAU,OAAM,IAAI,MAAM,iCAAiC,CAAC,EAAE;AAAA,EACjF;AACA,SAAO,UAAU,MAAM,UAAU,SAAS,GAAG,YAAY,GAAG,UAAU,OAAO,IAAI,IAAI,GAAG,UAAU,SAAS,QAAQ,CAAC;AACtH;AAiBO,SAAS,kBAAkB,UAAkB,UAAgC;AAClF,QAAM,EAAE,MAAM,IAAI,UAAU,QAAQ;AACpC,QAAM,UAAU,SAAS,QAAQ,EAAE,OAAO,KAAK;AAC/C,gBAAc,OAAO;AACrB,SAAO;AACT;AAiBO,SAAS,kBAAkB,SAAqB,UAA4B;AACjF,gBAAc,OAAO;AACrB,QAAM,QAAQ,SAAS,QAAQ,EAAE,OAAO,OAAO;AAC/C,SAAO,MAAM,KAAK,WAAW,QAAQ,IAAI,WAAW,GAAG;AACzD;AAEA,IAAM,OAAO,CAAC,eAAuB,KAAK,WAAW,UAAU,EAAE;AAc1D,SAAS,eAAe,UAAkB,aAAa,IAAI;AAChE,SAAO,YAAY,QAAQ,UAAU,QAAQ,EAAE,MAAM,KAAK,UAAU,GAAG,EAAE,GAAG,MAAM,OAAO,GAAG,CAAC;AAC/F;AAcO,SAAS,mBAAmB,UAAkB,aAAa,IAAI;AACpE,SAAO,OAAO,QAAQ,UAAU,QAAQ,EAAE,MAAM,KAAK,UAAU,GAAG,EAAE,GAAG,MAAM,OAAO,GAAG,CAAC;AAC1F;","names":[]}
package/dist/browser/wordlists/index.d.cts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,8BAA8B,CAAA;AAChD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,QAAQ,MAAM,iCAAiC,CAAA;AACtD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,iBAAiB,MAAM,2CAA2C,CAAA;AACzE,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,kBAAkB,MAAM,4CAA4C,CAAA;AAG3E,eAAO,MAAM,SAAS;;;;;;;;;;CAUrB,CAAA"}
package/dist/browser/wordlists/index.d.mts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,8BAA8B,CAAA;AAChD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,QAAQ,MAAM,iCAAiC,CAAA;AACtD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,iBAAiB,MAAM,2CAA2C,CAAA;AACzE,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,kBAAkB,MAAM,4CAA4C,CAAA;AAG3E,eAAO,MAAM,SAAS;;;;;;;;;;CAUrB,CAAA"}
package/dist/browser/wordlists/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,8BAA8B,CAAA;AAChD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,QAAQ,MAAM,iCAAiC,CAAA;AACtD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,iBAAiB,MAAM,2CAA2C,CAAA;AACzE,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,kBAAkB,MAAM,4CAA4C,CAAA;AAG3E,eAAO,MAAM,SAAS;;;;;;;;;;CAUrB,CAAA"}
package/dist/neutral/index.cjs
CHANGED
@@ -5,7 +5,6 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __getProtoOf = Object.getPrototypeOf;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
 var __export = (target, all) => {
   for (var name in all)
     __defProp(target, name, { get: all[name], enumerable: true });
@@ -67,73 +66,49 @@ var wordlists = {
 };
 
 // src/index.ts
-var isJapanese =
+var isJapanese = (wordlist) => wordlist[0] === "\u3042\u3044\u3053\u304F\u3057\u3093";
 function nfkd(str) {
   if (typeof str !== "string") throw new TypeError(`Invalid mnemonic type: ${typeof str}`);
   return str.normalize("NFKD");
 }
-__name(nfkd, "nfkd");
 function normalize(str) {
   const norm = nfkd(str);
   const words = norm.split(" ");
-  if (![
-    12,
-    15,
-    18,
-    21,
-    24
-  ].includes(words.length)) throw new Error("Invalid mnemonic");
-  return {
-    nfkd: norm,
-    words
-  };
+  if (![12, 15, 18, 21, 24].includes(words.length)) throw new Error("Invalid mnemonic");
+  return { nfkd: norm, words };
 }
-__name(normalize, "normalize");
 function assertEntropy(entropy) {
   import_assert.default.bytes(entropy, 16, 20, 24, 28, 32);
 }
-
-var calcChecksum = /* @__PURE__ */ __name((entropy) => {
+var calcChecksum = (entropy) => {
  const bitsLeft = 8 - entropy.length / 4;
-  return new Uint8Array([
-    (0, import_sha256.sha256)(entropy)[0] >> bitsLeft << bitsLeft
-  ]);
-}, "calcChecksum");
+  return new Uint8Array([(0, import_sha256.sha256)(entropy)[0] >> bitsLeft << bitsLeft]);
+};
 function getCoder(wordlist) {
-  if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== "string")
+  if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== "string")
+    throw new Error("Worlist: expected array of 2048 strings");
   for (const i of wordlist) {
     if (typeof i !== "string") throw new Error(`Wordlist: non-string element: ${i}`);
   }
   return import_base.utils.chain(import_base.utils.checksum(1, calcChecksum), import_base.utils.radix2(11, true), import_base.utils.alphabet(wordlist));
 }
-__name(getCoder, "getCoder");
 function mnemonicToEntropy(mnemonic, wordlist) {
   const { words } = normalize(mnemonic);
   const entropy = getCoder(wordlist).decode(words);
   assertEntropy(entropy);
   return entropy;
 }
-__name(mnemonicToEntropy, "mnemonicToEntropy");
 function entropyToMnemonic(entropy, wordlist) {
   assertEntropy(entropy);
   const words = getCoder(wordlist).encode(entropy);
   return words.join(isJapanese(wordlist) ? "\u3000" : " ");
 }
-
-var salt = /* @__PURE__ */ __name((passphrase) => nfkd(`mnemonic${passphrase}`), "salt");
+var salt = (passphrase) => nfkd(`mnemonic${passphrase}`);
 function mnemonicToSeed(mnemonic, passphrase = "") {
-  return (0, import_pbkdf2.pbkdf2Async)(import_sha512.sha512, normalize(mnemonic).nfkd, salt(passphrase), {
-    c: 2048,
-    dkLen: 64
-  });
+  return (0, import_pbkdf2.pbkdf2Async)(import_sha512.sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
 }
-__name(mnemonicToSeed, "mnemonicToSeed");
 function mnemonicToSeedSync(mnemonic, passphrase = "") {
-  return (0, import_pbkdf2.pbkdf2)(import_sha512.sha512, normalize(mnemonic).nfkd, salt(passphrase), {
-    c: 2048,
-    dkLen: 64
-  });
+  return (0, import_pbkdf2.pbkdf2)(import_sha512.sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
 }
-__name(mnemonicToSeedSync, "mnemonicToSeedSync");
 /*! scure-bip39 - MIT License (c) 2022 Patricio Palladino, Paul Miller (paulmillr.com) */
 //# sourceMappingURL=index.cjs.map
package/dist/neutral/index.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../src/index.ts","../../src/wordlists/index.ts"],"sourcesContent":["
+
{"version":3,"sources":["../../src/index.ts","../../src/wordlists/index.ts"],"sourcesContent":["/*! scure-bip39 - MIT License (c) 2022 Patricio Palladino, Paul Miller (paulmillr.com) */\nimport assert from '@noble/hashes/_assert'\nimport { pbkdf2, pbkdf2Async } from '@noble/hashes/pbkdf2'\nimport { sha256 } from '@noble/hashes/sha256'\nimport { sha512 } from '@noble/hashes/sha512'\nimport { utils as baseUtils } from '@scure/base'\n\nexport * from './wordlists/index.js'\n\n// Japanese wordlist\nconst isJapanese = (wordlist: string[]) => wordlist[0] === '\\u3042\\u3044\\u3053\\u304F\\u3057\\u3093'\n\n// Normalization replaces equivalent sequences of characters\n// so that any two texts that are equivalent will be reduced\n// to the same sequence of code points, called the normal form of the original text.\nfunction nfkd(str: string) {\n if (typeof str !== 'string') throw new TypeError(`Invalid mnemonic type: ${typeof str}`)\n return str.normalize('NFKD')\n}\n\nfunction normalize(str: string) {\n const norm = nfkd(str)\n const words = norm.split(' ')\n if (![12, 15, 18, 21, 24].includes(words.length)) throw new Error('Invalid mnemonic')\n return { nfkd: norm, words }\n}\n\nfunction assertEntropy(entropy: Uint8Array) {\n assert.bytes(entropy, 16, 20, 24, 28, 32)\n}\n\nconst calcChecksum = (entropy: Uint8Array) => {\n // Checksum is ent.length/4 bits long\n const bitsLeft = 8 - entropy.length / 4\n // Zero rightmost \"bitsLeft\" bits in byte\n // For example: bitsLeft=4 val=10111101 -> 10110000\n return new Uint8Array([(sha256(entropy)[0] >> bitsLeft) << bitsLeft])\n}\n\nfunction getCoder(wordlist: string[]) {\n if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== 'string')\n throw new Error('Worlist: expected array of 2048 strings')\n for (const i of wordlist) {\n if (typeof i !== 'string') throw new Error(`Wordlist: non-string element: ${i}`)\n }\n return baseUtils.chain(baseUtils.checksum(1, calcChecksum), baseUtils.radix2(11, true), baseUtils.alphabet(wordlist))\n}\n\n/**\n * Reversible: Converts mnemonic string to raw entropy in form of byte array.\n * @param mnemonic 12-24 words\n * @param wordlist imported wordlist for specific language\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * mnemonicToEntropy(mnem, wordlist)\n * // Produces\n * new Uint8Array([\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f\n * ])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToEntropy(mnemonic: string, wordlist: string[]): Uint8Array {\n const { words } = normalize(mnemonic)\n const entropy = getCoder(wordlist).decode(words)\n assertEntropy(entropy)\n return entropy\n}\n\n/**\n * Reversible: Converts raw entropy in form of byte array to mnemonic string.\n * @param entropy byte array\n * @param wordlist imported wordlist for specific language\n * @returns 12-24 words\n * @example\n * const ent = new Uint8Array([\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f\n * ]);\n * entropyToMnemonic(ent, wordlist);\n * // 'legal winner thank year wave sausage worth useful legal winner thank yellow'\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function entropyToMnemonic(entropy: Uint8Array, wordlist: string[]): string {\n assertEntropy(entropy)\n const words = getCoder(wordlist).encode(entropy)\n return words.join(isJapanese(wordlist) ? 
'\\u3000' : ' ')\n}\n\nconst salt = (passphrase: string) => nfkd(`mnemonic${passphrase}`)\n\n/**\n * Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.\n * @param mnemonic 12-24 words\n * @param passphrase string that will additionally protect the key\n * @returns 64 bytes of key data\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * await mnemonicToSeed(mnem, 'password');\n * // new Uint8Array([...64 bytes])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToSeed(mnemonic: string, passphrase = '') {\n return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 })\n}\n\n/**\n * Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.\n * @param mnemonic 12-24 words\n * @param passphrase string that will additionally protect the key\n * @returns 64 bytes of key data\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * mnemonicToSeedSync(mnem, 'password');\n * // new Uint8Array([...64 bytes])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToSeedSync(mnemonic: string, passphrase = '') {\n return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 })\n}\n","import czech from '@scure/bip39/wordlists/czech'\nimport english from '@scure/bip39/wordlists/english'\nimport french from '@scure/bip39/wordlists/french'\nimport italian from '@scure/bip39/wordlists/italian'\nimport japanese from '@scure/bip39/wordlists/japanese'\nimport korean from '@scure/bip39/wordlists/korean'\nimport simplifiedChinese from '@scure/bip39/wordlists/simplified-chinese'\nimport spanish from '@scure/bip39/wordlists/spanish'\nimport traditionalChinese from '@scure/bip39/wordlists/traditional-chinese'\n\n/** @deprecated use @scure/bip39 instead */\nexport const wordlists = {\n czech,\n english,\n french,\n italian,\n japanese,\n korean,\n simplifiedChinese,\n spanish,\n 
traditionalChinese,\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAmB;AACnB,oBAAoC;AACpC,oBAAuB;AACvB,oBAAuB;AACvB,kBAAmC;;;ACLnC,mBAAkB;AAClB,qBAAoB;AACpB,oBAAmB;AACnB,qBAAoB;AACpB,sBAAqB;AACrB,oBAAmB;AACnB,gCAA8B;AAC9B,qBAAoB;AACpB,iCAA+B;AAGxB,IAAM,YAAY;AAAA,EACvB,oBAAAA;AAAA,EACA,wBAAAC;AAAA,EACA,sBAAAC;AAAA,EACA,wBAAAC;AAAA,EACA,0BAAAC;AAAA,EACA,sBAAAC;AAAA,EACA,6CAAAC;AAAA,EACA,wBAAAC;AAAA,EACA,+CAAAC;AACF;;;ADXA,IAAM,aAAa,CAAC,aAAuB,SAAS,CAAC,MAAM;AAK3D,SAAS,KAAK,KAAa;AACzB,MAAI,OAAO,QAAQ,SAAU,OAAM,IAAI,UAAU,0BAA0B,OAAO,GAAG,EAAE;AACvF,SAAO,IAAI,UAAU,MAAM;AAC7B;AAEA,SAAS,UAAU,KAAa;AAC9B,QAAM,OAAO,KAAK,GAAG;AACrB,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,MAAI,CAAC,CAAC,IAAI,IAAI,IAAI,IAAI,EAAE,EAAE,SAAS,MAAM,MAAM,EAAG,OAAM,IAAI,MAAM,kBAAkB;AACpF,SAAO,EAAE,MAAM,MAAM,MAAM;AAC7B;AAEA,SAAS,cAAc,SAAqB;AAC1C,gBAAAC,QAAO,MAAM,SAAS,IAAI,IAAI,IAAI,IAAI,EAAE;AAC1C;AAEA,IAAM,eAAe,CAAC,YAAwB;AAE5C,QAAM,WAAW,IAAI,QAAQ,SAAS;AAGtC,SAAO,IAAI,WAAW,KAAE,sBAAO,OAAO,EAAE,CAAC,KAAK,YAAa,QAAQ,CAAC;AACtE;AAEA,SAAS,SAAS,UAAoB;AACpC,MAAI,CAAC,MAAM,QAAQ,QAAQ,KAAK,SAAS,WAAW,QAAQ,OAAO,SAAS,CAAC,MAAM;AACjF,UAAM,IAAI,MAAM,yCAAyC;AAC3D,aAAW,KAAK,UAAU;AACxB,QAAI,OAAO,MAAM,SAAU,OAAM,IAAI,MAAM,iCAAiC,CAAC,EAAE;AAAA,EACjF;AACA,SAAO,YAAAC,MAAU,MAAM,YAAAA,MAAU,SAAS,GAAG,YAAY,GAAG,YAAAA,MAAU,OAAO,IAAI,IAAI,GAAG,YAAAA,MAAU,SAAS,QAAQ,CAAC;AACtH;AAiBO,SAAS,kBAAkB,UAAkB,UAAgC;AAClF,QAAM,EAAE,MAAM,IAAI,UAAU,QAAQ;AACpC,QAAM,UAAU,SAAS,QAAQ,EAAE,OAAO,KAAK;AAC/C,gBAAc,OAAO;AACrB,SAAO;AACT;AAiBO,SAAS,kBAAkB,SAAqB,UAA4B;AACjF,gBAAc,OAAO;AACrB,QAAM,QAAQ,SAAS,QAAQ,EAAE,OAAO,OAAO;AAC/C,SAAO,MAAM,KAAK,WAAW,QAAQ,IAAI,WAAW,GAAG;AACzD;AAEA,IAAM,OAAO,CAAC,eAAuB,KAAK,WAAW,UAAU,EAAE;AAc1D,SAAS,eAAe,UAAkB,aAAa,IAAI;AAChE,aAAO,2BAAY,sBAAQ,UAAU,QAAQ,EAAE,MAAM,KAAK,UAAU,GAAG,EAAE,GAAG,MAAM,OAAO,GAAG,CAAC;AAC/F;AAcO,SAAS,mBAAmB,UAAkB,aAAa,IAAI;AACpE,aAAO,sBAAO,sBAAQ,UAAU,QAAQ,EAAE,MAAM,KAAK,UAAU,GAAG,EAAE,GAAG,MAAM,OAAO,GAAG,CAAC;AAC1F;","names":["czech","english","french","italian","japanese","korean","simplifiedChinese","spanish","traditionalChinese","assert","baseUtils"]}
package/dist/neutral/index.d.cts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAOA,cAAc,sBAAsB,CAAA;AAwDpC,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,UAAU,CAKlF;AAiBD,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,MAAM,CAIjF;AAgBD,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,uBAE/D;AAcD,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,cAEnE"}
package/dist/neutral/index.d.mts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAOA,cAAc,sBAAsB,CAAA;AAwDpC,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,UAAU,CAKlF;AAiBD,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,MAAM,CAIjF;AAgBD,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,uBAE/D;AAcD,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,cAEnE"}
package/dist/neutral/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAOA,cAAc,sBAAsB,CAAA;AAwDpC,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,UAAU,CAKlF;AAiBD,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,MAAM,CAIjF;AAgBD,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,uBAE/D;AAcD,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,cAEnE"}
package/dist/neutral/index.js
CHANGED
@@ -1,6 +1,3 @@
-var __defProp = Object.defineProperty;
-var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
-
 // src/index.ts
 import assert from "@noble/hashes/_assert";
 import { pbkdf2, pbkdf2Async } from "@noble/hashes/pbkdf2";
@@ -31,74 +28,50 @@ var wordlists = {
 };
 
 // src/index.ts
-var isJapanese =
+var isJapanese = (wordlist) => wordlist[0] === "\u3042\u3044\u3053\u304F\u3057\u3093";
 function nfkd(str) {
   if (typeof str !== "string") throw new TypeError(`Invalid mnemonic type: ${typeof str}`);
   return str.normalize("NFKD");
 }
-__name(nfkd, "nfkd");
 function normalize(str) {
   const norm = nfkd(str);
   const words = norm.split(" ");
-  if (![
-    12,
-    15,
-    18,
-    21,
-    24
-  ].includes(words.length)) throw new Error("Invalid mnemonic");
-  return {
-    nfkd: norm,
-    words
-  };
+  if (![12, 15, 18, 21, 24].includes(words.length)) throw new Error("Invalid mnemonic");
+  return { nfkd: norm, words };
 }
-__name(normalize, "normalize");
 function assertEntropy(entropy) {
   assert.bytes(entropy, 16, 20, 24, 28, 32);
 }
-
-var calcChecksum = /* @__PURE__ */ __name((entropy) => {
+var calcChecksum = (entropy) => {
   const bitsLeft = 8 - entropy.length / 4;
-  return new Uint8Array([
-    sha256(entropy)[0] >> bitsLeft << bitsLeft
-  ]);
-}, "calcChecksum");
+  return new Uint8Array([sha256(entropy)[0] >> bitsLeft << bitsLeft]);
+};
 function getCoder(wordlist) {
-  if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== "string")
+  if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== "string")
+    throw new Error("Worlist: expected array of 2048 strings");
   for (const i of wordlist) {
     if (typeof i !== "string") throw new Error(`Wordlist: non-string element: ${i}`);
   }
   return baseUtils.chain(baseUtils.checksum(1, calcChecksum), baseUtils.radix2(11, true), baseUtils.alphabet(wordlist));
 }
-__name(getCoder, "getCoder");
 function mnemonicToEntropy(mnemonic, wordlist) {
   const { words } = normalize(mnemonic);
   const entropy = getCoder(wordlist).decode(words);
   assertEntropy(entropy);
   return entropy;
 }
-__name(mnemonicToEntropy, "mnemonicToEntropy");
 function entropyToMnemonic(entropy, wordlist) {
   assertEntropy(entropy);
   const words = getCoder(wordlist).encode(entropy);
   return words.join(isJapanese(wordlist) ? "\u3000" : " ");
 }
-
-var salt = /* @__PURE__ */ __name((passphrase) => nfkd(`mnemonic${passphrase}`), "salt");
+var salt = (passphrase) => nfkd(`mnemonic${passphrase}`);
 function mnemonicToSeed(mnemonic, passphrase = "") {
-  return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), {
-    c: 2048,
-    dkLen: 64
-  });
+  return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
 }
-__name(mnemonicToSeed, "mnemonicToSeed");
 function mnemonicToSeedSync(mnemonic, passphrase = "") {
-  return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), {
-    c: 2048,
-    dkLen: 64
-  });
+  return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
 }
-__name(mnemonicToSeedSync, "mnemonicToSeedSync");
 export {
   entropyToMnemonic,
   mnemonicToEntropy,
package/dist/neutral/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../src/index.ts","../../src/wordlists/index.ts"],"sourcesContent":["
+
{"version":3,"sources":["../../src/index.ts","../../src/wordlists/index.ts"],"sourcesContent":["/*! scure-bip39 - MIT License (c) 2022 Patricio Palladino, Paul Miller (paulmillr.com) */\nimport assert from '@noble/hashes/_assert'\nimport { pbkdf2, pbkdf2Async } from '@noble/hashes/pbkdf2'\nimport { sha256 } from '@noble/hashes/sha256'\nimport { sha512 } from '@noble/hashes/sha512'\nimport { utils as baseUtils } from '@scure/base'\n\nexport * from './wordlists/index.js'\n\n// Japanese wordlist\nconst isJapanese = (wordlist: string[]) => wordlist[0] === '\\u3042\\u3044\\u3053\\u304F\\u3057\\u3093'\n\n// Normalization replaces equivalent sequences of characters\n// so that any two texts that are equivalent will be reduced\n// to the same sequence of code points, called the normal form of the original text.\nfunction nfkd(str: string) {\n if (typeof str !== 'string') throw new TypeError(`Invalid mnemonic type: ${typeof str}`)\n return str.normalize('NFKD')\n}\n\nfunction normalize(str: string) {\n const norm = nfkd(str)\n const words = norm.split(' ')\n if (![12, 15, 18, 21, 24].includes(words.length)) throw new Error('Invalid mnemonic')\n return { nfkd: norm, words }\n}\n\nfunction assertEntropy(entropy: Uint8Array) {\n assert.bytes(entropy, 16, 20, 24, 28, 32)\n}\n\nconst calcChecksum = (entropy: Uint8Array) => {\n // Checksum is ent.length/4 bits long\n const bitsLeft = 8 - entropy.length / 4\n // Zero rightmost \"bitsLeft\" bits in byte\n // For example: bitsLeft=4 val=10111101 -> 10110000\n return new Uint8Array([(sha256(entropy)[0] >> bitsLeft) << bitsLeft])\n}\n\nfunction getCoder(wordlist: string[]) {\n if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== 'string')\n throw new Error('Worlist: expected array of 2048 strings')\n for (const i of wordlist) {\n if (typeof i !== 'string') throw new Error(`Wordlist: non-string element: ${i}`)\n }\n return baseUtils.chain(baseUtils.checksum(1, calcChecksum), baseUtils.radix2(11, true), baseUtils.alphabet(wordlist))\n}\n\n/**\n * Reversible: Converts mnemonic string to raw entropy in form of byte array.\n * @param mnemonic 12-24 words\n * @param wordlist imported wordlist for specific language\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * mnemonicToEntropy(mnem, wordlist)\n * // Produces\n * new Uint8Array([\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f\n * ])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToEntropy(mnemonic: string, wordlist: string[]): Uint8Array {\n const { words } = normalize(mnemonic)\n const entropy = getCoder(wordlist).decode(words)\n assertEntropy(entropy)\n return entropy\n}\n\n/**\n * Reversible: Converts raw entropy in form of byte array to mnemonic string.\n * @param entropy byte array\n * @param wordlist imported wordlist for specific language\n * @returns 12-24 words\n * @example\n * const ent = new Uint8Array([\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f\n * ]);\n * entropyToMnemonic(ent, wordlist);\n * // 'legal winner thank year wave sausage worth useful legal winner thank yellow'\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function entropyToMnemonic(entropy: Uint8Array, wordlist: string[]): string {\n assertEntropy(entropy)\n const words = getCoder(wordlist).encode(entropy)\n return words.join(isJapanese(wordlist) ? 
'\\u3000' : ' ')\n}\n\nconst salt = (passphrase: string) => nfkd(`mnemonic${passphrase}`)\n\n/**\n * Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.\n * @param mnemonic 12-24 words\n * @param passphrase string that will additionally protect the key\n * @returns 64 bytes of key data\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * await mnemonicToSeed(mnem, 'password');\n * // new Uint8Array([...64 bytes])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToSeed(mnemonic: string, passphrase = '') {\n return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 })\n}\n\n/**\n * Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.\n * @param mnemonic 12-24 words\n * @param passphrase string that will additionally protect the key\n * @returns 64 bytes of key data\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * mnemonicToSeedSync(mnem, 'password');\n * // new Uint8Array([...64 bytes])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToSeedSync(mnemonic: string, passphrase = '') {\n return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 })\n}\n","import czech from '@scure/bip39/wordlists/czech'\nimport english from '@scure/bip39/wordlists/english'\nimport french from '@scure/bip39/wordlists/french'\nimport italian from '@scure/bip39/wordlists/italian'\nimport japanese from '@scure/bip39/wordlists/japanese'\nimport korean from '@scure/bip39/wordlists/korean'\nimport simplifiedChinese from '@scure/bip39/wordlists/simplified-chinese'\nimport spanish from '@scure/bip39/wordlists/spanish'\nimport traditionalChinese from '@scure/bip39/wordlists/traditional-chinese'\n\n/** @deprecated use @scure/bip39 instead */\nexport const wordlists = {\n czech,\n english,\n french,\n italian,\n japanese,\n korean,\n simplifiedChinese,\n spanish,\n 
traditionalChinese,\n}\n"],"mappings":";AACA,OAAO,YAAY;AACnB,SAAS,QAAQ,mBAAmB;AACpC,SAAS,cAAc;AACvB,SAAS,cAAc;AACvB,SAAS,SAAS,iBAAiB;;;ACLnC,OAAO,WAAW;AAClB,OAAO,aAAa;AACpB,OAAO,YAAY;AACnB,OAAO,aAAa;AACpB,OAAO,cAAc;AACrB,OAAO,YAAY;AACnB,OAAO,uBAAuB;AAC9B,OAAO,aAAa;AACpB,OAAO,wBAAwB;AAGxB,IAAM,YAAY;AAAA,EACvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ADXA,IAAM,aAAa,CAAC,aAAuB,SAAS,CAAC,MAAM;AAK3D,SAAS,KAAK,KAAa;AACzB,MAAI,OAAO,QAAQ,SAAU,OAAM,IAAI,UAAU,0BAA0B,OAAO,GAAG,EAAE;AACvF,SAAO,IAAI,UAAU,MAAM;AAC7B;AAEA,SAAS,UAAU,KAAa;AAC9B,QAAM,OAAO,KAAK,GAAG;AACrB,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,MAAI,CAAC,CAAC,IAAI,IAAI,IAAI,IAAI,EAAE,EAAE,SAAS,MAAM,MAAM,EAAG,OAAM,IAAI,MAAM,kBAAkB;AACpF,SAAO,EAAE,MAAM,MAAM,MAAM;AAC7B;AAEA,SAAS,cAAc,SAAqB;AAC1C,SAAO,MAAM,SAAS,IAAI,IAAI,IAAI,IAAI,EAAE;AAC1C;AAEA,IAAM,eAAe,CAAC,YAAwB;AAE5C,QAAM,WAAW,IAAI,QAAQ,SAAS;AAGtC,SAAO,IAAI,WAAW,CAAE,OAAO,OAAO,EAAE,CAAC,KAAK,YAAa,QAAQ,CAAC;AACtE;AAEA,SAAS,SAAS,UAAoB;AACpC,MAAI,CAAC,MAAM,QAAQ,QAAQ,KAAK,SAAS,WAAW,QAAQ,OAAO,SAAS,CAAC,MAAM;AACjF,UAAM,IAAI,MAAM,yCAAyC;AAC3D,aAAW,KAAK,UAAU;AACxB,QAAI,OAAO,MAAM,SAAU,OAAM,IAAI,MAAM,iCAAiC,CAAC,EAAE;AAAA,EACjF;AACA,SAAO,UAAU,MAAM,UAAU,SAAS,GAAG,YAAY,GAAG,UAAU,OAAO,IAAI,IAAI,GAAG,UAAU,SAAS,QAAQ,CAAC;AACtH;AAiBO,SAAS,kBAAkB,UAAkB,UAAgC;AAClF,QAAM,EAAE,MAAM,IAAI,UAAU,QAAQ;AACpC,QAAM,UAAU,SAAS,QAAQ,EAAE,OAAO,KAAK;AAC/C,gBAAc,OAAO;AACrB,SAAO;AACT;AAiBO,SAAS,kBAAkB,SAAqB,UAA4B;AACjF,gBAAc,OAAO;AACrB,QAAM,QAAQ,SAAS,QAAQ,EAAE,OAAO,OAAO;AAC/C,SAAO,MAAM,KAAK,WAAW,QAAQ,IAAI,WAAW,GAAG;AACzD;AAEA,IAAM,OAAO,CAAC,eAAuB,KAAK,WAAW,UAAU,EAAE;AAc1D,SAAS,eAAe,UAAkB,aAAa,IAAI;AAChE,SAAO,YAAY,QAAQ,UAAU,QAAQ,EAAE,MAAM,KAAK,UAAU,GAAG,EAAE,GAAG,MAAM,OAAO,GAAG,CAAC;AAC/F;AAcO,SAAS,mBAAmB,UAAkB,aAAa,IAAI;AACpE,SAAO,OAAO,QAAQ,UAAU,QAAQ,EAAE,MAAM,KAAK,UAAU,GAAG,EAAE,GAAG,MAAM,OAAO,GAAG,CAAC;AAC1F;","names":[]}
package/dist/neutral/wordlists/index.d.cts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,8BAA8B,CAAA;AAChD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,QAAQ,MAAM,iCAAiC,CAAA;AACtD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,iBAAiB,MAAM,2CAA2C,CAAA;AACzE,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,kBAAkB,MAAM,4CAA4C,CAAA;AAG3E,eAAO,MAAM,SAAS;;;;;;;;;;CAUrB,CAAA"}
package/dist/neutral/wordlists/index.d.mts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,8BAA8B,CAAA;AAChD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,QAAQ,MAAM,iCAAiC,CAAA;AACtD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,iBAAiB,MAAM,2CAA2C,CAAA;AACzE,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,kBAAkB,MAAM,4CAA4C,CAAA;AAG3E,eAAO,MAAM,SAAS;;;;;;;;;;CAUrB,CAAA"}
package/dist/neutral/wordlists/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,8BAA8B,CAAA;AAChD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,QAAQ,MAAM,iCAAiC,CAAA;AACtD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,iBAAiB,MAAM,2CAA2C,CAAA;AACzE,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,kBAAkB,MAAM,4CAA4C,CAAA;AAG3E,eAAO,MAAM,SAAS;;;;;;;;;;CAUrB,CAAA"}
|
package/dist/node/index.cjs
CHANGED
@@ -5,7 +5,6 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __getProtoOf = Object.getPrototypeOf;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
 var __export = (target, all) => {
 for (var name in all)
 __defProp(target, name, { get: all[name], enumerable: true });
@@ -67,74 +66,50 @@ var wordlists = {
 };

 // src/index.ts
-var isJapanese =
+var isJapanese = (wordlist) => wordlist[0] === "\u3042\u3044\u3053\u304F\u3057\u3093";
 function nfkd(str) {
 if (typeof str !== "string") throw new TypeError(`Invalid mnemonic type: ${typeof str}`);
 return str.normalize("NFKD");
 }
-__name(nfkd, "nfkd");
 function normalize(str) {
 const norm = nfkd(str);
 const words = norm.split(" ");
-if (![
-12,
-15,
-18,
-21,
-24
-].includes(words.length)) throw new Error("Invalid mnemonic");
-return {
-nfkd: norm,
-words
-};
+if (![12, 15, 18, 21, 24].includes(words.length)) throw new Error("Invalid mnemonic");
+return { nfkd: norm, words };
 }
-__name(normalize, "normalize");
 function assertEntropy(entropy) {
 import_assert.default.bytes(entropy, 16, 20, 24, 28, 32);
 }
-__name(assertEntropy, "assertEntropy");
-var calcChecksum = /* @__PURE__ */ __name((entropy) => {
+var calcChecksum = (entropy) => {
 const bitsLeft = 8 - entropy.length / 4;
-return new Uint8Array([
-(0, import_sha256.sha256)(entropy)[0] >> bitsLeft << bitsLeft
-]);
-}, "calcChecksum");
+return new Uint8Array([(0, import_sha256.sha256)(entropy)[0] >> bitsLeft << bitsLeft]);
+};
 function getCoder(wordlist) {
-if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== "string")
+if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== "string")
+throw new Error("Worlist: expected array of 2048 strings");
 for (const i of wordlist) {
 if (typeof i !== "string") throw new Error(`Wordlist: non-string element: ${i}`);
 }
 return import_base.utils.chain(import_base.utils.checksum(1, calcChecksum), import_base.utils.radix2(11, true), import_base.utils.alphabet(wordlist));
 }
-__name(getCoder, "getCoder");
 function mnemonicToEntropy(mnemonic, wordlist) {
 const { words } = normalize(mnemonic);
 const entropy = getCoder(wordlist).decode(words);
 assertEntropy(entropy);
 return entropy;
 }
-__name(mnemonicToEntropy, "mnemonicToEntropy");
 function entropyToMnemonic(entropy, wordlist) {
 assertEntropy(entropy);
 const words = getCoder(wordlist).encode(entropy);
 return words.join(isJapanese(wordlist) ? "\u3000" : " ");
 }
-__name(entropyToMnemonic, "entropyToMnemonic");
-var salt = /* @__PURE__ */ __name((passphrase) => nfkd(`mnemonic${passphrase}`), "salt");
+var salt = (passphrase) => nfkd(`mnemonic${passphrase}`);
 function mnemonicToSeed(mnemonic, passphrase = "") {
-return (0, import_pbkdf2.pbkdf2Async)(import_sha512.sha512, normalize(mnemonic).nfkd, salt(passphrase), {
-c: 2048,
-dkLen: 64
-});
+return (0, import_pbkdf2.pbkdf2Async)(import_sha512.sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
 }
-__name(mnemonicToSeed, "mnemonicToSeed");
 function mnemonicToSeedSync(mnemonic, passphrase = "") {
-return (0, import_pbkdf2.pbkdf2)(import_sha512.sha512, normalize(mnemonic).nfkd, salt(passphrase), {
-c: 2048,
-dkLen: 64
-});
+return (0, import_pbkdf2.pbkdf2)(import_sha512.sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
 }
-__name(mnemonicToSeedSync, "mnemonicToSeedSync");
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
 entropyToMnemonic,
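The hunks above only drop the __name helper that the previous build injected to preserve function names; the exported API is unchanged. The sketch below is illustrative only: it assumes the package is installed under its published name and reuses the bundled English wordlist and the well-known BIP39 test phrase that already appears in this package's doc comments.

```ts
// Illustrative usage sketch (not part of the diff). The phrase is a standard
// BIP39 test vector, not a real secret.
import { entropyToMnemonic, mnemonicToEntropy, mnemonicToSeedSync, wordlists } from '@xyo-network/bip39'

const mnemonic = 'legal winner thank year wave sausage worth useful legal winner thank yellow'

// Reversible: 12 words <-> 16 bytes of entropy, validated against the wordlist checksum.
const entropy = mnemonicToEntropy(mnemonic, wordlists.english)
const roundTrip = entropyToMnemonic(entropy, wordlists.english)

// Irreversible: PBKDF2-HMAC-SHA512, 2048 iterations, 64-byte output,
// salted with "mnemonic" + passphrase, as in the bundled code above.
const seed = mnemonicToSeedSync(mnemonic, 'optional passphrase')

console.log(roundTrip === mnemonic, entropy.length, seed.length) // true 16 64
```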
package/dist/node/index.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../src/index.ts","../../src/wordlists/index.ts"],"sourcesContent":["
+
{"version":3,"sources":["../../src/index.ts","../../src/wordlists/index.ts"],"sourcesContent":["/*! scure-bip39 - MIT License (c) 2022 Patricio Palladino, Paul Miller (paulmillr.com) */\nimport assert from '@noble/hashes/_assert'\nimport { pbkdf2, pbkdf2Async } from '@noble/hashes/pbkdf2'\nimport { sha256 } from '@noble/hashes/sha256'\nimport { sha512 } from '@noble/hashes/sha512'\nimport { utils as baseUtils } from '@scure/base'\n\nexport * from './wordlists/index.js'\n\n// Japanese wordlist\nconst isJapanese = (wordlist: string[]) => wordlist[0] === '\\u3042\\u3044\\u3053\\u304F\\u3057\\u3093'\n\n// Normalization replaces equivalent sequences of characters\n// so that any two texts that are equivalent will be reduced\n// to the same sequence of code points, called the normal form of the original text.\nfunction nfkd(str: string) {\n if (typeof str !== 'string') throw new TypeError(`Invalid mnemonic type: ${typeof str}`)\n return str.normalize('NFKD')\n}\n\nfunction normalize(str: string) {\n const norm = nfkd(str)\n const words = norm.split(' ')\n if (![12, 15, 18, 21, 24].includes(words.length)) throw new Error('Invalid mnemonic')\n return { nfkd: norm, words }\n}\n\nfunction assertEntropy(entropy: Uint8Array) {\n assert.bytes(entropy, 16, 20, 24, 28, 32)\n}\n\nconst calcChecksum = (entropy: Uint8Array) => {\n // Checksum is ent.length/4 bits long\n const bitsLeft = 8 - entropy.length / 4\n // Zero rightmost \"bitsLeft\" bits in byte\n // For example: bitsLeft=4 val=10111101 -> 10110000\n return new Uint8Array([(sha256(entropy)[0] >> bitsLeft) << bitsLeft])\n}\n\nfunction getCoder(wordlist: string[]) {\n if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== 'string')\n throw new Error('Worlist: expected array of 2048 strings')\n for (const i of wordlist) {\n if (typeof i !== 'string') throw new Error(`Wordlist: non-string element: ${i}`)\n }\n return baseUtils.chain(baseUtils.checksum(1, calcChecksum), baseUtils.radix2(11, true), baseUtils.alphabet(wordlist))\n}\n\n/**\n * Reversible: Converts mnemonic string to raw entropy in form of byte array.\n * @param mnemonic 12-24 words\n * @param wordlist imported wordlist for specific language\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * mnemonicToEntropy(mnem, wordlist)\n * // Produces\n * new Uint8Array([\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f\n * ])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToEntropy(mnemonic: string, wordlist: string[]): Uint8Array {\n const { words } = normalize(mnemonic)\n const entropy = getCoder(wordlist).decode(words)\n assertEntropy(entropy)\n return entropy\n}\n\n/**\n * Reversible: Converts raw entropy in form of byte array to mnemonic string.\n * @param entropy byte array\n * @param wordlist imported wordlist for specific language\n * @returns 12-24 words\n * @example\n * const ent = new Uint8Array([\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f\n * ]);\n * entropyToMnemonic(ent, wordlist);\n * // 'legal winner thank year wave sausage worth useful legal winner thank yellow'\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function entropyToMnemonic(entropy: Uint8Array, wordlist: string[]): string {\n assertEntropy(entropy)\n const words = getCoder(wordlist).encode(entropy)\n return words.join(isJapanese(wordlist) ? 
'\\u3000' : ' ')\n}\n\nconst salt = (passphrase: string) => nfkd(`mnemonic${passphrase}`)\n\n/**\n * Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.\n * @param mnemonic 12-24 words\n * @param passphrase string that will additionally protect the key\n * @returns 64 bytes of key data\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * await mnemonicToSeed(mnem, 'password');\n * // new Uint8Array([...64 bytes])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToSeed(mnemonic: string, passphrase = '') {\n return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 })\n}\n\n/**\n * Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.\n * @param mnemonic 12-24 words\n * @param passphrase string that will additionally protect the key\n * @returns 64 bytes of key data\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * mnemonicToSeedSync(mnem, 'password');\n * // new Uint8Array([...64 bytes])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToSeedSync(mnemonic: string, passphrase = '') {\n return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 })\n}\n","import czech from '@scure/bip39/wordlists/czech'\nimport english from '@scure/bip39/wordlists/english'\nimport french from '@scure/bip39/wordlists/french'\nimport italian from '@scure/bip39/wordlists/italian'\nimport japanese from '@scure/bip39/wordlists/japanese'\nimport korean from '@scure/bip39/wordlists/korean'\nimport simplifiedChinese from '@scure/bip39/wordlists/simplified-chinese'\nimport spanish from '@scure/bip39/wordlists/spanish'\nimport traditionalChinese from '@scure/bip39/wordlists/traditional-chinese'\n\n/** @deprecated use @scure/bip39 instead */\nexport const wordlists = {\n czech,\n english,\n french,\n italian,\n japanese,\n korean,\n simplifiedChinese,\n spanish,\n 
traditionalChinese,\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAmB;AACnB,oBAAoC;AACpC,oBAAuB;AACvB,oBAAuB;AACvB,kBAAmC;;;ACLnC,mBAAkB;AAClB,qBAAoB;AACpB,oBAAmB;AACnB,qBAAoB;AACpB,sBAAqB;AACrB,oBAAmB;AACnB,gCAA8B;AAC9B,qBAAoB;AACpB,iCAA+B;AAGxB,IAAM,YAAY;AAAA,EACvB,oBAAAA;AAAA,EACA,wBAAAC;AAAA,EACA,sBAAAC;AAAA,EACA,wBAAAC;AAAA,EACA,0BAAAC;AAAA,EACA,sBAAAC;AAAA,EACA,6CAAAC;AAAA,EACA,wBAAAC;AAAA,EACA,+CAAAC;AACF;;;ADXA,IAAM,aAAa,CAAC,aAAuB,SAAS,CAAC,MAAM;AAK3D,SAAS,KAAK,KAAa;AACzB,MAAI,OAAO,QAAQ,SAAU,OAAM,IAAI,UAAU,0BAA0B,OAAO,GAAG,EAAE;AACvF,SAAO,IAAI,UAAU,MAAM;AAC7B;AAEA,SAAS,UAAU,KAAa;AAC9B,QAAM,OAAO,KAAK,GAAG;AACrB,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,MAAI,CAAC,CAAC,IAAI,IAAI,IAAI,IAAI,EAAE,EAAE,SAAS,MAAM,MAAM,EAAG,OAAM,IAAI,MAAM,kBAAkB;AACpF,SAAO,EAAE,MAAM,MAAM,MAAM;AAC7B;AAEA,SAAS,cAAc,SAAqB;AAC1C,gBAAAC,QAAO,MAAM,SAAS,IAAI,IAAI,IAAI,IAAI,EAAE;AAC1C;AAEA,IAAM,eAAe,CAAC,YAAwB;AAE5C,QAAM,WAAW,IAAI,QAAQ,SAAS;AAGtC,SAAO,IAAI,WAAW,KAAE,sBAAO,OAAO,EAAE,CAAC,KAAK,YAAa,QAAQ,CAAC;AACtE;AAEA,SAAS,SAAS,UAAoB;AACpC,MAAI,CAAC,MAAM,QAAQ,QAAQ,KAAK,SAAS,WAAW,QAAQ,OAAO,SAAS,CAAC,MAAM;AACjF,UAAM,IAAI,MAAM,yCAAyC;AAC3D,aAAW,KAAK,UAAU;AACxB,QAAI,OAAO,MAAM,SAAU,OAAM,IAAI,MAAM,iCAAiC,CAAC,EAAE;AAAA,EACjF;AACA,SAAO,YAAAC,MAAU,MAAM,YAAAA,MAAU,SAAS,GAAG,YAAY,GAAG,YAAAA,MAAU,OAAO,IAAI,IAAI,GAAG,YAAAA,MAAU,SAAS,QAAQ,CAAC;AACtH;AAiBO,SAAS,kBAAkB,UAAkB,UAAgC;AAClF,QAAM,EAAE,MAAM,IAAI,UAAU,QAAQ;AACpC,QAAM,UAAU,SAAS,QAAQ,EAAE,OAAO,KAAK;AAC/C,gBAAc,OAAO;AACrB,SAAO;AACT;AAiBO,SAAS,kBAAkB,SAAqB,UAA4B;AACjF,gBAAc,OAAO;AACrB,QAAM,QAAQ,SAAS,QAAQ,EAAE,OAAO,OAAO;AAC/C,SAAO,MAAM,KAAK,WAAW,QAAQ,IAAI,WAAW,GAAG;AACzD;AAEA,IAAM,OAAO,CAAC,eAAuB,KAAK,WAAW,UAAU,EAAE;AAc1D,SAAS,eAAe,UAAkB,aAAa,IAAI;AAChE,aAAO,2BAAY,sBAAQ,UAAU,QAAQ,EAAE,MAAM,KAAK,UAAU,GAAG,EAAE,GAAG,MAAM,OAAO,GAAG,CAAC;AAC/F;AAcO,SAAS,mBAAmB,UAAkB,aAAa,IAAI;AACpE,aAAO,sBAAO,sBAAQ,UAAU,QAAQ,EAAE,MAAM,KAAK,UAAU,GAAG,EAAE,GAAG,MAAM,OAAO,GAAG,CAAC;AAC1F;","names":["czech","english","french","italian","japanese","korean","simplifiedChinese","spanish","traditionalChinese","assert","baseUtils"]}
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAOA,cAAc,sBAAsB,CAAA;AAwDpC,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,UAAU,CAKlF;AAiBD,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,MAAM,CAIjF;AAgBD,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,uBAE/D;AAcD,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,cAEnE"}
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAOA,cAAc,sBAAsB,CAAA;AAwDpC,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,UAAU,CAKlF;AAiBD,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,MAAM,CAIjF;AAgBD,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,uBAE/D;AAcD,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,cAEnE"}
package/dist/node/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAOA,cAAc,sBAAsB,CAAA;AAwDpC,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,UAAU,CAKlF;AAiBD,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,MAAM,CAIjF;AAgBD,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,uBAE/D;AAcD,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,SAAK,cAEnE"}
package/dist/node/index.js
CHANGED
@@ -1,6 +1,3 @@
-var __defProp = Object.defineProperty;
-var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
-
 // src/index.ts
 import assert from "@noble/hashes/_assert";
 import { pbkdf2, pbkdf2Async } from "@noble/hashes/pbkdf2";
@@ -31,74 +28,50 @@ var wordlists = {
 };

 // src/index.ts
-var isJapanese =
+var isJapanese = (wordlist) => wordlist[0] === "\u3042\u3044\u3053\u304F\u3057\u3093";
 function nfkd(str) {
 if (typeof str !== "string") throw new TypeError(`Invalid mnemonic type: ${typeof str}`);
 return str.normalize("NFKD");
 }
-__name(nfkd, "nfkd");
 function normalize(str) {
 const norm = nfkd(str);
 const words = norm.split(" ");
-if (![
-12,
-15,
-18,
-21,
-24
-].includes(words.length)) throw new Error("Invalid mnemonic");
-return {
-nfkd: norm,
-words
-};
+if (![12, 15, 18, 21, 24].includes(words.length)) throw new Error("Invalid mnemonic");
+return { nfkd: norm, words };
 }
-__name(normalize, "normalize");
 function assertEntropy(entropy) {
 assert.bytes(entropy, 16, 20, 24, 28, 32);
 }
-__name(assertEntropy, "assertEntropy");
-var calcChecksum = /* @__PURE__ */ __name((entropy) => {
+var calcChecksum = (entropy) => {
 const bitsLeft = 8 - entropy.length / 4;
-return new Uint8Array([
-sha256(entropy)[0] >> bitsLeft << bitsLeft
-]);
-}, "calcChecksum");
+return new Uint8Array([sha256(entropy)[0] >> bitsLeft << bitsLeft]);
+};
 function getCoder(wordlist) {
-if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== "string")
+if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== "string")
+throw new Error("Worlist: expected array of 2048 strings");
 for (const i of wordlist) {
 if (typeof i !== "string") throw new Error(`Wordlist: non-string element: ${i}`);
 }
 return baseUtils.chain(baseUtils.checksum(1, calcChecksum), baseUtils.radix2(11, true), baseUtils.alphabet(wordlist));
 }
-__name(getCoder, "getCoder");
 function mnemonicToEntropy(mnemonic, wordlist) {
 const { words } = normalize(mnemonic);
 const entropy = getCoder(wordlist).decode(words);
 assertEntropy(entropy);
 return entropy;
 }
-__name(mnemonicToEntropy, "mnemonicToEntropy");
 function entropyToMnemonic(entropy, wordlist) {
 assertEntropy(entropy);
 const words = getCoder(wordlist).encode(entropy);
 return words.join(isJapanese(wordlist) ? "\u3000" : " ");
 }
-__name(entropyToMnemonic, "entropyToMnemonic");
-var salt = /* @__PURE__ */ __name((passphrase) => nfkd(`mnemonic${passphrase}`), "salt");
+var salt = (passphrase) => nfkd(`mnemonic${passphrase}`);
 function mnemonicToSeed(mnemonic, passphrase = "") {
-return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), {
-c: 2048,
-dkLen: 64
-});
+return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
 }
-__name(mnemonicToSeed, "mnemonicToSeed");
 function mnemonicToSeedSync(mnemonic, passphrase = "") {
-return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), {
-c: 2048,
-dkLen: 64
-});
+return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
 }
-__name(mnemonicToSeedSync, "mnemonicToSeedSync");
 export {
 entropyToMnemonic,
 mnemonicToEntropy,
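In both bundles the calcChecksum change is cosmetic: the __name wrapper goes away while the checksum math stays the same. Below is a standalone sketch of that computation; it assumes @noble/hashes (the dependency the bundle itself imports) is available, and the helper name is made up for illustration.

```ts
// Sketch of the checksum byte computed by calcChecksum in the diff above.
// For ENT bytes of entropy, BIP39 uses ENT/4 checksum bits: the first byte of
// SHA-256(entropy) with its low (8 - ENT/4) bits cleared.
import { sha256 } from '@noble/hashes/sha256'

const checksumByte = (entropy: Uint8Array): number => {
  const bitsLeft = 8 - entropy.length / 4 // 16-byte entropy -> keep the top 4 bits
  return (sha256(entropy)[0] >> bitsLeft) << bitsLeft
}

// Dummy all-zero entropy, purely to show the shape of the result.
console.log(checksumByte(new Uint8Array(16)).toString(2).padStart(8, '0'))
```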
package/dist/node/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../src/index.ts","../../src/wordlists/index.ts"],"sourcesContent":["
+
{"version":3,"sources":["../../src/index.ts","../../src/wordlists/index.ts"],"sourcesContent":["/*! scure-bip39 - MIT License (c) 2022 Patricio Palladino, Paul Miller (paulmillr.com) */\nimport assert from '@noble/hashes/_assert'\nimport { pbkdf2, pbkdf2Async } from '@noble/hashes/pbkdf2'\nimport { sha256 } from '@noble/hashes/sha256'\nimport { sha512 } from '@noble/hashes/sha512'\nimport { utils as baseUtils } from '@scure/base'\n\nexport * from './wordlists/index.js'\n\n// Japanese wordlist\nconst isJapanese = (wordlist: string[]) => wordlist[0] === '\\u3042\\u3044\\u3053\\u304F\\u3057\\u3093'\n\n// Normalization replaces equivalent sequences of characters\n// so that any two texts that are equivalent will be reduced\n// to the same sequence of code points, called the normal form of the original text.\nfunction nfkd(str: string) {\n if (typeof str !== 'string') throw new TypeError(`Invalid mnemonic type: ${typeof str}`)\n return str.normalize('NFKD')\n}\n\nfunction normalize(str: string) {\n const norm = nfkd(str)\n const words = norm.split(' ')\n if (![12, 15, 18, 21, 24].includes(words.length)) throw new Error('Invalid mnemonic')\n return { nfkd: norm, words }\n}\n\nfunction assertEntropy(entropy: Uint8Array) {\n assert.bytes(entropy, 16, 20, 24, 28, 32)\n}\n\nconst calcChecksum = (entropy: Uint8Array) => {\n // Checksum is ent.length/4 bits long\n const bitsLeft = 8 - entropy.length / 4\n // Zero rightmost \"bitsLeft\" bits in byte\n // For example: bitsLeft=4 val=10111101 -> 10110000\n return new Uint8Array([(sha256(entropy)[0] >> bitsLeft) << bitsLeft])\n}\n\nfunction getCoder(wordlist: string[]) {\n if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== 'string')\n throw new Error('Worlist: expected array of 2048 strings')\n for (const i of wordlist) {\n if (typeof i !== 'string') throw new Error(`Wordlist: non-string element: ${i}`)\n }\n return baseUtils.chain(baseUtils.checksum(1, calcChecksum), baseUtils.radix2(11, true), baseUtils.alphabet(wordlist))\n}\n\n/**\n * Reversible: Converts mnemonic string to raw entropy in form of byte array.\n * @param mnemonic 12-24 words\n * @param wordlist imported wordlist for specific language\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * mnemonicToEntropy(mnem, wordlist)\n * // Produces\n * new Uint8Array([\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f\n * ])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToEntropy(mnemonic: string, wordlist: string[]): Uint8Array {\n const { words } = normalize(mnemonic)\n const entropy = getCoder(wordlist).decode(words)\n assertEntropy(entropy)\n return entropy\n}\n\n/**\n * Reversible: Converts raw entropy in form of byte array to mnemonic string.\n * @param entropy byte array\n * @param wordlist imported wordlist for specific language\n * @returns 12-24 words\n * @example\n * const ent = new Uint8Array([\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,\n * 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f\n * ]);\n * entropyToMnemonic(ent, wordlist);\n * // 'legal winner thank year wave sausage worth useful legal winner thank yellow'\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function entropyToMnemonic(entropy: Uint8Array, wordlist: string[]): string {\n assertEntropy(entropy)\n const words = getCoder(wordlist).encode(entropy)\n return words.join(isJapanese(wordlist) ? 
'\\u3000' : ' ')\n}\n\nconst salt = (passphrase: string) => nfkd(`mnemonic${passphrase}`)\n\n/**\n * Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.\n * @param mnemonic 12-24 words\n * @param passphrase string that will additionally protect the key\n * @returns 64 bytes of key data\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * await mnemonicToSeed(mnem, 'password');\n * // new Uint8Array([...64 bytes])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToSeed(mnemonic: string, passphrase = '') {\n return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 })\n}\n\n/**\n * Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.\n * @param mnemonic 12-24 words\n * @param passphrase string that will additionally protect the key\n * @returns 64 bytes of key data\n * @example\n * const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';\n * mnemonicToSeedSync(mnem, 'password');\n * // new Uint8Array([...64 bytes])\n */\n\n/** @deprecated use @scure/bip39 instead */\nexport function mnemonicToSeedSync(mnemonic: string, passphrase = '') {\n return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 })\n}\n","import czech from '@scure/bip39/wordlists/czech'\nimport english from '@scure/bip39/wordlists/english'\nimport french from '@scure/bip39/wordlists/french'\nimport italian from '@scure/bip39/wordlists/italian'\nimport japanese from '@scure/bip39/wordlists/japanese'\nimport korean from '@scure/bip39/wordlists/korean'\nimport simplifiedChinese from '@scure/bip39/wordlists/simplified-chinese'\nimport spanish from '@scure/bip39/wordlists/spanish'\nimport traditionalChinese from '@scure/bip39/wordlists/traditional-chinese'\n\n/** @deprecated use @scure/bip39 instead */\nexport const wordlists = {\n czech,\n english,\n french,\n italian,\n japanese,\n korean,\n simplifiedChinese,\n spanish,\n 
traditionalChinese,\n}\n"],"mappings":";AACA,OAAO,YAAY;AACnB,SAAS,QAAQ,mBAAmB;AACpC,SAAS,cAAc;AACvB,SAAS,cAAc;AACvB,SAAS,SAAS,iBAAiB;;;ACLnC,OAAO,WAAW;AAClB,OAAO,aAAa;AACpB,OAAO,YAAY;AACnB,OAAO,aAAa;AACpB,OAAO,cAAc;AACrB,OAAO,YAAY;AACnB,OAAO,uBAAuB;AAC9B,OAAO,aAAa;AACpB,OAAO,wBAAwB;AAGxB,IAAM,YAAY;AAAA,EACvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;ADXA,IAAM,aAAa,CAAC,aAAuB,SAAS,CAAC,MAAM;AAK3D,SAAS,KAAK,KAAa;AACzB,MAAI,OAAO,QAAQ,SAAU,OAAM,IAAI,UAAU,0BAA0B,OAAO,GAAG,EAAE;AACvF,SAAO,IAAI,UAAU,MAAM;AAC7B;AAEA,SAAS,UAAU,KAAa;AAC9B,QAAM,OAAO,KAAK,GAAG;AACrB,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,MAAI,CAAC,CAAC,IAAI,IAAI,IAAI,IAAI,EAAE,EAAE,SAAS,MAAM,MAAM,EAAG,OAAM,IAAI,MAAM,kBAAkB;AACpF,SAAO,EAAE,MAAM,MAAM,MAAM;AAC7B;AAEA,SAAS,cAAc,SAAqB;AAC1C,SAAO,MAAM,SAAS,IAAI,IAAI,IAAI,IAAI,EAAE;AAC1C;AAEA,IAAM,eAAe,CAAC,YAAwB;AAE5C,QAAM,WAAW,IAAI,QAAQ,SAAS;AAGtC,SAAO,IAAI,WAAW,CAAE,OAAO,OAAO,EAAE,CAAC,KAAK,YAAa,QAAQ,CAAC;AACtE;AAEA,SAAS,SAAS,UAAoB;AACpC,MAAI,CAAC,MAAM,QAAQ,QAAQ,KAAK,SAAS,WAAW,QAAQ,OAAO,SAAS,CAAC,MAAM;AACjF,UAAM,IAAI,MAAM,yCAAyC;AAC3D,aAAW,KAAK,UAAU;AACxB,QAAI,OAAO,MAAM,SAAU,OAAM,IAAI,MAAM,iCAAiC,CAAC,EAAE;AAAA,EACjF;AACA,SAAO,UAAU,MAAM,UAAU,SAAS,GAAG,YAAY,GAAG,UAAU,OAAO,IAAI,IAAI,GAAG,UAAU,SAAS,QAAQ,CAAC;AACtH;AAiBO,SAAS,kBAAkB,UAAkB,UAAgC;AAClF,QAAM,EAAE,MAAM,IAAI,UAAU,QAAQ;AACpC,QAAM,UAAU,SAAS,QAAQ,EAAE,OAAO,KAAK;AAC/C,gBAAc,OAAO;AACrB,SAAO;AACT;AAiBO,SAAS,kBAAkB,SAAqB,UAA4B;AACjF,gBAAc,OAAO;AACrB,QAAM,QAAQ,SAAS,QAAQ,EAAE,OAAO,OAAO;AAC/C,SAAO,MAAM,KAAK,WAAW,QAAQ,IAAI,WAAW,GAAG;AACzD;AAEA,IAAM,OAAO,CAAC,eAAuB,KAAK,WAAW,UAAU,EAAE;AAc1D,SAAS,eAAe,UAAkB,aAAa,IAAI;AAChE,SAAO,YAAY,QAAQ,UAAU,QAAQ,EAAE,MAAM,KAAK,UAAU,GAAG,EAAE,GAAG,MAAM,OAAO,GAAG,CAAC;AAC/F;AAcO,SAAS,mBAAmB,UAAkB,aAAa,IAAI;AACpE,SAAO,OAAO,QAAQ,UAAU,QAAQ,EAAE,MAAM,KAAK,UAAU,GAAG,EAAE,GAAG,MAAM,OAAO,GAAG,CAAC;AAC1F;","names":[]}
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,8BAA8B,CAAA;AAChD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,QAAQ,MAAM,iCAAiC,CAAA;AACtD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,iBAAiB,MAAM,2CAA2C,CAAA;AACzE,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,kBAAkB,MAAM,4CAA4C,CAAA;AAG3E,eAAO,MAAM,SAAS;;;;;;;;;;CAUrB,CAAA"}
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,8BAA8B,CAAA;AAChD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,QAAQ,MAAM,iCAAiC,CAAA;AACtD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,iBAAiB,MAAM,2CAA2C,CAAA;AACzE,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,kBAAkB,MAAM,4CAA4C,CAAA;AAG3E,eAAO,MAAM,SAAS;;;;;;;;;;CAUrB,CAAA"}
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/wordlists/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,8BAA8B,CAAA;AAChD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,QAAQ,MAAM,iCAAiC,CAAA;AACtD,OAAO,MAAM,MAAM,+BAA+B,CAAA;AAClD,OAAO,iBAAiB,MAAM,2CAA2C,CAAA;AACzE,OAAO,OAAO,MAAM,gCAAgC,CAAA;AACpD,OAAO,kBAAkB,MAAM,4CAA4C,CAAA;AAG3E,eAAO,MAAM,SAAS;;;;;;;;;;CAUrB,CAAA"}
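The wordlist maps above all describe the same nine re-exported 2048-word lists. The 2048-entry check and the radix2(11, true) coder in getCoder earlier in this diff are where the 12 to 24 word lengths come from; the arithmetic below is purely illustrative and uses no package code.

```ts
// Why 2048-word lists yield 12-24 word mnemonics (illustration only).
const bitsPerWord = Math.log2(2048) // 11 bits per word

for (const entropyBytes of [16, 20, 24, 28, 32]) {
  const entropyBits = entropyBytes * 8
  const checksumBits = entropyBits / 32 // same as entropy.length / 4 in calcChecksum
  console.log(`${entropyBytes} bytes -> ${(entropyBits + checksumBits) / bitsPerWord} words`)
}
// Prints 12, 15, 18, 21 and 24 words, matching the normalize() check in the bundles.
```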
package/package.json
CHANGED
@@ -16,8 +16,8 @@
 },
 "description": "Primary SDK for using XYO Protocol 2.0",
 "devDependencies": {
-"@xylabs/ts-scripts-yarn3": "^3.
-"@xylabs/tsconfig": "^3.
+"@xylabs/ts-scripts-yarn3": "^3.13.3",
+"@xylabs/tsconfig": "^3.13.3",
 "typescript": "^5.5.4"
 },
 "exports": {
@@ -58,6 +58,6 @@
 "url": "https://github.com/XYOracleNetwork/sdk-xyo-client-js.git"
 },
 "sideEffects": false,
-"version": "2.110.
+"version": "2.110.11",
 "type": "module"
 }
package/src/index.ts
CHANGED
package/src/wordlists/index.ts
CHANGED