create daemon
thrower_daemon/node_modules/@scure/bip39/esm/index.js (generated, vendored, normal file): 69 additions
@@ -0,0 +1,69 @@
import assert from '@noble/hashes/_assert';
import { pbkdf2, pbkdf2Async } from '@noble/hashes/pbkdf2';
import { sha256 } from '@noble/hashes/sha256';
import { sha512 } from '@noble/hashes/sha512';
import { randomBytes } from '@noble/hashes/utils';
import { utils as baseUtils } from '@scure/base';
const isJapanese = (wordlist) => wordlist[0] === '\u3042\u3044\u3053\u304f\u3057\u3093';
function nfkd(str) {
    if (typeof str !== 'string')
        throw new TypeError(`Invalid mnemonic type: ${typeof str}`);
    return str.normalize('NFKD');
}
function normalize(str) {
    const norm = nfkd(str);
    const words = norm.split(' ');
    if (![12, 15, 18, 21, 24].includes(words.length))
        throw new Error('Invalid mnemonic');
    return { nfkd: norm, words };
}
function assertEntropy(entropy) {
    assert.bytes(entropy, 16, 20, 24, 28, 32);
}
export function generateMnemonic(wordlist, strength = 128) {
    assert.number(strength);
    if (strength % 32 !== 0 || strength > 256)
        throw new TypeError('Invalid entropy');
    return entropyToMnemonic(randomBytes(strength / 8), wordlist);
}
const calcChecksum = (entropy) => {
    const bitsLeft = 8 - entropy.length / 4;
    return new Uint8Array([(sha256(entropy)[0] >> bitsLeft) << bitsLeft]);
};
function getCoder(wordlist) {
    if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== 'string')
        throw new Error('Wordlist: expected array of 2048 strings');
    wordlist.forEach((i) => {
        if (typeof i !== 'string')
            throw new Error(`Wordlist: non-string element: ${i}`);
    });
    return baseUtils.chain(baseUtils.checksum(1, calcChecksum), baseUtils.radix2(11, true), baseUtils.alphabet(wordlist));
}
export function mnemonicToEntropy(mnemonic, wordlist) {
    const { words } = normalize(mnemonic);
    const entropy = getCoder(wordlist).decode(words);
    assertEntropy(entropy);
    return entropy;
}
export function entropyToMnemonic(entropy, wordlist) {
    assertEntropy(entropy);
    const words = getCoder(wordlist).encode(entropy);
    return words.join(isJapanese(wordlist) ? '\u3000' : ' ');
}
export function validateMnemonic(mnemonic, wordlist) {
    try {
        mnemonicToEntropy(mnemonic, wordlist);
    }
    catch (e) {
        return false;
    }
    return true;
}
const salt = (passphrase) => nfkd(`mnemonic${passphrase}`);
export function mnemonicToSeed(mnemonic, passphrase = '') {
    return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
}
export function mnemonicToSeedSync(mnemonic, passphrase = '') {
    return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
}
//# sourceMappingURL=index.js.map
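For reference, a minimal usage sketch of the API vendored by this commit. It assumes the package entry point resolves to this index.js and that the English wordlist is available at the upstream '@scure/bip39/wordlists/english' path; neither of those is shown in this diff, and this sketch is not part of the commit.

// Usage sketch only (not part of this commit); the wordlist import path is assumed from upstream.
import { generateMnemonic, validateMnemonic, mnemonicToSeedSync } from '@scure/bip39';
import { wordlist } from '@scure/bip39/wordlists/english';

// 128 bits of entropy -> 12-word mnemonic (strength must be a multiple of 32, at most 256)
const mnemonic = generateMnemonic(wordlist, 128);
console.log(validateMnemonic(mnemonic, wordlist)); // true

// PBKDF2-HMAC-SHA512 with c = 2048 and dkLen = 64 -> 64-byte seed; passphrase is optional
const seed = mnemonicToSeedSync(mnemonic, 'optional passphrase');
console.log(seed.length); // 64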