mirror of https://github.com/status-im/bip39.git
Merge pull request #104 from bitcoinjs/typescript
v3 (typescript included)
This commit is contained in:
commit
cd6bcdbb2e
|
@ -0,0 +1,4 @@
|
|||
{
|
||||
"singleQuote": true,
|
||||
"trailingComma": "all"
|
||||
}
|
|
@ -6,6 +6,10 @@ node_js:
|
|||
- "lts/*"
|
||||
- "9"
|
||||
- "10"
|
||||
matrix:
|
||||
include:
|
||||
- node_js: "lts/*"
|
||||
env: TEST_SUITE=gitdiff:ci
|
||||
env:
|
||||
- TEST_SUITE=test
|
||||
script: npm run-script $TEST_SUITE
|
||||
|
|
|
@ -0,0 +1,10 @@
|
|||
# 3.0.0
|
||||
__added__
|
||||
- Added TypeScript support (#104)
|
||||
- Added support for excluding wordlists from packages (#105)
|
||||
|
||||
__changed__
|
||||
- Changed `mnemonicToSeed` to use async, sync version moved to `mnemonicToSeedSync` (#104)
|
||||
|
||||
__removed__
|
||||
- Removed explicit hex methods (use `toString('hex')` on the Buffer) (#104)
|
|
@ -0,0 +1,12 @@
|
|||
# Check the CONTRIBUTING doc on bitcoinjs-lib for info
|
||||
|
||||
[Contributing to BitcoinJS](https://github.com/bitcoinjs/bitcoinjs-lib/blob/master/CONTRIBUTING.md)
|
||||
|
||||
# TypeScript
|
||||
|
||||
* `npm install`
|
||||
* Modify TypeScript in ts_src folder
|
||||
* Run `npm run format`
|
||||
* Run `npm run lint` and make any necessary changes to remove errors
|
||||
* Run `npm test` and make sure the tests pass
|
||||
* Run `git diff` and make sure the diff in the JS files in the src folder match the changes you made to the TS files
|
74
README.md
74
README.md
|
@ -16,6 +16,62 @@ When a checksum is invalid, warn the user that the phrase is not something gener
|
|||
|
||||
However, there should be other checks in place, such as checking to make sure the user is inputting 12 words or more separated by a space. ie. `phrase.trim().split(/\s+/g).length >= 12`
|
||||
|
||||
## Removing wordlists from webpack/browserify
|
||||
|
||||
Browserify/Webpack bundles can get very large if you include all the wordlists, so you can now exclude wordlists to make your bundle lighter.
|
||||
|
||||
For example, if we want to exclude all wordlists besides chinese_simplified, you could build using the browserify command below.
|
||||
|
||||
```bash
|
||||
$ browserify -r bip39 -s bip39 \
|
||||
--exclude=./wordlists/english.json \
|
||||
--exclude=./wordlists/japanese.json \
|
||||
--exclude=./wordlists/spanish.json \
|
||||
--exclude=./wordlists/italian.json \
|
||||
--exclude=./wordlists/french.json \
|
||||
--exclude=./wordlists/korean.json \
|
||||
--exclude=./wordlists/chinese_traditional.json \
|
||||
> bip39.browser.js
|
||||
```
|
||||
|
||||
This will create a bundle that only contains the chinese_simplified wordlist, and it will be the default wordlist for all calls without explicit wordlists.
|
||||
|
||||
This is how it will look in the browser console.
|
||||
|
||||
```javascript
|
||||
> bip39.entropyToMnemonic('00000000000000000000000000000000')
|
||||
"的 的 的 的 的 的 的 的 的 的 的 在"
|
||||
> bip39.wordlists.chinese_simplified
|
||||
Array(2048) [ "的", "一", "是", "在", "不", "了", "有", "和", "人", "这", … ]
|
||||
> bip39.wordlists.english
|
||||
undefined
|
||||
> bip39.wordlists.japanese
|
||||
undefined
|
||||
> bip39.wordlists.spanish
|
||||
undefined
|
||||
> bip39.wordlists.italian
|
||||
undefined
|
||||
> bip39.wordlists.french
|
||||
undefined
|
||||
> bip39.wordlists.korean
|
||||
undefined
|
||||
> bip39.wordlists.chinese_traditional
|
||||
undefined
|
||||
```
|
||||
|
||||
For a list of supported wordlists check the wordlists folder. The name of the json file (minus the extension) is the name of the key to access the wordlist.
|
||||
|
||||
You can also change the default wordlist at runtime if you dislike the wordlist you were given as default.
|
||||
|
||||
```javascript
|
||||
> bip39.entropyToMnemonic('00000000000000000000000000000fff')
|
||||
"あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あまい ろんり"
|
||||
> bip39.setDefaultWordlist('italian')
|
||||
undefined
|
||||
> bip39.entropyToMnemonic('00000000000000000000000000000fff')
|
||||
"abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco aforisma zibetto"
|
||||
```
|
||||
|
||||
## Installation
|
||||
``` bash
|
||||
npm install bip39
|
||||
|
@ -27,21 +83,21 @@ npm install bip39
|
|||
const mnemonic = bip39.generateMnemonic()
|
||||
// => 'seed sock milk update focus rotate barely fade car face mechanic mercy'
|
||||
|
||||
bip39.mnemonicToSeedHex('basket actual')
|
||||
bip39.mnemonicToSeedSync('basket actual').toString('hex')
|
||||
// => '5cf2d4a8b0355e90295bdfc565a022a409af063d5365bb57bf74d9528f494bfa4400f53d8349b80fdae44082d7f9541e1dba2b003bcfec9d0d53781ca676651f'
|
||||
|
||||
bip39.mnemonicToSeed('basket actual')
|
||||
bip39.mnemonicToSeedSync('basket actual')
|
||||
// => <Buffer 5c f2 d4 a8 b0 35 5e 90 29 5b df c5 65 a0 22 a4 09 af 06 3d 53 65 bb 57 bf 74 d9 52 8f 49 4b fa 44 00 f5 3d 83 49 b8 0f da e4 40 82 d7 f9 54 1e 1d ba 2b ...>
|
||||
|
||||
// mnemonicToSeed and mnemonicToSeedHex have async versions
|
||||
// mnemonicToSeedAsync mnemonicToSeedHexAsync are more performance oriented
|
||||
bip39.mnemonicToSeedAsync('basket actual').then(console.log)
|
||||
// mnemonicToSeed has an synchronous version
|
||||
// mnemonicToSeedSync is less performance oriented
|
||||
bip39.mnemonicToSeed('basket actual').then(console.log)
|
||||
// => <Buffer 5c f2 d4 a8 b0 35 5e 90 29 5b df c5 65 a0 22 a4 09 af 06 3d 53 65 bb 57 bf 74 d9 52 8f 49 4b fa 44 00 f5 3d 83 49 b8 0f da e4 40 82 d7 f9 54 1e 1d ba 2b ...>
|
||||
|
||||
bip39.mnemonicToSeedHexAsync('basket actual').then(console.log)
|
||||
bip39.mnemonicToSeed('basket actual').then(bytes => bytes.toString('hex')).then(console.log)
|
||||
// => '5cf2d4a8b0355e90295bdfc565a022a409af063d5365bb57bf74d9528f494bfa4400f53d8349b80fdae44082d7f9541e1dba2b003bcfec9d0d53781ca676651f'
|
||||
|
||||
bip39.mnemonicToSeed('basket actual', 'a password')
|
||||
bip39.mnemonicToSeedSync('basket actual', 'a password')
|
||||
// => <Buffer 46 16 a4 4f 2c 90 b9 69 02 14 b8 fd 43 5b b4 14 62 43 de 10 7b 30 87 59 0a 3b b8 d3 1b 2f 3a ef ab 1d 4b 52 6d 21 e5 0a 04 02 3d 7a d0 66 43 ea 68 3b ... >
|
||||
|
||||
bip39.validateMnemonic(mnemonic)
|
||||
|
@ -53,11 +109,11 @@ bip39.validateMnemonic('basket actual')
|
|||
|
||||
|
||||
``` js
|
||||
var bip39 = require('bip39')
|
||||
const bip39 = require('bip39')
|
||||
|
||||
// defaults to BIP39 English word list
|
||||
// uses HEX strings for entropy
|
||||
var mnemonic = bip39.entropyToMnemonic('00000000000000000000000000000000')
|
||||
const mnemonic = bip39.entropyToMnemonic('00000000000000000000000000000000')
|
||||
// => abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about
|
||||
|
||||
// reversible
|
||||
|
|
178
index.js
178
index.js
|
@ -1,178 +0,0 @@
|
|||
var Buffer = require('safe-buffer').Buffer
|
||||
var createHash = require('create-hash')
|
||||
var _pbkdf2 = require('pbkdf2')
|
||||
var pbkdf2 = _pbkdf2.pbkdf2Sync
|
||||
var pbkdf2Async = _pbkdf2.pbkdf2
|
||||
var randomBytes = require('randombytes')
|
||||
|
||||
// use unorm until String.prototype.normalize gets better browser support
|
||||
var unorm = require('unorm')
|
||||
|
||||
var CHINESE_SIMPLIFIED_WORDLIST = require('./wordlists/chinese_simplified.json')
|
||||
var CHINESE_TRADITIONAL_WORDLIST = require('./wordlists/chinese_traditional.json')
|
||||
var ENGLISH_WORDLIST = require('./wordlists/english.json')
|
||||
var FRENCH_WORDLIST = require('./wordlists/french.json')
|
||||
var ITALIAN_WORDLIST = require('./wordlists/italian.json')
|
||||
var JAPANESE_WORDLIST = require('./wordlists/japanese.json')
|
||||
var KOREAN_WORDLIST = require('./wordlists/korean.json')
|
||||
var SPANISH_WORDLIST = require('./wordlists/spanish.json')
|
||||
var DEFAULT_WORDLIST = ENGLISH_WORDLIST
|
||||
|
||||
var INVALID_MNEMONIC = 'Invalid mnemonic'
|
||||
var INVALID_ENTROPY = 'Invalid entropy'
|
||||
var INVALID_CHECKSUM = 'Invalid mnemonic checksum'
|
||||
|
||||
function lpad (str, padString, length) {
|
||||
while (str.length < length) str = padString + str
|
||||
return str
|
||||
}
|
||||
|
||||
function binaryToByte (bin) {
|
||||
return parseInt(bin, 2)
|
||||
}
|
||||
|
||||
function bytesToBinary (bytes) {
|
||||
return bytes.map(function (x) {
|
||||
return lpad(x.toString(2), '0', 8)
|
||||
}).join('')
|
||||
}
|
||||
|
||||
function deriveChecksumBits (entropyBuffer) {
|
||||
var ENT = entropyBuffer.length * 8
|
||||
var CS = ENT / 32
|
||||
var hash = createHash('sha256').update(entropyBuffer).digest()
|
||||
|
||||
return bytesToBinary([].slice.call(hash)).slice(0, CS)
|
||||
}
|
||||
|
||||
function salt (password) {
|
||||
return 'mnemonic' + (password || '')
|
||||
}
|
||||
|
||||
function mnemonicToSeed (mnemonic, password) {
|
||||
var mnemonicBuffer = Buffer.from(unorm.nfkd(mnemonic), 'utf8')
|
||||
var saltBuffer = Buffer.from(salt(unorm.nfkd(password)), 'utf8')
|
||||
|
||||
return pbkdf2(mnemonicBuffer, saltBuffer, 2048, 64, 'sha512')
|
||||
}
|
||||
|
||||
function mnemonicToSeedHex (mnemonic, password) {
|
||||
return mnemonicToSeed(mnemonic, password).toString('hex')
|
||||
}
|
||||
|
||||
function mnemonicToSeedAsync (mnemonic, password) {
|
||||
return new Promise(function (resolve, reject) {
|
||||
try {
|
||||
var mnemonicBuffer = Buffer.from(unorm.nfkd(mnemonic), 'utf8')
|
||||
var saltBuffer = Buffer.from(salt(unorm.nfkd(password)), 'utf8')
|
||||
} catch (error) {
|
||||
return reject(error)
|
||||
}
|
||||
|
||||
pbkdf2Async(mnemonicBuffer, saltBuffer, 2048, 64, 'sha512', function (err, data) {
|
||||
if (err) return reject(err)
|
||||
else return resolve(data)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function mnemonicToSeedHexAsync (mnemonic, password) {
|
||||
return mnemonicToSeedAsync(mnemonic, password)
|
||||
.then(function (buf) { return buf.toString('hex') })
|
||||
}
|
||||
|
||||
function mnemonicToEntropy (mnemonic, wordlist) {
|
||||
wordlist = wordlist || DEFAULT_WORDLIST
|
||||
|
||||
var words = unorm.nfkd(mnemonic).split(' ')
|
||||
if (words.length % 3 !== 0) throw new Error(INVALID_MNEMONIC)
|
||||
|
||||
// convert word indices to 11 bit binary strings
|
||||
var bits = words.map(function (word) {
|
||||
var index = wordlist.indexOf(word)
|
||||
if (index === -1) throw new Error(INVALID_MNEMONIC)
|
||||
|
||||
return lpad(index.toString(2), '0', 11)
|
||||
}).join('')
|
||||
|
||||
// split the binary string into ENT/CS
|
||||
var dividerIndex = Math.floor(bits.length / 33) * 32
|
||||
var entropyBits = bits.slice(0, dividerIndex)
|
||||
var checksumBits = bits.slice(dividerIndex)
|
||||
|
||||
// calculate the checksum and compare
|
||||
var entropyBytes = entropyBits.match(/(.{1,8})/g).map(binaryToByte)
|
||||
if (entropyBytes.length < 16) throw new Error(INVALID_ENTROPY)
|
||||
if (entropyBytes.length > 32) throw new Error(INVALID_ENTROPY)
|
||||
if (entropyBytes.length % 4 !== 0) throw new Error(INVALID_ENTROPY)
|
||||
|
||||
var entropy = Buffer.from(entropyBytes)
|
||||
var newChecksum = deriveChecksumBits(entropy)
|
||||
if (newChecksum !== checksumBits) throw new Error(INVALID_CHECKSUM)
|
||||
|
||||
return entropy.toString('hex')
|
||||
}
|
||||
|
||||
function entropyToMnemonic (entropy, wordlist) {
|
||||
if (!Buffer.isBuffer(entropy)) entropy = Buffer.from(entropy, 'hex')
|
||||
wordlist = wordlist || DEFAULT_WORDLIST
|
||||
|
||||
// 128 <= ENT <= 256
|
||||
if (entropy.length < 16) throw new TypeError(INVALID_ENTROPY)
|
||||
if (entropy.length > 32) throw new TypeError(INVALID_ENTROPY)
|
||||
if (entropy.length % 4 !== 0) throw new TypeError(INVALID_ENTROPY)
|
||||
|
||||
var entropyBits = bytesToBinary([].slice.call(entropy))
|
||||
var checksumBits = deriveChecksumBits(entropy)
|
||||
|
||||
var bits = entropyBits + checksumBits
|
||||
var chunks = bits.match(/(.{1,11})/g)
|
||||
var words = chunks.map(function (binary) {
|
||||
var index = binaryToByte(binary)
|
||||
return wordlist[index]
|
||||
})
|
||||
|
||||
return wordlist === JAPANESE_WORDLIST ? words.join('\u3000') : words.join(' ')
|
||||
}
|
||||
|
||||
function generateMnemonic (strength, rng, wordlist) {
|
||||
strength = strength || 128
|
||||
if (strength % 32 !== 0) throw new TypeError(INVALID_ENTROPY)
|
||||
rng = rng || randomBytes
|
||||
|
||||
return entropyToMnemonic(rng(strength / 8), wordlist)
|
||||
}
|
||||
|
||||
function validateMnemonic (mnemonic, wordlist) {
|
||||
try {
|
||||
mnemonicToEntropy(mnemonic, wordlist)
|
||||
} catch (e) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
mnemonicToSeed: mnemonicToSeed,
|
||||
mnemonicToSeedAsync: mnemonicToSeedAsync,
|
||||
mnemonicToSeedHex: mnemonicToSeedHex,
|
||||
mnemonicToSeedHexAsync: mnemonicToSeedHexAsync,
|
||||
mnemonicToEntropy: mnemonicToEntropy,
|
||||
entropyToMnemonic: entropyToMnemonic,
|
||||
generateMnemonic: generateMnemonic,
|
||||
validateMnemonic: validateMnemonic,
|
||||
wordlists: {
|
||||
EN: ENGLISH_WORDLIST,
|
||||
JA: JAPANESE_WORDLIST,
|
||||
|
||||
chinese_simplified: CHINESE_SIMPLIFIED_WORDLIST,
|
||||
chinese_traditional: CHINESE_TRADITIONAL_WORDLIST,
|
||||
english: ENGLISH_WORDLIST,
|
||||
french: FRENCH_WORDLIST,
|
||||
italian: ITALIAN_WORDLIST,
|
||||
japanese: JAPANESE_WORDLIST,
|
||||
korean: KOREAN_WORDLIST,
|
||||
spanish: SPANISH_WORDLIST
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
Load Diff
38
package.json
38
package.json
|
@ -1,15 +1,19 @@
|
|||
{
|
||||
"name": "bip39",
|
||||
"version": "2.6.0",
|
||||
"version": "3.0.0",
|
||||
"description": "Bitcoin BIP39: Mnemonic code for generating deterministic keys",
|
||||
"main": "index.js",
|
||||
"files": [
|
||||
"wordlists"
|
||||
],
|
||||
"main": "src/index.js",
|
||||
"types": "./types/index.d.ts",
|
||||
"scripts": {
|
||||
"build": "npm run clean && tsc -p tsconfig.json",
|
||||
"clean": "rm -rf src",
|
||||
"coverage": "nyc --branches 100 --functions 100 --check-coverage npm run unit",
|
||||
"standard": "standard",
|
||||
"test": "npm run standard && npm run unit",
|
||||
"format": "npm run prettier -- --write",
|
||||
"format:ci": "npm run prettier -- --check",
|
||||
"gitdiff:ci": "npm run build && git diff --exit-code",
|
||||
"lint": "tslint -p tsconfig.json -c tslint.json",
|
||||
"prettier": "prettier 'ts_src/**/*.ts' --ignore-path ./.prettierignore",
|
||||
"test": "npm run build && npm run format:ci && npm run lint && npm run unit",
|
||||
"unit": "tape test/*.js",
|
||||
"update": "node -e \"require('./util/wordlists').update()\""
|
||||
},
|
||||
|
@ -23,21 +27,29 @@
|
|||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/weilu/bip39.git"
|
||||
"url": "https://github.com/bitcoinjs/bip39.git"
|
||||
},
|
||||
"license": "ISC",
|
||||
"files": [
|
||||
"src",
|
||||
"types"
|
||||
],
|
||||
"dependencies": {
|
||||
"@types/node": "11.11.6",
|
||||
"create-hash": "^1.1.0",
|
||||
"pbkdf2": "^3.0.9",
|
||||
"randombytes": "^2.0.1",
|
||||
"safe-buffer": "^5.0.1",
|
||||
"unorm": "^1.3.3"
|
||||
"randombytes": "^2.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/create-hash": "1.2.0",
|
||||
"@types/pbkdf2": "3.0.0",
|
||||
"@types/randombytes": "2.0.0",
|
||||
"node-fetch": "^1.6.3",
|
||||
"nyc": "^13.1.0",
|
||||
"prettier": "1.16.4",
|
||||
"proxyquire": "^1.7.10",
|
||||
"standard": "^10.0.2",
|
||||
"tape": "^4.6.2"
|
||||
"tape": "^4.6.2",
|
||||
"tslint": "5.14.0",
|
||||
"typescript": "3.3.4000"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,30 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
// browserify by default only pulls in files that are hard coded in requires
|
||||
// In order of last to first in this file, the default wordlist will be chosen
|
||||
// based on what is present. (Bundles may remove wordlists they don't need)
|
||||
const wordlistFilenames = [
|
||||
'chinese_simplified',
|
||||
'chinese_traditional',
|
||||
'korean',
|
||||
'french',
|
||||
'italian',
|
||||
'spanish',
|
||||
'japanese',
|
||||
'english',
|
||||
];
|
||||
const wordlists = {};
|
||||
exports.wordlists = wordlists;
|
||||
let _default;
|
||||
exports._default = _default;
|
||||
wordlistFilenames.forEach(lang => {
|
||||
try {
|
||||
exports._default = _default = require('./wordlists/' + lang + '.json');
|
||||
wordlists[lang] = _default;
|
||||
if (lang === 'japanese')
|
||||
wordlists.JA = _default;
|
||||
if (lang === 'english')
|
||||
wordlists.EN = _default;
|
||||
}
|
||||
catch (err) { }
|
||||
});
|
|
@ -0,0 +1,159 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const createHash = require("create-hash");
|
||||
const pbkdf2_1 = require("pbkdf2");
|
||||
const randomBytes = require("randombytes");
|
||||
const _wordlists_1 = require("./_wordlists");
|
||||
let DEFAULT_WORDLIST = _wordlists_1._default;
|
||||
const INVALID_MNEMONIC = 'Invalid mnemonic';
|
||||
const INVALID_ENTROPY = 'Invalid entropy';
|
||||
const INVALID_CHECKSUM = 'Invalid mnemonic checksum';
|
||||
const WORDLIST_REQUIRED = 'A wordlist is required but a default could not be found.\n' +
|
||||
'Please explicitly pass a 2048 word array explicitly.';
|
||||
function lpad(str, padString, length) {
|
||||
while (str.length < length)
|
||||
str = padString + str;
|
||||
return str;
|
||||
}
|
||||
function binaryToByte(bin) {
|
||||
return parseInt(bin, 2);
|
||||
}
|
||||
function bytesToBinary(bytes) {
|
||||
return bytes.map(x => lpad(x.toString(2), '0', 8)).join('');
|
||||
}
|
||||
function deriveChecksumBits(entropyBuffer) {
|
||||
const ENT = entropyBuffer.length * 8;
|
||||
const CS = ENT / 32;
|
||||
const hash = createHash('sha256')
|
||||
.update(entropyBuffer)
|
||||
.digest();
|
||||
return bytesToBinary([...hash]).slice(0, CS);
|
||||
}
|
||||
function salt(password) {
|
||||
return 'mnemonic' + (password || '');
|
||||
}
|
||||
function mnemonicToSeedSync(mnemonic, password) {
|
||||
const mnemonicBuffer = Buffer.from((mnemonic || '').normalize('NFKD'), 'utf8');
|
||||
const saltBuffer = Buffer.from(salt((password || '').normalize('NFKD')), 'utf8');
|
||||
return pbkdf2_1.pbkdf2Sync(mnemonicBuffer, saltBuffer, 2048, 64, 'sha512');
|
||||
}
|
||||
exports.mnemonicToSeedSync = mnemonicToSeedSync;
|
||||
function mnemonicToSeed(mnemonic, password) {
|
||||
return new Promise((resolve, reject) => {
|
||||
try {
|
||||
const mnemonicBuffer = Buffer.from((mnemonic || '').normalize('NFKD'), 'utf8');
|
||||
const saltBuffer = Buffer.from(salt((password || '').normalize('NFKD')), 'utf8');
|
||||
pbkdf2_1.pbkdf2(mnemonicBuffer, saltBuffer, 2048, 64, 'sha512', (err, data) => {
|
||||
if (err)
|
||||
return reject(err);
|
||||
else
|
||||
return resolve(data);
|
||||
});
|
||||
}
|
||||
catch (error) {
|
||||
return reject(error);
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.mnemonicToSeed = mnemonicToSeed;
|
||||
function mnemonicToEntropy(mnemonic, wordlist) {
|
||||
wordlist = wordlist || DEFAULT_WORDLIST;
|
||||
if (!wordlist) {
|
||||
throw new Error(WORDLIST_REQUIRED);
|
||||
}
|
||||
const words = (mnemonic || '').normalize('NFKD').split(' ');
|
||||
if (words.length % 3 !== 0)
|
||||
throw new Error(INVALID_MNEMONIC);
|
||||
// convert word indices to 11 bit binary strings
|
||||
const bits = words
|
||||
.map(word => {
|
||||
const index = wordlist.indexOf(word);
|
||||
if (index === -1)
|
||||
throw new Error(INVALID_MNEMONIC);
|
||||
return lpad(index.toString(2), '0', 11);
|
||||
})
|
||||
.join('');
|
||||
// split the binary string into ENT/CS
|
||||
const dividerIndex = Math.floor(bits.length / 33) * 32;
|
||||
const entropyBits = bits.slice(0, dividerIndex);
|
||||
const checksumBits = bits.slice(dividerIndex);
|
||||
// calculate the checksum and compare
|
||||
const entropyBytes = entropyBits.match(/(.{1,8})/g).map(binaryToByte);
|
||||
if (entropyBytes.length < 16)
|
||||
throw new Error(INVALID_ENTROPY);
|
||||
if (entropyBytes.length > 32)
|
||||
throw new Error(INVALID_ENTROPY);
|
||||
if (entropyBytes.length % 4 !== 0)
|
||||
throw new Error(INVALID_ENTROPY);
|
||||
const entropy = Buffer.from(entropyBytes);
|
||||
const newChecksum = deriveChecksumBits(entropy);
|
||||
if (newChecksum !== checksumBits)
|
||||
throw new Error(INVALID_CHECKSUM);
|
||||
return entropy.toString('hex');
|
||||
}
|
||||
exports.mnemonicToEntropy = mnemonicToEntropy;
|
||||
function entropyToMnemonic(entropy, wordlist) {
|
||||
if (!Buffer.isBuffer(entropy))
|
||||
entropy = Buffer.from(entropy, 'hex');
|
||||
wordlist = wordlist || DEFAULT_WORDLIST;
|
||||
if (!wordlist) {
|
||||
throw new Error(WORDLIST_REQUIRED);
|
||||
}
|
||||
// 128 <= ENT <= 256
|
||||
if (entropy.length < 16)
|
||||
throw new TypeError(INVALID_ENTROPY);
|
||||
if (entropy.length > 32)
|
||||
throw new TypeError(INVALID_ENTROPY);
|
||||
if (entropy.length % 4 !== 0)
|
||||
throw new TypeError(INVALID_ENTROPY);
|
||||
const entropyBits = bytesToBinary([...entropy]);
|
||||
const checksumBits = deriveChecksumBits(entropy);
|
||||
const bits = entropyBits + checksumBits;
|
||||
const chunks = bits.match(/(.{1,11})/g);
|
||||
const words = chunks.map(binary => {
|
||||
const index = binaryToByte(binary);
|
||||
return wordlist[index];
|
||||
});
|
||||
return wordlist[0] === '\u3042\u3044\u3053\u304f\u3057\u3093' // Japanese wordlist
|
||||
? words.join('\u3000')
|
||||
: words.join(' ');
|
||||
}
|
||||
exports.entropyToMnemonic = entropyToMnemonic;
|
||||
function generateMnemonic(strength, rng, wordlist) {
|
||||
strength = strength || 128;
|
||||
if (strength % 32 !== 0)
|
||||
throw new TypeError(INVALID_ENTROPY);
|
||||
rng = rng || randomBytes;
|
||||
return entropyToMnemonic(rng(strength / 8), wordlist);
|
||||
}
|
||||
exports.generateMnemonic = generateMnemonic;
|
||||
function validateMnemonic(mnemonic, wordlist) {
|
||||
try {
|
||||
mnemonicToEntropy(mnemonic, wordlist);
|
||||
}
|
||||
catch (e) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
exports.validateMnemonic = validateMnemonic;
|
||||
function setDefaultWordlist(language) {
|
||||
const result = _wordlists_1.wordlists[language];
|
||||
if (result)
|
||||
DEFAULT_WORDLIST = result;
|
||||
else
|
||||
throw new Error('Could not find wordlist for language "' + language + '"');
|
||||
}
|
||||
exports.setDefaultWordlist = setDefaultWordlist;
|
||||
function getDefaultWordlist() {
|
||||
if (!DEFAULT_WORDLIST)
|
||||
throw new Error('No Default Wordlist set');
|
||||
return Object.keys(_wordlists_1.wordlists).filter(lang => {
|
||||
if (lang === 'JA' || lang === 'EN')
|
||||
return false;
|
||||
return _wordlists_1.wordlists[lang].every((word, index) => word === DEFAULT_WORDLIST[index]);
|
||||
})[0];
|
||||
}
|
||||
exports.getDefaultWordlist = getDefaultWordlist;
|
||||
var _wordlists_2 = require("./_wordlists");
|
||||
exports.wordlists = _wordlists_2.wordlists;
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
@ -1,9 +1,8 @@
|
|||
var bip39 = require('../')
|
||||
var Buffer = require('safe-buffer').Buffer
|
||||
var download = require('../util/wordlists').download
|
||||
var WORDLISTS = {
|
||||
english: require('../wordlists/english.json'),
|
||||
japanese: require('../wordlists/japanese.json'),
|
||||
english: require('../src/wordlists/english.json'),
|
||||
japanese: require('../src/wordlists/japanese.json'),
|
||||
custom: require('./wordlist.json')
|
||||
}
|
||||
|
||||
|
@ -19,9 +18,9 @@ function testVector (description, wordlist, password, v, i) {
|
|||
t.plan(6)
|
||||
|
||||
t.equal(bip39.mnemonicToEntropy(vmnemonic, wordlist), ventropy, 'mnemonicToEntropy returns ' + ventropy.slice(0, 40) + '...')
|
||||
t.equal(bip39.mnemonicToSeedHex(vmnemonic, password), vseedHex, 'mnemonicToSeedHex returns ' + vseedHex.slice(0, 40) + '...')
|
||||
bip39.mnemonicToSeedHexAsync(vmnemonic, password).then(function (asyncSeedHex) {
|
||||
t.equal(asyncSeedHex, vseedHex, 'mnemonicToSeedHexAsync returns ' + vseedHex.slice(0, 40) + '...')
|
||||
t.equal(bip39.mnemonicToSeedSync(vmnemonic, password).toString('hex'), vseedHex, 'mnemonicToSeedSync returns ' + vseedHex.slice(0, 40) + '...')
|
||||
bip39.mnemonicToSeed(vmnemonic, password).then(function (asyncSeed) {
|
||||
t.equal(asyncSeed.toString('hex'), vseedHex, 'mnemonicToSeed returns ' + vseedHex.slice(0, 40) + '...')
|
||||
})
|
||||
t.equal(bip39.entropyToMnemonic(ventropy, wordlist), vmnemonic, 'entropyToMnemonic returns ' + vmnemonic.slice(0, 40) + '...')
|
||||
|
||||
|
@ -35,6 +34,46 @@ vectors.english.forEach(function (v, i) { testVector('English', undefined, 'TREZ
|
|||
vectors.japanese.forEach(function (v, i) { testVector('Japanese', WORDLISTS.japanese, '㍍ガバヴァぱばぐゞちぢ十人十色', v, i) })
|
||||
vectors.custom.forEach(function (v, i) { testVector('Custom', WORDLISTS.custom, undefined, v, i) })
|
||||
|
||||
test('getDefaultWordlist returns "english"', function (t) {
|
||||
t.plan(1)
|
||||
const english = bip39.getDefaultWordlist()
|
||||
t.equal(english, 'english')
|
||||
// TODO: Test that Error throws when called if no wordlists are compiled with bip39
|
||||
})
|
||||
|
||||
test('setDefaultWordlist changes default wordlist', function (t) {
|
||||
t.plan(4)
|
||||
const english = bip39.getDefaultWordlist()
|
||||
t.equal(english, 'english')
|
||||
|
||||
bip39.setDefaultWordlist('italian')
|
||||
|
||||
const italian = bip39.getDefaultWordlist()
|
||||
t.equal(italian, 'italian')
|
||||
|
||||
const phraseItalian = bip39.entropyToMnemonic('00000000000000000000000000000000')
|
||||
t.equal(phraseItalian.slice(0, 5), 'abaco')
|
||||
|
||||
bip39.setDefaultWordlist('english')
|
||||
|
||||
const phraseEnglish = bip39.entropyToMnemonic('00000000000000000000000000000000')
|
||||
t.equal(phraseEnglish.slice(0, 7), 'abandon')
|
||||
})
|
||||
|
||||
test('setDefaultWordlist throws on unknown wordlist', function (t) {
|
||||
t.plan(2)
|
||||
const english = bip39.getDefaultWordlist()
|
||||
t.equal(english, 'english')
|
||||
|
||||
try {
|
||||
bip39.setDefaultWordlist('abcdefghijklmnop')
|
||||
} catch (error) {
|
||||
t.equal(error.message, 'Could not find wordlist for language "abcdefghijklmnop"')
|
||||
return
|
||||
}
|
||||
t.assert(false)
|
||||
})
|
||||
|
||||
test('invalid entropy', function (t) {
|
||||
t.plan(3)
|
||||
|
||||
|
@ -61,8 +100,8 @@ test('UTF8 passwords', function (t) {
|
|||
var password = '㍍ガバヴァぱばぐゞちぢ十人十色'
|
||||
var normalizedPassword = 'メートルガバヴァぱばぐゞちぢ十人十色'
|
||||
|
||||
t.equal(bip39.mnemonicToSeedHex(vmnemonic, password), vseedHex, 'mnemonicToSeedHex normalizes passwords')
|
||||
t.equal(bip39.mnemonicToSeedHex(vmnemonic, normalizedPassword), vseedHex, 'mnemonicToSeedHex leaves normalizes passwords as-is')
|
||||
t.equal(bip39.mnemonicToSeedSync(vmnemonic, password).toString('hex'), vseedHex, 'mnemonicToSeedSync normalizes passwords')
|
||||
t.equal(bip39.mnemonicToSeedSync(vmnemonic, normalizedPassword).toString('hex'), vseedHex, 'mnemonicToSeedSync leaves normalizes passwords as-is')
|
||||
})
|
||||
})
|
||||
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
var bip39 = require('../')
|
||||
var Buffer = require('safe-buffer').Buffer
|
||||
var proxyquire = require('proxyquire')
|
||||
var test = require('tape')
|
||||
const bip39 = require('../')
|
||||
const Buffer = require('safe-buffer').Buffer
|
||||
const proxyquire = require('proxyquire')
|
||||
const test = require('tape')
|
||||
|
||||
test('README example 1', function (t) {
|
||||
// defaults to BIP39 English word list
|
||||
var entropy = 'ffffffffffffffffffffffffffffffff'
|
||||
var mnemonic = bip39.entropyToMnemonic(entropy)
|
||||
const entropy = 'ffffffffffffffffffffffffffffffff'
|
||||
const mnemonic = bip39.entropyToMnemonic(entropy)
|
||||
|
||||
t.plan(2)
|
||||
t.equal(mnemonic, 'zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo wrong')
|
||||
|
@ -16,15 +16,15 @@ test('README example 1', function (t) {
|
|||
})
|
||||
|
||||
test('README example 2', function (t) {
|
||||
var stub = {
|
||||
const stub = {
|
||||
randombytes: function (size) {
|
||||
return Buffer.from('qwertyuiopasdfghjklzxcvbnm[];,./'.slice(0, size), 'utf8')
|
||||
}
|
||||
}
|
||||
var proxiedbip39 = proxyquire('../', stub)
|
||||
const proxiedbip39 = proxyquire('../', stub)
|
||||
|
||||
// mnemonic strength defaults to 128 bits
|
||||
var mnemonic = proxiedbip39.generateMnemonic()
|
||||
const mnemonic = proxiedbip39.generateMnemonic()
|
||||
|
||||
t.plan(2)
|
||||
t.equal(mnemonic, 'imitate robot frame trophy nuclear regret saddle around inflict case oil spice')
|
||||
|
@ -32,12 +32,10 @@ test('README example 2', function (t) {
|
|||
})
|
||||
|
||||
test('README example 3', function (t) {
|
||||
var mnemonic = 'basket actual'
|
||||
var seed = bip39.mnemonicToSeed(mnemonic)
|
||||
var seedHex = bip39.mnemonicToSeedHex(mnemonic)
|
||||
const mnemonic = 'basket actual'
|
||||
const seed = bip39.mnemonicToSeedSync(mnemonic)
|
||||
|
||||
t.plan(3)
|
||||
t.equal(seed.toString('hex'), seedHex)
|
||||
t.equal(seedHex, '5cf2d4a8b0355e90295bdfc565a022a409af063d5365bb57bf74d9528f494bfa4400f53d8349b80fdae44082d7f9541e1dba2b003bcfec9d0d53781ca676651f')
|
||||
t.plan(2)
|
||||
t.equal(seed.toString('hex'), '5cf2d4a8b0355e90295bdfc565a022a409af063d5365bb57bf74d9528f494bfa4400f53d8349b80fdae44082d7f9541e1dba2b003bcfec9d0d53781ca676651f')
|
||||
t.equal(bip39.validateMnemonic(mnemonic), false)
|
||||
})
|
||||
|
|
|
@ -0,0 +1,26 @@
|
|||
// browserify by default only pulls in files that are hard coded in requires
|
||||
// In order of last to first in this file, the default wordlist will be chosen
|
||||
// based on what is present. (Bundles may remove wordlists they don't need)
|
||||
const wordlistFilenames: string[] = [
|
||||
'chinese_simplified',
|
||||
'chinese_traditional',
|
||||
'korean',
|
||||
'french',
|
||||
'italian',
|
||||
'spanish',
|
||||
'japanese',
|
||||
'english', // Last language available in list will be the default.
|
||||
];
|
||||
const wordlists: { [index: string]: string[] } = {};
|
||||
let _default: string[] | undefined;
|
||||
wordlistFilenames.forEach(lang => {
|
||||
try {
|
||||
_default = require('./wordlists/' + lang + '.json');
|
||||
wordlists[lang] = _default as string[];
|
||||
if (lang === 'japanese') wordlists.JA = _default as string[];
|
||||
if (lang === 'english') wordlists.EN = _default as string[];
|
||||
} catch (err) {}
|
||||
});
|
||||
|
||||
// Last one to overwrite wordlist gets to be default.
|
||||
export { wordlists, _default };
|
|
@ -0,0 +1,193 @@
|
|||
import * as createHash from 'create-hash';
|
||||
import { pbkdf2, pbkdf2Sync } from 'pbkdf2';
|
||||
import * as randomBytes from 'randombytes';
|
||||
import { _default as _DEFAULT_WORDLIST, wordlists } from './_wordlists';
|
||||
|
||||
let DEFAULT_WORDLIST: string[] | undefined = _DEFAULT_WORDLIST;
|
||||
|
||||
const INVALID_MNEMONIC = 'Invalid mnemonic';
|
||||
const INVALID_ENTROPY = 'Invalid entropy';
|
||||
const INVALID_CHECKSUM = 'Invalid mnemonic checksum';
|
||||
const WORDLIST_REQUIRED =
|
||||
'A wordlist is required but a default could not be found.\n' +
|
||||
'Please explicitly pass a 2048 word array explicitly.';
|
||||
|
||||
function lpad(str: string, padString: string, length: number): string {
|
||||
while (str.length < length) str = padString + str;
|
||||
return str;
|
||||
}
|
||||
|
||||
function binaryToByte(bin: string): number {
|
||||
return parseInt(bin, 2);
|
||||
}
|
||||
|
||||
function bytesToBinary(bytes: number[]): string {
|
||||
return bytes.map(x => lpad(x.toString(2), '0', 8)).join('');
|
||||
}
|
||||
|
||||
function deriveChecksumBits(entropyBuffer: Buffer): string {
|
||||
const ENT = entropyBuffer.length * 8;
|
||||
const CS = ENT / 32;
|
||||
const hash = createHash('sha256')
|
||||
.update(entropyBuffer)
|
||||
.digest();
|
||||
|
||||
return bytesToBinary([...hash]).slice(0, CS);
|
||||
}
|
||||
|
||||
function salt(password?: string): string {
|
||||
return 'mnemonic' + (password || '');
|
||||
}
|
||||
|
||||
export function mnemonicToSeedSync(mnemonic: string, password: string): Buffer {
|
||||
const mnemonicBuffer = Buffer.from(
|
||||
(mnemonic || '').normalize('NFKD'),
|
||||
'utf8',
|
||||
);
|
||||
const saltBuffer = Buffer.from(
|
||||
salt((password || '').normalize('NFKD')),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
return pbkdf2Sync(mnemonicBuffer, saltBuffer, 2048, 64, 'sha512');
|
||||
}
|
||||
|
||||
export function mnemonicToSeed(
|
||||
mnemonic: string,
|
||||
password: string,
|
||||
): Promise<Buffer> {
|
||||
return new Promise(
|
||||
(resolve, reject): void => {
|
||||
try {
|
||||
const mnemonicBuffer = Buffer.from(
|
||||
(mnemonic || '').normalize('NFKD'),
|
||||
'utf8',
|
||||
);
|
||||
const saltBuffer = Buffer.from(
|
||||
salt((password || '').normalize('NFKD')),
|
||||
'utf8',
|
||||
);
|
||||
pbkdf2(mnemonicBuffer, saltBuffer, 2048, 64, 'sha512', (err, data) => {
|
||||
if (err) return reject(err);
|
||||
else return resolve(data);
|
||||
});
|
||||
} catch (error) {
|
||||
return reject(error);
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
export function mnemonicToEntropy(
|
||||
mnemonic: string,
|
||||
wordlist?: string[],
|
||||
): string {
|
||||
wordlist = wordlist || DEFAULT_WORDLIST;
|
||||
if (!wordlist) {
|
||||
throw new Error(WORDLIST_REQUIRED);
|
||||
}
|
||||
|
||||
const words = (mnemonic || '').normalize('NFKD').split(' ');
|
||||
if (words.length % 3 !== 0) throw new Error(INVALID_MNEMONIC);
|
||||
|
||||
// convert word indices to 11 bit binary strings
|
||||
const bits = words
|
||||
.map(word => {
|
||||
const index = wordlist!.indexOf(word);
|
||||
if (index === -1) throw new Error(INVALID_MNEMONIC);
|
||||
|
||||
return lpad(index.toString(2), '0', 11);
|
||||
})
|
||||
.join('');
|
||||
|
||||
// split the binary string into ENT/CS
|
||||
const dividerIndex = Math.floor(bits.length / 33) * 32;
|
||||
const entropyBits = bits.slice(0, dividerIndex);
|
||||
const checksumBits = bits.slice(dividerIndex);
|
||||
|
||||
// calculate the checksum and compare
|
||||
const entropyBytes = entropyBits.match(/(.{1,8})/g)!.map(binaryToByte);
|
||||
if (entropyBytes.length < 16) throw new Error(INVALID_ENTROPY);
|
||||
if (entropyBytes.length > 32) throw new Error(INVALID_ENTROPY);
|
||||
if (entropyBytes.length % 4 !== 0) throw new Error(INVALID_ENTROPY);
|
||||
|
||||
const entropy = Buffer.from(entropyBytes);
|
||||
const newChecksum = deriveChecksumBits(entropy);
|
||||
if (newChecksum !== checksumBits) throw new Error(INVALID_CHECKSUM);
|
||||
|
||||
return entropy.toString('hex');
|
||||
}
|
||||
|
||||
export function entropyToMnemonic(
|
||||
entropy: Buffer | string,
|
||||
wordlist?: string[],
|
||||
): string {
|
||||
if (!Buffer.isBuffer(entropy)) entropy = Buffer.from(entropy, 'hex');
|
||||
wordlist = wordlist || DEFAULT_WORDLIST;
|
||||
if (!wordlist) {
|
||||
throw new Error(WORDLIST_REQUIRED);
|
||||
}
|
||||
|
||||
// 128 <= ENT <= 256
|
||||
if (entropy.length < 16) throw new TypeError(INVALID_ENTROPY);
|
||||
if (entropy.length > 32) throw new TypeError(INVALID_ENTROPY);
|
||||
if (entropy.length % 4 !== 0) throw new TypeError(INVALID_ENTROPY);
|
||||
|
||||
const entropyBits = bytesToBinary([...entropy]);
|
||||
const checksumBits = deriveChecksumBits(entropy);
|
||||
|
||||
const bits = entropyBits + checksumBits;
|
||||
const chunks = bits.match(/(.{1,11})/g)!;
|
||||
const words = chunks.map(binary => {
|
||||
const index = binaryToByte(binary);
|
||||
return wordlist![index];
|
||||
});
|
||||
|
||||
return wordlist[0] === '\u3042\u3044\u3053\u304f\u3057\u3093' // Japanese wordlist
|
||||
? words.join('\u3000')
|
||||
: words.join(' ');
|
||||
}
|
||||
|
||||
export function generateMnemonic(
|
||||
strength?: number,
|
||||
rng?: (size: number) => Buffer,
|
||||
wordlist?: string[],
|
||||
): string {
|
||||
strength = strength || 128;
|
||||
if (strength % 32 !== 0) throw new TypeError(INVALID_ENTROPY);
|
||||
rng = rng || randomBytes;
|
||||
|
||||
return entropyToMnemonic(rng(strength / 8), wordlist);
|
||||
}
|
||||
|
||||
export function validateMnemonic(
|
||||
mnemonic: string,
|
||||
wordlist?: string[],
|
||||
): boolean {
|
||||
try {
|
||||
mnemonicToEntropy(mnemonic, wordlist);
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
export function setDefaultWordlist(language: string): void {
|
||||
const result = wordlists[language];
|
||||
if (result) DEFAULT_WORDLIST = result;
|
||||
else
|
||||
throw new Error('Could not find wordlist for language "' + language + '"');
|
||||
}
|
||||
|
||||
export function getDefaultWordlist(): string {
|
||||
if (!DEFAULT_WORDLIST) throw new Error('No Default Wordlist set');
|
||||
return Object.keys(wordlists).filter(lang => {
|
||||
if (lang === 'JA' || lang === 'EN') return false;
|
||||
return wordlists[lang].every(
|
||||
(word, index) => word === DEFAULT_WORDLIST![index],
|
||||
);
|
||||
})[0];
|
||||
}
|
||||
|
||||
export { wordlists } from './_wordlists';
|
|
@ -0,0 +1,34 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"target": "es2017",
|
||||
"module": "commonjs",
|
||||
"outDir": "./src",
|
||||
"declaration": true,
|
||||
"declarationDir": "./types",
|
||||
"rootDir": "./ts_src",
|
||||
"types": [
|
||||
"node"
|
||||
],
|
||||
"allowJs": false,
|
||||
"strict": true,
|
||||
"noImplicitAny": true,
|
||||
"strictNullChecks": true,
|
||||
"strictFunctionTypes": true,
|
||||
"strictBindCallApply": true,
|
||||
"strictPropertyInitialization": true,
|
||||
"noImplicitThis": true,
|
||||
"alwaysStrict": true,
|
||||
"esModuleInterop": false,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"resolveJsonModule": true
|
||||
},
|
||||
"include": [
|
||||
"ts_src/**/*.ts",
|
||||
"ts_src/**/*.json"
|
||||
],
|
||||
"exclude": [
|
||||
"**/*.spec.ts",
|
||||
"node_modules/**/*"
|
||||
]
|
||||
}
|
|
@ -0,0 +1,40 @@
|
|||
{
|
||||
"defaultSeverity": "error",
|
||||
"extends": ["tslint:recommended"],
|
||||
"rules": {
|
||||
"arrow-parens": [true, "ban-single-arg-parens"],
|
||||
"curly": false,
|
||||
"indent": [
|
||||
true,
|
||||
"spaces",
|
||||
2
|
||||
],
|
||||
"interface-name": [false],
|
||||
"match-default-export-name": true,
|
||||
"max-classes-per-file": [false],
|
||||
"member-access": [true, "no-public"],
|
||||
"no-bitwise": false,
|
||||
"no-console": false,
|
||||
"no-empty": [true, "allow-empty-catch"],
|
||||
"no-implicit-dependencies": true,
|
||||
"no-return-await": true,
|
||||
"no-var-requires": false,
|
||||
"no-unused-expression": false,
|
||||
"object-literal-sort-keys": false,
|
||||
"quotemark": [true, "single"],
|
||||
"typedef": [
|
||||
true,
|
||||
"call-signature",
|
||||
"arrow-call-signature",
|
||||
"property-declaration"
|
||||
],
|
||||
"variable-name": [
|
||||
true,
|
||||
"ban-keywords",
|
||||
"check-format",
|
||||
"allow-leading-underscore",
|
||||
"allow-pascal-case"
|
||||
]
|
||||
},
|
||||
"rulesDirectory": []
|
||||
}
|
|
@ -0,0 +1,5 @@
|
|||
// BIP39 wordlists keyed by language name; the implementation also adds
// 'EN' and 'JA' aliases for english/japanese when those lists are bundled.
declare const wordlists: {
  [index: string]: string[];
};
// Default wordlist chosen at load time (last one successfully loaded);
// undefined when every wordlist was excluded from the bundle.
declare let _default: string[] | undefined;
export { wordlists, _default };
|
|
@ -0,0 +1,10 @@
|
|||
/// <reference types="node" />
|
||||
export declare function mnemonicToSeedSync(mnemonic: string, password: string): Buffer;
|
||||
export declare function mnemonicToSeed(mnemonic: string, password: string): Promise<Buffer>;
|
||||
export declare function mnemonicToEntropy(mnemonic: string, wordlist?: string[]): string;
|
||||
export declare function entropyToMnemonic(entropy: Buffer | string, wordlist?: string[]): string;
|
||||
export declare function generateMnemonic(strength?: number, rng?: (size: number) => Buffer, wordlist?: string[]): string;
|
||||
export declare function validateMnemonic(mnemonic: string, wordlist?: string[]): boolean;
|
||||
export declare function setDefaultWordlist(language: string): void;
|
||||
export declare function getDefaultWordlist(): string;
|
||||
export { wordlists } from './_wordlists';
|
|
@ -0,0 +1,5 @@
|
|||
// BIP39 wordlists keyed by language name; the implementation also adds
// 'EN' and 'JA' aliases for english/japanese when those lists are bundled.
declare const wordlists: {
  [index: string]: string[];
};
// Default wordlist chosen at load time (last one successfully loaded);
// undefined when every wordlist was excluded from the bundle.
declare let _default: string[] | undefined;
export { wordlists, _default };
|
|
@ -43,7 +43,7 @@ function toJSON (content) {
|
|||
}
|
||||
|
||||
function save (name, wordlist) {
|
||||
var location = path.join(__dirname, '..', 'wordlists', name + '.json')
|
||||
var location = path.join(__dirname, '..', 'ts_src', 'wordlists', name + '.json')
|
||||
var content = JSON.stringify(wordlist, null, 2) + '\n'
|
||||
log('save ' + wordlist.length + ' words to ' + location)
|
||||
|
||||
|
|
Loading…
Reference in New Issue