add tslint and fix lint errors

d-yokoi 2019-03-23 22:39:32 +09:00
parent 464984682f
commit 834389c038
GPG Key ID: 49EAF81BC6A0D19A
5 changed files with 203 additions and 97 deletions

package-lock.json (generated)

@@ -384,6 +384,12 @@
"integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=",
"dev": true
},
"commander": {
"version": "2.19.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.19.0.tgz",
"integrity": "sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg==",
"dev": true
},
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@@ -504,6 +510,12 @@
"uniq": "^1.0.1"
}
},
"diff": {
"version": "3.5.0",
"resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz",
"integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==",
"dev": true
},
"doctrine": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz",
@@ -3234,6 +3246,53 @@
"integrity": "sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM=",
"dev": true
},
"tslib": {
"version": "1.9.3",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.9.3.tgz",
"integrity": "sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ==",
"dev": true
},
"tslint": {
"version": "5.14.0",
"resolved": "https://registry.npmjs.org/tslint/-/tslint-5.14.0.tgz",
"integrity": "sha512-IUla/ieHVnB8Le7LdQFRGlVJid2T/gaJe5VkjzRVSRR6pA2ODYrnfR1hmxi+5+au9l50jBwpbBL34txgv4NnTQ==",
"dev": true,
"requires": {
"babel-code-frame": "^6.22.0",
"builtin-modules": "^1.1.1",
"chalk": "^2.3.0",
"commander": "^2.12.1",
"diff": "^3.2.0",
"glob": "^7.1.1",
"js-yaml": "^3.7.0",
"minimatch": "^3.0.4",
"mkdirp": "^0.5.1",
"resolve": "^1.3.2",
"semver": "^5.3.0",
"tslib": "^1.8.0",
"tsutils": "^2.29.0"
},
"dependencies": {
"resolve": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.10.0.tgz",
"integrity": "sha512-3sUr9aq5OfSg2S9pNtPA9hL1FVEAjvfOC4leW0SNf/mpnaakz2a9femSd6LqAww2RaFctwyf1lCqnTHuF1rxDg==",
"dev": true,
"requires": {
"path-parse": "^1.0.6"
}
}
}
},
"tsutils": {
"version": "2.29.0",
"resolved": "https://registry.npmjs.org/tsutils/-/tsutils-2.29.0.tgz",
"integrity": "sha512-g5JVHCIJwzfISaXpXE1qvNalca5Jwob6FjI4AoPlqMusJ6ftFE7IkkFoMhVLRgK+4Kx3gkzb8UZK5t5yTTvEmA==",
"dev": true,
"requires": {
"tslib": "^1.8.1"
}
},
"type-check": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz",

package.json

@@ -9,6 +9,7 @@
"coverage": "nyc --branches 100 --functions 100 --check-coverage npm run unit",
"format": "npm run prettier -- --write",
"format:ci": "npm run prettier -- --check",
"lint": "tslint -p tsconfig.json -c tslint.json",
"prettier": "prettier 'ts_src/**/*.ts' --ignore-path ./.prettierignore",
"standard": "standard",
"test": "npm run standard && npm run unit",
@@ -50,6 +51,7 @@
"proxyquire": "^1.7.10",
"standard": "^10.0.2",
"tape": "^4.6.2",
"tslint": "^5.14.0",
"typescript": "3.3.4000"
}
}
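
The new "lint" script wires tslint into the project: "-p tsconfig.json" hands tslint the TypeScript program (the tsconfig file list plus type information for type-aware rules) and "-c tslint.json" points at the configuration added at the bottom of this commit, so the check runs as "npm run lint". For reference, a minimal sketch of the same check through tslint 5's documented library API (the formatter name and exit handling are illustrative choices, not part of this commit):

// lint.ts: illustrative only; the project itself invokes the CLI via "npm run lint".
import { Configuration, Linter } from 'tslint';

const program = Linter.createProgram('tsconfig.json');
const linter = new Linter({ fix: false, formatter: 'stylish' }, program);

for (const file of Linter.getFileNames(program)) {
  const contents = program.getSourceFile(file)!.getFullText();
  const config = Configuration.findConfiguration('tslint.json', file).results;
  linter.lint(file, contents, config);
}

const { errorCount, output } = linter.getResult();
if (output) { console.log(output); }
process.exit(errorCount > 0 ? 1 : 0);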


@@ -1,4 +1,12 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const createHash = require("create-hash");
const pbkdf2_1 = require("pbkdf2");
@@ -27,16 +35,12 @@ function binaryToByte(bin) {
return parseInt(bin, 2);
}
function bytesToBinary(bytes) {
return bytes
.map(function (x) {
return lpad(x.toString(2), '0', 8);
})
.join('');
return bytes.map(x => lpad(x.toString(2), '0', 8)).join('');
}
function deriveChecksumBits(entropyBuffer) {
var ENT = entropyBuffer.length * 8;
var CS = ENT / 32;
var hash = createHash('sha256')
const ENT = entropyBuffer.length * 8;
const CS = ENT / 32;
const hash = createHash('sha256')
.update(entropyBuffer)
.digest();
return bytesToBinary([].slice.call(hash)).slice(0, CS);
@@ -53,55 +57,56 @@ function mnemonicToSeedHex(mnemonic, password) {
return mnemonicToSeed(mnemonic, password).toString('hex');
}
function mnemonicToSeedAsync(mnemonic, password) {
return new Promise(function (resolve, reject) {
return new Promise((resolve, reject) => {
try {
var mnemonicBuffer = Buffer.from(unorm.nfkd(mnemonic), 'utf8');
var saltBuffer = Buffer.from(salt(unorm.nfkd(password)), 'utf8');
const mnemonicBuffer = Buffer.from(unorm.nfkd(mnemonic), 'utf8');
const saltBuffer = Buffer.from(salt(unorm.nfkd(password)), 'utf8');
pbkdf2_1.pbkdf2(mnemonicBuffer, saltBuffer, 2048, 64, 'sha512', (err, data) => {
if (err)
return reject(err);
else
return resolve(data);
});
}
catch (error) {
return reject(error);
}
pbkdf2_1.pbkdf2(mnemonicBuffer, saltBuffer, 2048, 64, 'sha512', function (err, data) {
if (err)
return reject(err);
else
return resolve(data);
});
});
}
function mnemonicToSeedHexAsync(mnemonic, password) {
return mnemonicToSeedAsync(mnemonic, password).then(function (buf) {
return __awaiter(this, void 0, void 0, function* () {
const buf = yield mnemonicToSeedAsync(mnemonic, password);
return buf.toString('hex');
});
}
function mnemonicToEntropy(mnemonic, wordlist) {
wordlist = wordlist || DEFAULT_WORDLIST;
var words = unorm.nfkd(mnemonic).split(' ');
const words = unorm.nfkd(mnemonic).split(' ');
if (words.length % 3 !== 0)
throw new Error(INVALID_MNEMONIC);
// convert word indices to 11 bit binary strings
var bits = words
.map(function (word) {
var index = wordlist.indexOf(word);
const bits = words
.map(word => {
const index = wordlist.indexOf(word);
if (index === -1)
throw new Error(INVALID_MNEMONIC);
return lpad(index.toString(2), '0', 11);
})
.join('');
// split the binary string into ENT/CS
var dividerIndex = Math.floor(bits.length / 33) * 32;
var entropyBits = bits.slice(0, dividerIndex);
var checksumBits = bits.slice(dividerIndex);
const dividerIndex = Math.floor(bits.length / 33) * 32;
const entropyBits = bits.slice(0, dividerIndex);
const checksumBits = bits.slice(dividerIndex);
// calculate the checksum and compare
var entropyBytes = entropyBits.match(/(.{1,8})/g).map(binaryToByte);
const entropyBytes = entropyBits.match(/(.{1,8})/g).map(binaryToByte);
if (entropyBytes.length < 16)
throw new Error(INVALID_ENTROPY);
if (entropyBytes.length > 32)
throw new Error(INVALID_ENTROPY);
if (entropyBytes.length % 4 !== 0)
throw new Error(INVALID_ENTROPY);
var entropy = Buffer.from(entropyBytes);
var newChecksum = deriveChecksumBits(entropy);
const entropy = Buffer.from(entropyBytes);
const newChecksum = deriveChecksumBits(entropy);
if (newChecksum !== checksumBits)
throw new Error(INVALID_CHECKSUM);
return entropy.toString('hex');
@@ -117,12 +122,12 @@ function entropyToMnemonic(entropy, wordlist) {
throw new TypeError(INVALID_ENTROPY);
if (entropy.length % 4 !== 0)
throw new TypeError(INVALID_ENTROPY);
var entropyBits = bytesToBinary([].slice.call(entropy));
var checksumBits = deriveChecksumBits(entropy);
var bits = entropyBits + checksumBits;
var chunks = bits.match(/(.{1,11})/g);
var words = chunks.map(function (binary) {
var index = binaryToByte(binary);
const entropyBits = bytesToBinary([].slice.call(entropy));
const checksumBits = deriveChecksumBits(entropy);
const bits = entropyBits + checksumBits;
const chunks = bits.match(/(.{1,11})/g);
const words = chunks.map(binary => {
const index = binaryToByte(binary);
return wordlist[index];
});
return wordlist === JAPANESE_WORDLIST
@@ -146,14 +151,14 @@ function validateMnemonic(mnemonic, wordlist) {
return true;
}
module.exports = {
mnemonicToSeed: mnemonicToSeed,
mnemonicToSeedAsync: mnemonicToSeedAsync,
mnemonicToSeedHex: mnemonicToSeedHex,
mnemonicToSeedHexAsync: mnemonicToSeedHexAsync,
mnemonicToEntropy: mnemonicToEntropy,
entropyToMnemonic: entropyToMnemonic,
generateMnemonic: generateMnemonic,
validateMnemonic: validateMnemonic,
mnemonicToSeed,
mnemonicToSeedAsync,
mnemonicToSeedHex,
mnemonicToSeedHexAsync,
mnemonicToEntropy,
entropyToMnemonic,
generateMnemonic,
validateMnemonic,
wordlists: {
EN: ENGLISH_WORDLIST,
JA: JAPANESE_WORDLIST,
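
The __awaiter block prepended at the top of this file is the helper tsc emits when an async function (here mnemonicToSeedHexAsync) is compiled for a pre-ES2017 target; the yield inside the generator plays the role of await. To a consumer, the compiled module simply exposes Promise-returning functions. A hedged usage sketch (the package name, passphrase, and error handling below are illustrative, not taken from this commit):

// Illustrative consumer code, assuming the module is consumed under the name 'bip39'.
import * as bip39 from 'bip39';

async function demo(): Promise<void> {
  const mnemonic = bip39.generateMnemonic(); // defaults to 128 bits of entropy (12 words)
  if (!bip39.validateMnemonic(mnemonic)) {
    throw new Error('generated mnemonic failed validation');
  }
  // The Promise-based derivation exported above; awaiting it yields the hex-encoded 64-byte seed.
  const seedHex = await bip39.mnemonicToSeedHexAsync(mnemonic, 'TREZOR');
  console.log(seedHex.length); // 128 hex characters
}

demo().catch(console.error);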

ts_src/index.ts

@@ -30,24 +30,20 @@ function binaryToByte(bin: string): number {
}
function bytesToBinary(bytes: number[]): string {
return bytes
.map(function(x) {
return lpad(x.toString(2), '0', 8);
})
.join('');
return bytes.map(x => lpad(x.toString(2), '0', 8)).join('');
}
function deriveChecksumBits(entropyBuffer: Buffer) {
var ENT = entropyBuffer.length * 8;
var CS = ENT / 32;
var hash = createHash('sha256')
function deriveChecksumBits(entropyBuffer: Buffer): string {
const ENT = entropyBuffer.length * 8;
const CS = ENT / 32;
const hash = createHash('sha256')
.update(entropyBuffer)
.digest();
return bytesToBinary([].slice.call(hash)).slice(0, CS);
}
function salt(password?: string) {
function salt(password?: string): string {
return 'mnemonic' + (password || '');
}
@@ -66,43 +62,47 @@ function mnemonicToSeedAsync(
mnemonic: string,
password: string,
): Promise<Buffer> {
return new Promise(function(resolve, reject) {
try {
var mnemonicBuffer = Buffer.from(unorm.nfkd(mnemonic), 'utf8');
var saltBuffer = Buffer.from(salt(unorm.nfkd(password)), 'utf8');
} catch (error) {
return reject(error);
}
pbkdf2Async(mnemonicBuffer, saltBuffer, 2048, 64, 'sha512', function(
err,
data,
) {
if (err) return reject(err);
else return resolve(data);
});
});
return new Promise(
(resolve, reject): void => {
try {
const mnemonicBuffer = Buffer.from(unorm.nfkd(mnemonic), 'utf8');
const saltBuffer = Buffer.from(salt(unorm.nfkd(password)), 'utf8');
pbkdf2Async(
mnemonicBuffer,
saltBuffer,
2048,
64,
'sha512',
(err, data) => {
if (err) return reject(err);
else return resolve(data);
},
);
} catch (error) {
return reject(error);
}
},
);
}
function mnemonicToSeedHexAsync(
async function mnemonicToSeedHexAsync(
mnemonic: string,
password: string,
): Promise<string> {
return mnemonicToSeedAsync(mnemonic, password).then(function(buf) {
return buf.toString('hex');
});
const buf = await mnemonicToSeedAsync(mnemonic, password);
return buf.toString('hex');
}
function mnemonicToEntropy(mnemonic: string, wordlist: string[]) {
function mnemonicToEntropy(mnemonic: string, wordlist: string[]): string {
wordlist = wordlist || DEFAULT_WORDLIST;
var words = unorm.nfkd(mnemonic).split(' ');
const words = unorm.nfkd(mnemonic).split(' ');
if (words.length % 3 !== 0) throw new Error(INVALID_MNEMONIC);
// convert word indices to 11 bit binary strings
var bits = words
.map(function(word) {
var index = wordlist.indexOf(word);
const bits = words
.map(word => {
const index = wordlist.indexOf(word);
if (index === -1) throw new Error(INVALID_MNEMONIC);
return lpad(index.toString(2), '0', 11);
@@ -110,18 +110,18 @@ function mnemonicToEntropy(mnemonic: string, wordlist: string[]) {
.join('');
// split the binary string into ENT/CS
var dividerIndex = Math.floor(bits.length / 33) * 32;
var entropyBits = bits.slice(0, dividerIndex);
var checksumBits = bits.slice(dividerIndex);
const dividerIndex = Math.floor(bits.length / 33) * 32;
const entropyBits = bits.slice(0, dividerIndex);
const checksumBits = bits.slice(dividerIndex);
// calculate the checksum and compare
var entropyBytes = entropyBits.match(/(.{1,8})/g)!.map(binaryToByte);
const entropyBytes = entropyBits.match(/(.{1,8})/g)!.map(binaryToByte);
if (entropyBytes.length < 16) throw new Error(INVALID_ENTROPY);
if (entropyBytes.length > 32) throw new Error(INVALID_ENTROPY);
if (entropyBytes.length % 4 !== 0) throw new Error(INVALID_ENTROPY);
var entropy = Buffer.from(entropyBytes);
var newChecksum = deriveChecksumBits(entropy);
const entropy = Buffer.from(entropyBytes);
const newChecksum = deriveChecksumBits(entropy);
if (newChecksum !== checksumBits) throw new Error(INVALID_CHECKSUM);
return entropy.toString('hex');
@@ -139,13 +139,13 @@ function entropyToMnemonic(
if (entropy.length > 32) throw new TypeError(INVALID_ENTROPY);
if (entropy.length % 4 !== 0) throw new TypeError(INVALID_ENTROPY);
var entropyBits = bytesToBinary([].slice.call(entropy));
var checksumBits = deriveChecksumBits(entropy);
const entropyBits = bytesToBinary([].slice.call(entropy));
const checksumBits = deriveChecksumBits(entropy);
var bits = entropyBits + checksumBits;
var chunks = bits.match(/(.{1,11})/g)!;
var words = chunks.map(function(binary) {
var index = binaryToByte(binary);
const bits = entropyBits + checksumBits;
const chunks = bits.match(/(.{1,11})/g)!;
const words = chunks.map(binary => {
const index = binaryToByte(binary);
return wordlist![index];
});
@@ -177,14 +177,14 @@ function validateMnemonic(mnemonic: string, wordlist: string[]): boolean {
}
module.exports = {
mnemonicToSeed: mnemonicToSeed,
mnemonicToSeedAsync: mnemonicToSeedAsync,
mnemonicToSeedHex: mnemonicToSeedHex,
mnemonicToSeedHexAsync: mnemonicToSeedHexAsync,
mnemonicToEntropy: mnemonicToEntropy,
entropyToMnemonic: entropyToMnemonic,
generateMnemonic: generateMnemonic,
validateMnemonic: validateMnemonic,
mnemonicToSeed,
mnemonicToSeedAsync,
mnemonicToSeedHex,
mnemonicToSeedHexAsync,
mnemonicToEntropy,
entropyToMnemonic,
generateMnemonic,
validateMnemonic,
wordlists: {
EN: ENGLISH_WORDLIST,
JA: JAPANESE_WORDLIST,
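
Beyond the var-to-const, arrow-function, and explicit-return-type changes, the rewritten mnemonicToSeedAsync keeps the callback-to-Promise wrapping of pbkdf2 but moves the pbkdf2Async call inside the try block, so the explicit catch now covers both buffer preparation and the call itself. The same pattern in isolation, as a generic sketch (deriveKey is a hypothetical name, not part of this module):

import { pbkdf2 } from 'pbkdf2';

// Generic callback-to-Promise wrapper in the style used above.
function deriveKey(password: Buffer, saltBuffer: Buffer): Promise<Buffer> {
  return new Promise(
    (resolve, reject): void => {
      try {
        // Both synchronous throws and the callback's error argument become rejections.
        pbkdf2(password, saltBuffer, 2048, 64, 'sha512', (err, key) => {
          if (err) return reject(err);
          return resolve(key);
        });
      } catch (error) {
        return reject(error);
      }
    },
  );
}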

tslint.json (new file)

@@ -0,0 +1,40 @@
{
"defaultSeverity": "error",
"extends": ["tslint:recommended"],
"rules": {
"arrow-parens": [true, "ban-single-arg-parens"],
"curly": false,
"indent": [
true,
"spaces",
2
],
"interface-name": [false],
"match-default-export-name": true,
"max-classes-per-file": [false],
"member-access": [true, "no-public"],
"no-bitwise": false,
"no-console": false,
"no-empty": [true, "allow-empty-catch"],
"no-implicit-dependencies": true,
"no-return-await": true,
"no-var-requires": false,
"no-unused-expression": false,
"object-literal-sort-keys": false,
"quotemark": [true, "single"],
"typedef": [
true,
"call-signature",
"arrow-call-signature",
"property-declaration"
],
"variable-name": [
true,
"ban-keywords",
"check-format",
"allow-leading-underscore",
"allow-pascal-case"
]
},
"rulesDirectory": []
}
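
The settings most visible in the source changes above: "typedef" requires explicit return types on function and arrow call signatures and type annotations on property declarations, "arrow-parens" with "ban-single-arg-parens" forbids redundant parentheses around a single arrow parameter, and "quotemark" enforces single quotes. A small illustrative snippet (hypothetical names, not taken from this package) shaped to pass this configuration:

// Hypothetical example code written to satisfy the configuration above.
function leftPad(str: string, padString: string, length: number): string {
  while (str.length < length) str = padString + str; // "curly": false allows brace-less bodies
  return str;
}

// Single-argument arrow without parentheses, matching the map callbacks above;
// the callback's types come from Array.prototype.map, so no annotation is needed here.
const padded = ['1', '10', '101'].map(s => leftPad(s, '0', 8));

console.log(padded.join(', ')); // "no-console": false permits console usage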