Create script to update tokens list from ethereum-lists/tokens (#1247)

* Add script to update tokens from ethereum-lists/tokens, commit output.

* Force decimals to be integers.

* Eth-lists-script Typescript (#1508)

* Update scripts to handle collisions, and use typescript

* Add comment on duplicateAddress validator

* Lock dep on ts-node

* Fix tsc errors

* Revert tokens update

* Make implicit anys explicit
William O'Beirne 2018-04-13 13:02:00 -04:00 committed by Daniel Ternyak
parent 7da50ae52a
commit 574c628e61
7 changed files with 351 additions and 1 deletion

package.json

@ -116,6 +116,7 @@
"thread-loader": "1.1.5",
"ts-jest": "22.4.2",
"ts-loader": "4.1.0",
"ts-node": "5.0.1",
"tslint": "5.9.1",
"tslint-config-prettier": "1.10.0",
"tslint-microsoft-contrib": "5.0.3",
@ -172,7 +173,8 @@
"precommit": "lint-staged",
"formatAll": "find ./common/ -name '*.ts*' | xargs prettier --write --config ./.prettierrc --config-precedence file-override",
"prettier:diff": "prettier --write --config ./.prettierrc --list-different \"common/**/*.ts\" \"common/**/*.tsx\"",
"prepush": "npm run tslint && npm run tscheck"
"prepush": "npm run tslint && npm run tscheck",
"update:tokens": "ts-node scripts/update-tokens"
},
"lint-staged": {
"*.{ts,tsx}": [

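With the new update:tokens entry in place, the token list can be refreshed locally by running npm run update:tokens, which simply invokes ts-node on scripts/update-tokens (using the ts-node devDependency pinned above).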
scripts/types/CommitStatus.ts

@ -0,0 +1,31 @@
export interface Creator {
login: string;
id: number;
avatar_url: string;
gravatar_id: string;
url: string;
html_url: string;
followers_url: string;
following_url: string;
gists_url: string;
starred_url: string;
subscriptions_url: string;
organizations_url: string;
repos_url: string;
events_url: string;
received_events_url: string;
type: string;
site_admin: boolean;
}
export interface CommitStatus {
url: string;
id: number;
state: string;
description: string;
target_url: string;
context: string;
created_at: Date;
updated_at: Date;
creator: Creator;
}

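These fields mirror GitHub's commit status API; the update script further below only inspects target_url. A minimal illustrative sketch, with invented values and assuming it lives alongside the script under scripts/:

```ts
import { CommitStatus } from './types/CommitStatus';

// Hypothetical entry from the /statuses/:sha listing; every value is invented.
// The updater scans entries like this for a target_url that contains "ipfs".
const exampleStatus: Partial<CommitStatus> = {
  state: 'success',
  description: 'Build finished',
  context: 'ci/ipfs-build',
  target_url: 'https://ipfs.example/build/output/'
};

console.log(exampleStatus.target_url && exampleStatus.target_url.includes('ipfs')); // true
```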
scripts/types/GitCommit.ts

@ -0,0 +1,90 @@
export interface Author {
name: string;
email: string;
date: Date;
}
export interface Committer {
name: string;
email: string;
date: Date;
}
export interface Tree {
sha: string;
url: string;
}
export interface Verification {
verified: boolean;
reason: string;
signature?: any;
payload?: any;
}
export interface Commit {
author: Author;
committer: Committer;
message: string;
tree: Tree;
url: string;
comment_count: number;
verification: Verification;
}
export interface Author2 {
login: string;
id: number;
avatar_url: string;
gravatar_id: string;
url: string;
html_url: string;
followers_url: string;
following_url: string;
gists_url: string;
starred_url: string;
subscriptions_url: string;
organizations_url: string;
repos_url: string;
events_url: string;
received_events_url: string;
type: string;
site_admin: boolean;
}
export interface Committer2 {
login: string;
id: number;
avatar_url: string;
gravatar_id: string;
url: string;
html_url: string;
followers_url: string;
following_url: string;
gists_url: string;
starred_url: string;
subscriptions_url: string;
organizations_url: string;
repos_url: string;
events_url: string;
received_events_url: string;
type: string;
site_admin: boolean;
}
export interface Parent {
sha: string;
url: string;
html_url: string;
}
export interface GitCommit {
sha: string;
commit: Commit;
url: string;
html_url: string;
comments_url: string;
author: Author2;
committer: Committer2;
parents: Parent[];
}

scripts/types/TokensJson.ts

@ -0,0 +1,20 @@
export interface RawTokenJSON {
name?: string;
symbol?: string;
address?: string;
decimals?: number | string;
}
export interface ValidatedTokenJSON {
name: string;
symbol: string;
address: string;
decimals: number | string;
}
export interface NormalizedTokenJSON {
name: string;
symbol: string;
address: string;
decimal: number;
}

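To make the three shapes concrete, here is a small illustrative sketch (hypothetical token values, assuming it sits under scripts/) of a raw entry next to its normalized form, where decimals is coerced to a number and renamed to decimal:

```ts
import { RawTokenJSON, NormalizedTokenJSON } from './types/TokensJson';

// A raw entry as it might arrive from the ethereum-lists build output;
// note that decimals may be either a number or a string.
const raw: RawTokenJSON = {
  name: 'Example Token',
  symbol: 'EXMP',
  address: '0x0000000000000000000000000000000000000001',
  decimals: '18'
};

// After validation and normalization (see update-tokens-utils below),
// the same token carries a numeric `decimal` field instead:
const normalized: NormalizedTokenJSON = {
  name: 'Example Token',
  symbol: 'EXMP',
  address: '0x0000000000000000000000000000000000000001',
  decimal: 18
};

console.log(raw, normalized);
```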
scripts/update-tokens-utils.ts

@ -0,0 +1,128 @@
import { RawTokenJSON, ValidatedTokenJSON, NormalizedTokenJSON } from './types/TokensJson';
import { Token } from '../shared/types/network';
interface StrIdx<T> {
[key: string]: T;
}
function processTokenJson(tokensJson: RawTokenJSON[]): Token[] {
const normalizedTokens = tokensJson.map(validateTokenJSON).map(normalizeTokenJSON);
checkForDuplicateAddresses(normalizedTokens);
return handleDuplicateSymbols(normalizedTokens).map(({ name: _, ...rest }) => rest);
}
function validateTokenJSON(token: RawTokenJSON): ValidatedTokenJSON {
const isValid = (t: RawTokenJSON): t is ValidatedTokenJSON =>
!!(t.address && (t.decimals || t.decimals === 0) && t.name && t.symbol);
if (isValid(token)) {
return token;
}
throw Error(`Token failed validation, missing part of schema
Symbol: ${token.symbol}
Name: ${token.name}
Address: ${token.address}
Decimals: ${token.decimals}`);
}
function normalizeTokenJSON(token: ValidatedTokenJSON): NormalizedTokenJSON {
const { address, decimals, symbol, name } = token;
const t: NormalizedTokenJSON = { address, symbol, decimal: +decimals, name };
return t;
}
/**
*
 * @description Checks for any duplicated addresses and halts the program if any are found
* @param {NormalizedTokenJSON[]} tokens
*/
function checkForDuplicateAddresses(tokens: NormalizedTokenJSON[]) {
const map: StrIdx<boolean> = {};
const errors: string[] = [];
for (const token of tokens) {
const { address } = token;
// We might want to strip hex prefixes here, and make all characters lowercase
if (map[address]) {
errors.push(`Token ${token.symbol} has a duplicate address of ${token.address}`);
}
map[address] = true;
}
if (errors.length) {
const err = errors.join('\n');
throw Error(err);
}
}
/**
*
* @description Finds any duplicated names in the fetched token json
* @param {NormalizedTokenJSON[]} tokens
* @returns
*/
function getDuplicatedNames(tokens: NormalizedTokenJSON[]) {
const checkedNames: StrIdx<boolean> = {};
const duplicatedNames: StrIdx<boolean> = {};
for (const token of tokens) {
const { name } = token;
if (checkedNames[name]) {
duplicatedNames[name] = true;
}
checkedNames[name] = true;
}
return duplicatedNames;
}
/**
*
 * @description Handles tokens with duplicated symbols by grouping them into a map where each value is a bucket
 * of tokens sharing the same symbol, then renaming each one so the symbols no longer conflict
* @param {NormalizedTokenJSON[]} tokens
* @returns
*/
function handleDuplicateSymbols(tokens: NormalizedTokenJSON[]) {
// start by building a map of symbols => tokens
const map = new Map<string, NormalizedTokenJSON[]>();
for (const token of tokens) {
const { symbol } = token;
const v = map.get(symbol);
if (v) {
map.set(symbol, [...v, token]);
} else {
map.set(symbol, [token]);
}
}
const duplicatedNames = getDuplicatedNames(tokens);
const dedupedTokens: NormalizedTokenJSON[] = [];
map.forEach(tokenBucket =>
dedupedTokens.push(...renameSymbolCollisions(tokenBucket, duplicatedNames))
);
return dedupedTokens;
}
/**
*
 * @description Any token symbol collisions are handled in this manner:
 * 1) If the token's name isn't also a duplicate, the name is appended to the symbol in parentheses
 * 2) If the name is a duplicate too, the token's index + 1 is appended instead (so we don't start at 0)
* @param {NormalizedTokenJSON[]} tokens
* @param {StrIdx<boolean>} duplicatedNames
* @returns
*/
function renameSymbolCollisions(tokens: NormalizedTokenJSON[], duplicatedNames: StrIdx<boolean>) {
const renamedTokens: NormalizedTokenJSON[] = [];
if (tokens.length === 1) {
return tokens;
}
return tokens.reduce((prev, curr, idx) => {
const newName = `${curr.symbol} (${duplicatedNames[curr.name] ? idx + 1 : curr.name})`;
const tokenToInsert: NormalizedTokenJSON = {
...curr,
symbol: newName
};
console.warn(`WARN: "${curr.symbol}" has a duplicate symbol, renaming to "${newName}"`);
return [...prev, tokenToInsert];
}, renamedTokens);
}
module.exports = { processTokenJson };

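A short usage sketch of the collision handling above, with hypothetical tokens and addresses, assuming it sits under scripts/:

```ts
import { RawTokenJSON } from './types/TokensJson';
// The utils file exports via module.exports, so it is loaded with require(),
// exactly as scripts/update-tokens.ts does below.
const { processTokenJson } = require('./update-tokens-utils');

// Two hypothetical tokens that collide on the symbol "ABC" but have unique names.
const input: RawTokenJSON[] = [
  { name: 'Alpha', symbol: 'ABC', address: '0x0000000000000000000000000000000000000001', decimals: 18 },
  { name: 'Beta', symbol: 'ABC', address: '0x0000000000000000000000000000000000000002', decimals: '8' }
];

// The addresses are unique, so checkForDuplicateAddresses passes. Both names are
// unique as well, so each colliding symbol is suffixed with its name, giving
// "ABC (Alpha)" and "ABC (Beta)", and the temporary name field is dropped.
console.log(processTokenJson(input));
```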
scripts/update-tokens.ts

@ -0,0 +1,78 @@
import { GitCommit } from './types/GitCommit';
import { CommitStatus } from './types/CommitStatus';
import { RawTokenJSON } from './types/TokensJson';
const { processTokenJson } = require('./update-tokens-utils');
const https = require('https');
const fs = require('fs');
const path = require('path');
function httpsGet(opts: any): Promise<string> {
return new Promise(resolve => {
https.get(opts, (res: any) => {
let body = '';
res.setEncoding('utf8');
res.on('data', (data: any) => (body += data));
res.on('end', () => {
resolve(body);
});
});
});
}
function githubApi<T extends object>(pathTail: string): Promise<T> {
return httpsGet({
hostname: 'api.github.com',
path: `/repos/ethereum-lists/tokens${pathTail}`,
headers: {
'user-agent': 'node',
'content-type': 'application/json; charset=utf-8'
}
}).then(body => JSON.parse(body));
}
async function run() {
// First we fetch the latest commit from ethereum-lists/tokens
console.log('Fetching ethereum-lists/tokens commits...');
const commits = await githubApi<GitCommit[]>('/commits');
const commit = commits[0];
// Then we fetch its build status
console.log('Fetching commits statuses...');
const statuses = await githubApi<CommitStatus[]>(`/statuses/${commit.sha}`);
// Fetch the IPFS link, which is a page of links to other IPFS links
console.log('Fetching IPFS output HTML...');
const ipfsUrl = statuses.find(status => status.target_url.includes('ipfs'));
if (!ipfsUrl) {
throw Error('ipfs url not found');
}
const ipfsTargetUrl = ipfsUrl.target_url;
const ipfsHtml = await httpsGet(ipfsTargetUrl);
// Get the IPFS URL for the eth tokens JSON. Regexing HTML isn't pretty, but it does the job here
console.log('Fetching IPFS ETH Tokens JSON...');
const tokenUrlMatch = ipfsHtml.match(/<a href='([^']+)'>output\/minified\/eth\.json<\/a>/);
if (!tokenUrlMatch) {
throw Error('No match found for token url');
}
const tokensUrl = tokenUrlMatch[1];
const tokensJson: RawTokenJSON[] = JSON.parse(await httpsGet(tokensUrl));
// Format the json to match our format in common/config/tokens/eth.json
const tokens = processTokenJson(tokensJson);
// Write to the file
console.log('Writing Tokens JSON to common/config/tokens/eth.json...');
const filePath = path.resolve(__dirname, '../common/config/tokens/eth.json');
fs.writeFile(filePath, JSON.stringify(tokens, null, 2), 'utf8', (err: any) => {
if (err) {
console.error(err);
throw new Error('Failed to write tokens json to file, see above error');
}
console.log('Successfully imported', tokens.length, 'tokens!');
});
}
run();

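The scraping step depends on the anchor markup of the IPFS listing page; a tiny sketch with a made-up snippet (URL invented) shows what the regex captures into tokensUrl:

```ts
// Made-up fragment of the IPFS listing HTML that a build status links to.
const sampleHtml = "<a href='https://ipfs.example/output/minified/eth.json'>output/minified/eth.json</a>";
const tokenUrlMatch = sampleHtml.match(/<a href='([^']+)'>output\/minified\/eth\.json<\/a>/);

// The first capture group is the href, here 'https://ipfs.example/output/minified/eth.json'.
console.log(tokenUrlMatch && tokenUrlMatch[1]);
```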
tsconfig.json

@ -21,6 +21,7 @@
"noImplicitAny": true
},
"include": [
"./scripts",
"./common/",
"./electron-app/",
"./shared/",