refactor: Complete rewrite of library to use mythxjs

v2.0.0 (2020-04-02)

Bug Fixes
issues: Fixed issue list not matching the list of issues in the MythX dashboard.
sources: We no longer send all compiled contracts (which may be mutually exclusive) to each MythX analysis.
Features
libs: Now using mythxjs instead of armlet (deprecated) to communicate with the MythX API.
refactor: Complete refactor, with many of the changes focussing on basing off sabre.

BREAKING CHANGES
The --full CLI option is now obsolete and will not have any effect. Please use --mode full instead.
Authentication to the MythX service now requires that the MYTHX_API_KEY environment variable is set, either in a .env file located in your project's root, or directly in an environment variable.
This commit is contained in:
emizzle 2020-03-26 17:50:00 +11:00
parent 1501504bae
commit 71ca63b5a0
No known key found for this signature in database
GPG Key ID: 1FD4BAB3C37EE9BA
32 changed files with 11305 additions and 4173 deletions

42
.github/workflows/main.yml vendored Normal file
View File

@ -0,0 +1,42 @@
# CI pipeline: lint, test (with coverage) and build on every push.
name: CI
on: [push]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Begin CI...
        uses: actions/checkout@v2
      - name: Use Node 12
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      # Cache node_modules keyed on the yarn lockfile to speed up installs.
      - name: Use cached node_modules
        uses: actions/cache@v1
        with:
          path: node_modules
          key: nodeModules-${{ hashFiles('**/yarn.lock') }}
          restore-keys: |
            nodeModules-
      - name: Install dependencies
        run: yarn install --frozen-lockfile
        env:
          CI: true
      - name: Lint
        run: yarn lint
        env:
          CI: true
      - name: Test
        run: yarn test --ci --coverage --maxWorkers=2
        env:
          CI: true
      - name: Build
        run: yarn build
        env:
          CI: true

6
.gitignore vendored
View File

@ -1,2 +1,4 @@
node_modules/
*.log
.DS_Store
node_modules
dist

4
.npmrc Normal file
View File

@ -0,0 +1,4 @@
engine-strict = true
package-lock = false
save-exact = true
scripts-prepend-node-path = true

3
.yarnrc Normal file
View File

@ -0,0 +1,3 @@
--*.scripts-prepend-node-path true
--install.check-files true
--install.network-timeout 600000

21
CHANGELOG.md Normal file
View File

@ -0,0 +1,21 @@
# Change log
# [2.0.0](https://github.com/embarklabs/embark-mythx/compare/v1.0.3...v2.0.0) (2020-04-02)
### Bug Fixes
* **issues:** Fixed issue list not matching the list of issues in the MythX dashboard.
* **sources:** We no longer send all compiled contracts (which may be mutually exclusive) to each MythX analysis.
### Features
* **libs:** Now using [`mythxjs`](https://github.com/ConsenSys/mythxjs) instead of `armlet` (deprecated) to communicate with the MythX API.
* **refactor:** Complete refactor, with many of the changes focussing on basing off [`sabre`](https://github.com/b-mueller/sabre).
### BREAKING CHANGES
* The `--full` CLI option is now obsolete and will not have any effect. Please use `--mode full` instead.
* Authentication to the MythX service now requires that the MYTHX_API_KEY environment variable is set, either in a `.env` file located in your project's root, or directly in an environment variable.
[bug]: https://github.com/ethereum/web3.js/issues/3283

View File

@ -1,6 +1,6 @@
MIT License
Copyright (c) 2019 Flex Dapps
Copyright (c) 2020 Status.im
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

111
README.md
View File

@ -1,22 +1,24 @@
![Running MythX analyses in Status Embark](https://cdn-images-1.medium.com/max/960/1*7jwHRc5J152bz704Fg7iug.png)
# Status Embark plugin for MythX
![Running MythX analyses in Status Embark](https://raw.githubusercontent.com/embarklabs/embark-mythx/4808bfe3a07ab871670da4859594080ec7276aba/screenshot.png)
[![GitHub license](https://img.shields.io/github/license/flex-dapps/embark-mythx.svg)](https://github.com/flex-dapps/embark-mythx/blob/master/LICENSE)
[![GitHub license](https://img.shields.io/github/license/flex-dapps/embark-mythx.svg)](https://github.com/embarklabs/embark-mythx/blob/master/LICENSE)
![npm](https://img.shields.io/npm/v/embark-mythx.svg)
# Status Embark plugin for MythX.
This plugin brings MythX to Status Embark. Simply call verify from the Embark console and embark-mythx sends your contracts off for analysis. It is inspired by [sabre](https://github.com/b-mueller/sabre) and uses its source mapping and reporting functions.
This plugin brings MythX to Status Embark. Simply call `verify` from the Embark console and `embark-mythx` sends your contracts off for analysis. It is inspired by `truffle-security` and uses its source mapping and reporting functions.
This project was bootstrapped with [TSDX](https://github.com/jaredpalmer/tsdx).
## QuickStart
1. Create a `.env` file in the root of your project and provide your MythX login information. Free MythX accounts can be created at https://dashboard.mythx.io/#/registration.
1. Create a `.env` file in the root of your project and provide your MythX API Key. Free MythX accounts can be created at https://dashboard.mythx.io/#/registration. Once an account is created, generate an API key at https://dashboard.mythx.io/#/console/tools.
```json
MYTHX_USERNAME="<mythx-username>"
MYTHX_PASSWORD="<password>"
MYTHX_API_KEY="<mythx-api-key>"
```
> **NOTE:** `MYTHX_ETH_ADDRESS` has been deprecated in favour of `MYTHX_USERNAME` and will be removed in future versions. Please update your .env file or your environment variables accordingly.
> **NOTE:** `MYTHX_ETH_ADDRESS` has been deprecated in favour of `MYTHX_USERNAME` and will be removed in future versions. As of version 2.0, `MYTHX_API_KEY` is also required. Please update your .env file or your environment variables accordingly.
`MYTHX_USERNAME` may be either of:
* MythX User ID (assigned by MythX API to any registered user);
@ -29,20 +31,38 @@ For more information, please see the [MythX API Login documentation](https://api
```bash
Embark (development) > verify
embark-mythx: Running MythX analysis in background.
embark-mythx: Submitting 'ERC20' for analysis...
embark-mythx: Submitting 'SafeMath' for analysis...
embark-mythx: Submitting 'Ownable' for analysis...
Authenticating MythX user...
Running MythX analysis...
Analysis job submitted: https://dashboard.mythx.io/#/console/analyses/9a294be9-8656-416a-afbc-06cb299f5319
Analyzing Bank in quick mode...
Analysis job submitted: https://dashboard.mythx.io/#/console/analyses/0741a098-6b81-43dc-af06-0416eda2a076
Analyzing Hack in quick mode...
Retrieving Bank analysis results...
Retrieving Hack analysis results...
Rendering Bank analysis report...
embark-mythx:
/home/flex/mythx-plugin/testToken/.embark/contracts/ERC20.sol
1:0 warning A floating pragma is set SWC-103
Bank.sol
18:12 error persistent state read after call https://swcregistry.io/SWC-registry/docs/SWC-107
14:28 warning A call to a user-supplied address is executed https://swcregistry.io/SWC-registry/docs/SWC-107
1:0 warning A floating pragma is set https://swcregistry.io/SWC-registry/docs/SWC-103
✖ 1 problem (0 errors, 1 warning)
<unknown>
-1:0 warning You are running MythX in free mode. Analysis depth is limited in this mode so some issues might not be detected. Upgrade to a Dev or Pro plan to unlock in-depth analysis and higher rate limits. https://mythx.io/plans N/A
embark-mythx: MythX analysis found vulnerabilities.
✖ 4 problems (1 error, 3 warnings)
Rendering Hack analysis report...
Hack.sol
1:0 warning A floating pragma is set https://swcregistry.io/SWC-registry/docs/SWC-103
<unknown>
-1:0 warning You are running MythX in free mode. Analysis depth is limited in this mode so some issues might not be detected. Upgrade to a Dev or Pro plan to unlock in-depth analysis and higher rate limits. https://mythx.io/plans N/A
✖ 2 problems (0 errors, 2 warnings)
Done!
```
## Installation
0. Install this plugin from the root of your Embark project:
@ -64,22 +84,33 @@ $ npm i flex-dapps/embark-mythx
```
## Usage
The following usage guide can also be obtained by running `verify help` in the Embark console.
```bash
verify [--full] [--debug] [--limit] [--initial-delay] [<contracts>]
verify status <uuid>
verify help
Available Commands
Options:
--full, -f Perform full instead of quick analysis (not available on free MythX tier).
--debug, -d Additional debug output.
--limit, -l Maximum number of concurrent analyses.
--initial-delay, -i Time in seconds before first analysis status check.
verify <options> [contracts] Runs MythX verification. If array of contracts are specified, only those contracts will be analysed.
verify report [--format] uuid Get the report of a completed analysis.
verify status uuid Get the status of an already submitted analysis.
verify list Displays a list of the last 20 submitted analyses in a table.
verify help Display this usage guide.
[<contracts>] List of contracts to submit for analysis (default: all).
status <uuid> Retrieve analysis status for given MythX UUID.
help This help.
Examples
verify --mode full SimpleStorage ERC20 Runs a full MythX verification for the SimpleStorage and ERC20 contracts only.
verify status 0d60d6b3-e226-4192-b9c6-66b45eca3746 Gets the status of the MythX analysis with the specified uuid.
verify report --format stylish 0d60d6b3-e226-4192-b9c6-66b45eca3746 Gets the report of the MythX analysis with the specified uuid.
Verify options
-m, --mode string Analysis mode. Options: quick, standard, deep (default: quick).
-o, --format string Output format. Options: text, stylish, compact, table, html, json (default:
stylish).
-c, --no-cache-lookup Deactivate MythX cache lookups (default: false).
-d, --debug Print MythX API request and response.
-l, --limit number Maximum number of concurrent analyses (default: 10).
--timeout number Timeout in secs to wait for analysis to finish (default: smart default based
on mode).
```
### Example Usage
@ -93,4 +124,28 @@ $ verify ERC20 Ownable --full
# Check status of previous or ongoing analysis
$ verify status ef5bb083-c57a-41b0-97c1-c14a54617812
```
```
## `embark-mythx` Development
Contributions are very welcome! If you'd like to contribute, the following commands will help you get up and running. The library was built using [TSDX](https://github.com/jaredpalmer/tsdx), so these commands are specific to TSDX.
### `npm run start` or `yarn start`
Runs the project in development/watch mode. `embark-mythx` will be rebuilt upon changes. TSDX has a special logger for your convenience. Error messages are pretty printed and formatted for compatibility with VS Code's Problems tab.
<img src="https://user-images.githubusercontent.com/4060187/52168303-574d3a00-26f6-11e9-9f3b-71dbec9ebfcb.gif" width="600" />
Your library will be rebuilt if you make edits.
### `npm run build` or `yarn build`
Bundles the package to the `dist` folder.
The package is optimized and bundled with Rollup into multiple formats (CommonJS, UMD, and ES Module).
<img src="https://user-images.githubusercontent.com/4060187/52168322-a98e5b00-26f6-11e9-8cf6-222d716b75ef.gif" width="600" />
### `npm test` or `yarn test`
Runs the test watcher (Jest) in an interactive mode.
By default, runs tests related to files changed since the last commit.

View File

@ -1,56 +0,0 @@
'use strict';
/* This is modified from remix-lib/astWalker.js to use the newer solc AST format
*/
/**
 * Crawl the given AST through the function walk(ast, callback)
 */
function AstWalker () {
}
/**
 * visit all the AST nodes
 *
 * @param {Object} ast - AST node
 * @param {Object or Function} callback - if (Function) the function will be called for every node.
 *                                      - if (Object) callback[<Node Type>] will be called for
 *   every node of type <Node Type>. callback["*"] will be called for all other nodes.
 *   in each case, if the callback returns false it does not descend into children.
 *   If no callback for the current type, children are visited.
 */
AstWalker.prototype.walk = function (ast, callback) {
  // A bare function is treated as a '*' catch-all handler map.
  if (callback instanceof Function) {
    callback = {'*': callback};
  }
  // Guarantee a '*' fallback so manageCallBack can always dispatch.
  if (!('*' in callback)) {
    callback['*'] = function () { return true; };
  }
  // Descend into children only when the handler returned truthy.
  // Newer solc ASTs keep children in `nodes` (legacy ASTs used `children`).
  if (manageCallBack(ast, callback) && ast.nodes && ast.nodes.length > 0) {
    for (const child of ast.nodes) {
      this.walk(child, callback);
    }
  }
};
/**
 * Walk every AST in the given sources list.
 *
 * @param {Object} sourcesList - sources list; each entry carries a root `ast` node
 * @param {Function|Object} callback - handler(s) forwarded to AstWalker#walk
 */
AstWalker.prototype.walkAstList = function (sourcesList, callback) {
  const walker = new AstWalker();
  sourcesList.forEach(function (source) {
    walker.walk(source.ast, callback);
  });
};
// Dispatch a node to the most specific handler available:
// callback[nodeType] when present, otherwise the '*' catch-all.
function manageCallBack (node, callback) {
  const handler = node.nodeType in callback ? callback[node.nodeType] : callback['*'];
  return handler(node);
}
module.exports = AstWalker;

View File

@ -1,231 +0,0 @@
/***
This is modified from remix-lib/src/sourceMappingDecoder.js
The essential difference is that remix-lib uses legacyAST and we
use ast instead. legacyAST has field "children" while ast
renames this to "nodes".
***/
'use strict';
var util = require('remix-lib/src/util');
var AstWalker = require('./astWalker');
/**
 * Decompress the source mapping given by solc-bin.js
 */
function SourceMappingDecoder () {
  // s:l:f:j
}
// The implementations live below as standalone functions and are attached
// to the prototype here so they can also be used internally.
/**
 * get a list of nodes that are at the given @arg position
 *
 * @param {String} astNodeType - type of node to return
 * @param {Int} position - cursor position
 * @return {Object} ast object given by the compiler
 */
SourceMappingDecoder.prototype.nodesAtPosition = nodesAtPosition;
/**
 * Decode the source mapping for the given @arg index
 *
 * @param {Integer} index - source mapping index to decode
 * @param {String} mapping - compressed source mapping given by solc-bin
 * @return {Object} returns the decompressed source mapping for the given index {start, length, file, jump}
 */
SourceMappingDecoder.prototype.atIndex = atIndex;
/**
 * Decode a single source location string.
 *
 * @param {string} value - source location of the form "start:length:file"
 * @return {Object|undefined} {start, length, file}, or undefined for falsy input
 */
SourceMappingDecoder.prototype.decode = function (value) {
  if (!value) {
    return;
  }
  const parts = value.split(':');
  return {
    start: parseInt(parts[0]),
    length: parseInt(parts[1]),
    file: parseInt(parts[2])
  };
};
/**
 * Decompress a full compressed source mapping.
 *
 * @param {String} mapping - compressed source mapping given by solc-bin
 * @return {Array} decompressed entries, each {start, length, file, jump};
 *   omitted fields inherit from the previous entry (solc's delta encoding)
 */
SourceMappingDecoder.prototype.decompressAll = function (mapping) {
  const ret = [];
  mapping.split(';').forEach(function (entry) {
    const fields = entry.split(':');
    const prev = ret[ret.length - 1];
    ret.push({
      start: fields[0] ? parseInt(fields[0]) : prev.start,
      length: fields[1] ? parseInt(fields[1]) : prev.length,
      file: fields[2] ? parseInt(fields[2]) : prev.file,
      jump: fields[3] ? fields[3] : prev.jump
    });
  });
  return ret;
};
/**
 * Retrieve the character offset of every line break in the source.
 *
 * @param {String} source - contract source code
 * @return {Array} offsets of '\n' characters, in ascending order
 */
SourceMappingDecoder.prototype.getLinebreakPositions = function (source) {
  const positions = [];
  let pos = source.indexOf('\n');
  while (pos >= 0) {
    positions.push(pos);
    pos = source.indexOf('\n', pos + 1);
  }
  return positions;
};
/**
 * Convert a character-offset source location into line/column coordinates.
 *
 * @param {Object} sourceLocation - object with {start} and {length}
 * @param {Array} lineBreakPositions - array returned by 'getLinebreakPositions'
 * @return {Object} {start: {line, column}, end: {line, column}} (0-based),
 *   or {start: null, end: null} when the location is invalid
 */
SourceMappingDecoder.prototype.convertOffsetToLineColumn = function (sourceLocation, lineBreakPositions) {
  // Exact negation of the original validity check so NaN/undefined inputs
  // take the null branch as before.
  if (!(sourceLocation.start >= 0 && sourceLocation.length >= 0)) {
    return { start: null, end: null };
  }
  return {
    start: convertFromCharPosition(sourceLocation.start, lineBreakPositions),
    end: convertFromCharPosition(sourceLocation.start + sourceLocation.length, lineBreakPositions)
  };
};
/**
 * Retrieve the first @arg astNodeType that includes the source map at arg instIndex
 *
 * @param {String} astNodeType - node type that includes the source map instIndex
 * @param {String} instIndex - instruction index used to retrieve the source map
 * @param {String} sourceMap - source map given by the compilation result
 * @param {Object} ast - ast given by the compilation result
 */
SourceMappingDecoder.prototype.findNodeAtInstructionIndex = findNodeAtInstructionIndex;
SourceMappingDecoder.prototype.findNodeAtSourceLocation = findNodeAtSourceLocation;
// Map a character offset to 0-based {line, column} using the precomputed
// line-break offsets (binary search via remix-lib's util.findLowerBound).
function convertFromCharPosition (pos, lineBreakPositions) {
  let line = util.findLowerBound(pos, lineBreakPositions);
  // findLowerBound lands on the break itself when pos is a '\n';
  // otherwise the offset belongs to the following line.
  if (lineBreakPositions[line] !== pos) {
    line += 1;
  }
  const columnStart = line === 0 ? 0 : (lineBreakPositions[line - 1] + 1);
  return { line: line, column: pos - columnStart };
}
// Parse an AST node's `src` attribute ("start:length:file") into numbers;
// returns null when the node carries no src.
function sourceLocationFromAstNode (astNode) {
  if (!astNode.src) {
    return null;
  }
  const [start, length, file] = astNode.src.split(':');
  return {
    start: parseInt(start),
    length: parseInt(length),
    file: parseInt(file)
  };
}
// Resolve the instruction index to a source location, then locate the first
// node of the requested type that covers it.
function findNodeAtInstructionIndex (astNodeType, instIndex, sourceMap, ast) {
  return findNodeAtSourceLocation(astNodeType, atIndex(instIndex, sourceMap), ast);
}
// Walk the AST and return the first node of type `astNodeType` whose source
// span fully encloses `sourceLocation`; null when no such node exists.
function findNodeAtSourceLocation (astNodeType, sourceLocation, ast) {
  let found = null;
  const visit = function (node) {
    const nodeLocation = sourceLocationFromAstNode(node);
    if (!nodeLocation) {
      return true;
    }
    const encloses =
      nodeLocation.start <= sourceLocation.start &&
      nodeLocation.start + nodeLocation.length >= sourceLocation.start + sourceLocation.length;
    if (!encloses) {
      return false; // prune: children cannot enclose the target either
    }
    if (node.nodeType !== astNodeType) {
      return true; // keep descending toward a tighter match
    }
    found = node;
    return false; // stop at the first node of the requested type
  };
  new AstWalker().walk(ast, { '*': visit });
  return found;
}
// Collect all AST nodes whose source span covers `position`. When
// `astNodeType` is given, only the first matching node of that type is
// collected (the walk stops descending once it is found).
function nodesAtPosition (astNodeType, position, ast) {
  var astWalker = new AstWalker();
  var callback = {};
  var found = [];
  callback['*'] = function (node) {
    var nodeLocation = sourceLocationFromAstNode(node);
    if (!nodeLocation) {
      return;
    }
    if (nodeLocation.start <= position && nodeLocation.start + nodeLocation.length >= position) {
      // NOTE(review): this compares against node.name, while
      // findNodeAtSourceLocation compares node.nodeType. Since this file was
      // ported to the newer solc AST format, node.nodeType may be intended
      // here — confirm against callers before changing.
      if (!astNodeType || astNodeType === node.name) {
        found.push(node);
        if (astNodeType) return false;
      }
      return true;
    } else {
      return false;
    }
  };
  // Expects the sources wrapper object: walks ast.ast, not ast itself.
  astWalker.walk(ast.ast, callback);
  return found;
}
// Decode the source-map entry at `index`, filling any omitted field from the
// nearest preceding entry that specifies it (solc's backward-delta format).
// A field of "-1" (or empty) is treated as unspecified.
function atIndex (index, mapping) {
  const entries = mapping.split(';');
  const last = entries.length - 1;
  const ret = {};
  for (let k = index > last ? last : index; k >= 0; k--) {
    const entry = entries[k];
    if (!entry.length) {
      continue; // fully-inherited entry: keep scanning backwards
    }
    const [s, l, f, j] = entry.split(':');
    if (ret.start === undefined && s && s !== '-1') {
      ret.start = parseInt(s);
    }
    if (ret.length === undefined && l && l !== '-1') {
      ret.length = parseInt(l);
    }
    if (ret.file === undefined && f && f !== '-1') {
      ret.file = parseInt(f);
    }
    if (ret.jump === undefined && j) {
      ret.jump = j;
    }
    // Stop as soon as every field has been resolved.
    if (ret.start !== undefined && ret.length !== undefined &&
        ret.file !== undefined && ret.jump !== undefined) {
      break;
    }
  }
  return ret;
}
module.exports = SourceMappingDecoder;

170
formatters/text.js Normal file
View File

@ -0,0 +1,170 @@
// Fixed-width visual separator between report sections.
const separator = '-'.repeat(20);
// Indent used for nested report lines.
const indent = ' '.repeat(4);
// MythX test-case account roles, keyed by convention (see
// guessAccountRoleByAddress for the matching address prefixes).
const roles = {
  creator: 'CREATOR',
  attacker: 'ATTACKER',
  other: 'USER'
};
// Namespace object: all formatter functions hang off of this.
const textFormatter = {};
// Base-10 string-to-integer helper, safe to pass to Array#map.
textFormatter.strToInt = str => parseInt(str, 10);
// Classify an account by its MythX convention address prefix:
// 0xaffeaffe… → creator, 0xdeadbeef… → attacker, anything else → plain user.
textFormatter.guessAccountRoleByAddress = address => {
  switch (address.toLowerCase().substr(0, 10)) {
    case '0xaffeaffe':
      return roles.creator;
    case '0xdeadbeef':
      return roles.attacker;
    default:
      return roles.other;
  }
};
// Render any value for display: strings pass through, numbers are stringified,
// null becomes 'null', everything else is JSON-encoded.
textFormatter.stringifyValue = value => {
  if (typeof value === 'string') {
    return value;
  }
  if (typeof value === 'number') {
    return String(value);
  }
  if (value === null) {
    return 'null';
  }
  return JSON.stringify(value);
};
// Render a test case's transaction steps as numbered "Tx #n" entries.
// `fnHashes` optionally maps 4-byte selectors to function names for nicer
// labels; unknown selectors fall back to step.name or '<N/A>'.
textFormatter.formatTestCaseSteps = (steps, fnHashes = {}) => {
  const output = [];
  // `s` walks the raw steps; `n` numbers only the steps actually rendered.
  for (let s = 0, n = 0; s < steps.length; s++) {
    const step = steps[s];
    /**
     * Empty address means "contract creation" transaction.
     *
     * Skip it to not spam.
     */
    if (step.address === '') {
      continue;
    }
    n++;
    const type = textFormatter.guessAccountRoleByAddress(step.origin);
    // Calldata is "0x" followed by the 8-hex-char function selector.
    const fnHash = step.input.substr(2, 8);
    const fnName = fnHashes[fnHash] || step.name || '<N/A>';
    const fnDesc = `${fnName} [ ${fnHash} ]`;
    output.push(
      `Tx #${n}:`,
      indent + `Origin: ${step.origin} [ ${type} ]`,
      indent + `Function: ${textFormatter.stringifyValue(fnDesc)}`,
      indent + `Calldata: ${textFormatter.stringifyValue(step.input)}`
    );
    if ('decodedInput' in step) {
      output.push(`${indent}Decoded Calldata: ${step.decodedInput}`);
    }
    output.push(
      `${indent}Value: ${textFormatter.stringifyValue(step.value)}`,
      ''
    );
  }
  return output.join('\n').trimRight();
};
// Build the "Transaction Sequence" section for one test case, or undefined
// when there is nothing to show.
textFormatter.formatTestCase = (testCase, fnHashes) => {
  if (!testCase.steps) {
    return undefined;
  }
  const content = textFormatter.formatTestCaseSteps(testCase.steps, fnHashes);
  if (!content) {
    return undefined;
  }
  return ['Transaction Sequence:', '', content].join('\n');
};
// Extract the source snippet referenced by a "start:length:…" source map.
textFormatter.getCodeSample = (source, src) => {
  const parts = src.split(':');
  const start = textFormatter.strToInt(parts[0]);
  const length = textFormatter.strToInt(parts[1]);
  return source.substr(start, length);
};
/**
 * Render an issue's source range as "from line:col to line:col".
 *
 * @param message - ESLint-style message with line/column/endLine/endCol
 * @returns human-readable location string
 */
textFormatter.formatLocation = message => {
  const start = `${message.line}:${message.column}`;
  // Bug fix: the end column previously rendered as the literal text
  // "{message.endCol}" because the template placeholder was missing its "$".
  const finish = `${message.endLine}:${message.endCol}`;
  return `from ${start} to ${finish}`;
};
// Render one issue ("message") as a multi-line report section: a
// title/severity/file header, optional SWC link, the issue description, the
// offending code sample, and any MythX test cases that reproduce the issue.
textFormatter.formatMessage = (message, filePath, sourceCode, fnHashes) => {
  const { mythxIssue, mythxTextLocations } = message;
  const output = [];
  output.push(
    `==== ${mythxIssue.swcTitle || 'N/A'} ====`,
    `Severity: ${mythxIssue.severity}`,
    `File: ${filePath}`
  );
  // ruleId carries the SWC link; 'N/A' marks issues without one.
  if (message.ruleId !== 'N/A') {
    output.push(`Link: ${message.ruleId}`);
  }
  output.push(
    separator,
    mythxIssue.description.head,
    mythxIssue.description.tail
  );
  // Use the first text location's source map to extract a code sample.
  const code = mythxTextLocations.length
    ? textFormatter.getCodeSample(sourceCode, mythxTextLocations[0].sourceMap)
    : undefined;
  output.push(
    separator,
    `Location: ${textFormatter.formatLocation(message)}`,
    '',
    code || '<code not available>'
  );
  const testCases = mythxIssue.extra && mythxIssue.extra.testCases;
  if (testCases) {
    for (const testCase of testCases) {
      const content = textFormatter.formatTestCase(testCase, fnHashes);
      if (content) {
        output.push(separator, content);
      }
    }
  }
  return output.join('\n');
};
// Format every message of a single analysis result, separated by blank lines.
textFormatter.formatResult = result => {
  const { filePath, sourceCode, functionHashes } = result;
  const formatted = [];
  for (const message of result.messages) {
    formatted.push(
      textFormatter.formatMessage(message, filePath, sourceCode, functionHashes)
    );
  }
  return formatted.join('\n\n');
};
// Entry point: render every result, with a blank line between result blocks.
textFormatter.run = results => {
  const sections = [];
  for (const result of results) {
    sections.push(textFormatter.formatResult(result));
  }
  return sections.join('\n\n');
};
module.exports = (results) => textFormatter.run(results);

132
index.js
View File

@ -1,132 +0,0 @@
const mythx = require('./mythx')
const commandLineArgs = require('command-line-args')
module.exports = function(embark) {
let contracts;
// Register for compilation results
embark.events.on("contracts:compiled:solc", (res) => {
contracts = res;
});
embark.registerConsoleCommand({
description: "Run MythX analysis",
matches: (cmd) => {
const cmdName = cmd.match(/".*?"|\S+/g)
return (Array.isArray(cmdName) &&
cmdName[0] === 'verify' &&
cmdName[1] != 'help' &&
cmdName[1] != 'status' &&
cmdName.length >= 1)
},
usage: "verify [options] [contracts]",
process: async (cmd, callback) => {
const cmdName = cmd.match(/".*?"|\S+/g)
// Remove first element, as we know it's the command
cmdName.shift()
let cfg = parseOptions({ "argv": cmdName })
try {
embark.logger.info("Running MythX analysis in background.")
const returnCode = await mythx.analyse(contracts, cfg, embark)
if (returnCode === 0) {
return callback(null, "MythX analysis found no vulnerabilities.")
} else if (returnCode === 1) {
return callback("MythX analysis found vulnerabilities!", null)
} else if (returnCode === 2) {
return callback("Internal MythX error encountered.", null)
} else {
return callback(new Error("\nUnexpected Error: return value of `analyze` should be either 0 or 1."), null)
}
} catch (e) {
return callback(e, "ERR: " + e.message)
}
}
})
embark.registerConsoleCommand({
description: "Help",
matches: (cmd) => {
const cmdName = cmd.match(/".*?"|\S+/g)
return (Array.isArray(cmdName) &&
(cmdName[0] === 'verify' &&
cmdName[1] === 'help'))
},
usage: "verify help",
process: (cmd, callback) => {
return callback(null, help())
}
})
function help() {
return (
"Usage:\n" +
"\tverify [--full] [--debug] [--limit] [--initial-delay] [<contracts>]\n" +
"\tverify status <uuid>\n" +
"\tverify help\n" +
"\n" +
"Options:\n" +
"\t--full, -f\t\t\tPerform full rather than quick analysis.\n" +
"\t--debug, -d\t\t\tAdditional debug output.\n" +
"\t--limit, -l\t\t\tMaximum number of concurrent analyses.\n" +
"\t--initial-delay, -i\t\tTime in seconds before first analysis status check.\n" +
"\n" +
"\t[<contracts>]\t\t\tList of contracts to submit for analysis (default: all).\n" +
"\tstatus <uuid>\t\t\tRetrieve analysis status for given MythX UUID.\n" +
"\thelp\t\t\t\tThis help.\n"
)
}
embark.registerConsoleCommand({
description: "Check MythX analysis status",
matches: (cmd) => {
const cmdName = cmd.match(/".*?"|\S+/g)
return (Array.isArray(cmdName) &&
cmdName[0] === 'verify' &&
cmdName[1] == 'status' &&
cmdName.length == 3)
},
usage: "verify status <uuid>",
process: async (cmd, callback) => {
const cmdName = cmd.match(/".*?"|\S+/g)
try {
const returnCode = await mythx.getStatus(cmdName[2], embark)
if (returnCode === 0) {
return callback(null, "returnCode: " + returnCode)
} else if (returnCode === 1) {
return callback()
} else {
return callback(new Error("Unexpected Error: return value of `analyze` should be either 0 or 1."), null)
}
} catch (e) {
return callback(e, "ERR: " + e.message)
}
}
})
function parseOptions(options) {
const optionDefinitions = [
{ name: 'full', alias: 'f', type: Boolean },
{ name: 'debug', alias: 'd', type: Boolean },
{ name: 'limit', alias: 'l', type: Number },
{ name: 'initial-delay', alias: 'i', type: Number },
{ name: 'contracts', type: String, multiple: true, defaultOption: true }
]
const parsed = commandLineArgs(optionDefinitions, options)
if(parsed.full) {
parsed.analysisMode = "full"
} else {
parsed.analysisMode = "quick"
}
return parsed
}
}

View File

@ -1,416 +0,0 @@
'use strict';
const path = require('path');
const assert = require('assert');
const SourceMappingDecoder = require('remix-lib/src/sourceMappingDecoder');
const srcmap = require('./srcmap');
const mythx = require('./mythXUtil');
// Map MythX severity labels onto ESLint-style numeric severities
// (2 = error, 1 = warning); unknown labels fall back to 1 at use sites.
const mythx2Severity = {
    High: 2,
    Medium: 1,
};

// An issue is fatal when explicitly flagged or when its severity is 2.
const isFatal = (fatal, severity) => fatal || severity === 2;

// De-duplicate messages by their JSON serialisation, keeping first-seen order.
const getUniqueMessages = messages => {
    const seen = new Set();
    const unique = [];
    for (const message of messages) {
        const key = JSON.stringify(message);
        if (!seen.has(key)) {
            seen.add(key);
            unique.push(JSON.parse(key));
        }
    }
    return unique;
};

// Count messages that qualify as errors / warnings respectively.
const calculateErrors = messages =>
    messages.filter(({ fatal, severity }) => isFatal(fatal, severity)).length;

const calculateWarnings = messages =>
    messages.filter(({ fatal, severity }) => !isFatal(fatal, severity)).length;
// Rebuild each issue group with de-duplicated messages and freshly computed
// error/warning counters.
const getUniqueIssues = issues =>
    issues.map(issue => {
        const { messages, ...rest } = issue;
        const uniqueMessages = getUniqueMessages(messages);
        return {
            ...rest,
            messages: uniqueMessages,
            errorCount: calculateErrors(uniqueMessages),
            warningCount: calculateWarnings(uniqueMessages),
        };
    });
// Decide whether an issue survives the user's filters: it is dropped when its
// severity is below the configured threshold, or when its SWC id appears in
// the configured blacklist.
const keepIssueInResults = function (issue, config) {
    const belowThreshold =
        Boolean(config.severityThreshold) && issue.severity < config.severityThreshold;
    const blacklisted =
        Boolean(config.swcBlacklist) && config.swcBlacklist.includes(issue.ruleId);
    return !(belowThreshold || blacklisted);
};
/**
 * Wraps the MythX analysis results for a single compiled contract and
 * converts the raw issue groups into ESLint-style report objects.
 */
class MythXIssues {
    /**
     * @param {Object} buildObj - Embark/solc build artifact for one contract.
     * @param {Object} config - plugin configuration; `debug` and `logger`
     *   are read here.
     */
    constructor(buildObj, config) {
        this.issues = [];
        this.logs = [];
        // MythX-shaped JSON derived from the Embark build output.
        this.buildObj = mythx.embark2MythXJSON(buildObj);
        this.debug = config.debug;
        this.logger = config.logger;
        this.sourceMap = this.buildObj.sourceMap;
        this.sourcePath = buildObj.sourcePath;
        this.deployedSourceMap = this.buildObj.deployedSourceMap;
        // Lookup table: deployed-bytecode byte offset -> instruction number.
        this.offset2InstNum = srcmap.makeOffset2InstNum(this.buildObj.deployedBytecode);
        this.contractName = buildObj.contractName;
        this.sourceMappingDecoder = new SourceMappingDecoder();
        // Per-source AST and line-break tables for offset -> line/column work.
        this.asts = this.mapAsts(this.buildObj.sources);
        this.lineBreakPositions = this.mapLineBreakPositions(this.sourceMappingDecoder, this.buildObj.sources);
    }
    /**
     * Ingest raw MythX issue groups: drop ignorable text-format issues,
     * remap the rest into the internal shape, and collect any API logs.
     */
    setIssues(issueGroups) {
        for (let issueGroup of issueGroups) {
            if (issueGroup.sourceType === 'solidity-file' &&
                issueGroup.sourceFormat === 'text') {
                const filteredIssues = [];
                for (const issue of issueGroup.issues) {
                    for (const location of issue.locations) {
                        if (!this.isIgnorable(location.sourceMap)) {
                            filteredIssues.push(issue);
                        }
                    }
                }
                issueGroup.issues = filteredIssues;
            }
        }
        const remappedIssues = issueGroups.map(mythx.remapMythXOutput);
        this.issues = remappedIssues
            .reduce((acc, curr) => acc.concat(curr), []);
        issueGroups.forEach(issueGroup => {
            this.logs = this.logs.concat((issueGroup.meta && issueGroup.meta.logs) || []);
        });
    }
    // Map each source path to its line-break offsets (only for sources that
    // actually carry source text).
    mapLineBreakPositions(decoder, sources) {
        const result = {};
        Object.entries(sources).forEach(([ sourcePath, { source } ]) => {
            if (source) {
                result[sourcePath] = decoder.getLinebreakPositions(source);
            }
        });
        return result;
    }
    // Map each source path to its AST.
    mapAsts (sources) {
        const result = {};
        Object.entries(sources).forEach(([ sourcePath, { ast } ]) => {
            result[sourcePath] = ast;
        });
        return result;
    }
    /**
     * Whether an issue at the given source-map location can be ignored.
     * Issues on dynamically-allocated array declarations are filtered out
     * (see the debug message below for the rationale recorded by the author).
     */
    isIgnorable(sourceMapLocation) {
        const basename = path.basename(this.sourcePath);
        if (!( basename in this.asts)) {
            return false;
        }
        const ast = this.asts[basename];
        const node = srcmap.isVariableDeclaration(sourceMapLocation, ast);
        if (node && srcmap.isDynamicArray(node)) {
            if (this.debug) {
                // this might break if logger is undefined.
                const logger = this.logger || console;
                logger.log('**debug: Ignoring Mythril issue around ' +
                    'dynamically-allocated array.');
            }
            return true;
        } else {
            return false;
        }
    }
    /**
     * Convert a deployed-bytecode byte offset into [start, end] line/column
     * positions (1-based lines). Falls back to line -1 when unresolvable.
     */
    byteOffset2lineColumn(bytecodeOffset, lineBreakPositions) {
        const instNum = this.offset2InstNum[bytecodeOffset];
        const sourceLocation = this.sourceMappingDecoder.atIndex(instNum, this.deployedSourceMap);
        assert(sourceLocation, 'sourceMappingDecoder.atIndex() should not return null');
        const loc = this.sourceMappingDecoder
            .convertOffsetToLineColumn(sourceLocation, lineBreakPositions || []);
        // Decoder lines are 0-based; reports use 1-based lines.
        if (loc.start) {
            loc.start.line++;
        }
        if (loc.end) {
            loc.end.line++;
        }
        const start = loc.start || { line: -1, column: 0 };
        const end = loc.end || {};
        return [start, end];
    }
    /**
     * Convert a "start:length" text source-map entry into [start, end]
     * line/column positions (1-based lines).
     */
    textSrcEntry2lineColumn(srcEntry, lineBreakPositions) {
        const ary = srcEntry.split(':');
        const sourceLocation = {
            length: parseInt(ary[1], 10),
            start: parseInt(ary[0], 10),
        };
        const loc = this.sourceMappingDecoder
            .convertOffsetToLineColumn(sourceLocation, lineBreakPositions || []);
        if (loc.start) {
            loc.start.line++;
        }
        if (loc.end) {
            loc.end.line++;
        }
        return [loc.start, loc.end];
    }
    /**
     * Convert one MythX issue into an ESLint-style message object.
     * `spaceLimited` keeps only the short description head; locations are
     * resolved differently for bytecode vs text source formats.
     */
    issue2EsLint(issue, spaceLimited, sourceFormat, sourceName) {
        const esIssue = {
            fatal: false,
            ruleId: issue.swcID,
            message: spaceLimited ? issue.description.head : `${issue.description.head} ${issue.description.tail}`,
            severity: mythx2Severity[issue.severity] || 1,
            mythXseverity: issue.severity,
            line: -1,
            column: 0,
            endLine: -1,
            endCol: 0,
        };
        let startLineCol, endLineCol;
        const lineBreakPositions = this.lineBreakPositions[sourceName];
        if (sourceFormat === 'evm-byzantium-bytecode') {
            // Pick out first byteCode offset value
            const offset = parseInt(issue.sourceMap.split(':')[0], 10);
            [startLineCol, endLineCol] = this.byteOffset2lineColumn(offset, lineBreakPositions);
        } else if (sourceFormat === 'text') {
            // Pick out first srcEntry value
            const srcEntry = issue.sourceMap.split(';')[0];
            [startLineCol, endLineCol] = this.textSrcEntry2lineColumn(srcEntry, lineBreakPositions);
        }
        if (startLineCol) {
            esIssue.line = startLineCol.line;
            esIssue.column = startLineCol.column;
            esIssue.endLine = endLineCol.line;
            esIssue.endCol = endLineCol.column;
        }
        return esIssue;
    }
    /**
     * Convert one remapped MythX report into an ESLint-style result object
     * (message list plus error/warning counters), applying user filters.
     */
    convertMythXReport2EsIssue(report, config, spaceLimited) {
        const { issues, sourceFormat, source } = report;
        const result = {
            errorCount: 0,
            warningCount: 0,
            fixableErrorCount: 0,
            fixableWarningCount: 0,
            filePath: source,
        };
        const sourceName = path.basename(source);
        result.messages = issues
            .map(issue => this.issue2EsLint(issue, spaceLimited, sourceFormat, sourceName))
            .filter(issue => keepIssueInResults(issue, config));
        result.warningCount = result.messages.reduce((acc, { fatal, severity }) =>
            !isFatal(fatal , severity) ? acc + 1: acc, 0);
        result.errorCount = result.messages.reduce((acc, { fatal, severity }) =>
            isFatal(fatal , severity) ? acc + 1: acc, 0);
        return result;
    }
    // All stored reports converted to ESLint-style results.
    getEslintIssues(config, spaceLimited = false) {
        return this.issues.map(report => this.convertMythXReport2EsIssue(report, config, spaceLimited));
    }
}
function doReport(config, objects, errors, notAnalyzedContracts) {
    // Print analysis results, MythX logs and internal errors.
    // Returns 0 (clean), 1 (issues/logs found) or 2 (internal errors).
    let ret = 0;
    // Return true if we should show log.
    // Ignore logs with log.level "info" unless the "debug" flag has been set.
    function showLog(log) {
        return config.debug || (log.level !== 'info');
    }
    // Return 1 if vulnerabilities were found.
    objects.forEach(obj => {
        obj.issues.forEach(report => {
            if (report.issues.length > 0) {
                ret = 1;
            }
        });
    });
    // Styles with their own pagination get the full message text.
    const spaceLimited = ['tap', 'markdown', 'json'].indexOf(config.style) === -1;
    const eslintIssues = objects
        .map(obj => obj.getEslintIssues(config, spaceLimited))
        .reduce((acc, curr) => acc.concat(curr), []);
    // FIXME: temporary solution until backend will return correct filepath and output.
    const eslintIssuesByBaseName = groupEslintIssuesByBasename(eslintIssues);
    const uniqueIssues = getUniqueIssues(eslintIssuesByBaseName);
    printSummary(objects, uniqueIssues, config.logger);
    const formatter = getFormatter(config.style);
    config.logger.info(formatter(uniqueIssues));
    // BUG FIX: the original fed the mapped objects through a no-op
    // reduce/concat; map alone produces the same array.
    const logGroups = objects.map(obj =>
        ({ 'sourcePath': obj.sourcePath, 'logs': obj.logs, 'uuid': obj.uuid }));
    // BUG FIX: the original Array.some() callbacks returned undefined, so they
    // never short-circuited; returning the predicate result is equivalent and
    // stops at the first matching log.
    const haveLogs = logGroups.some(logGroup => logGroup.logs.some(showLog));
    if (haveLogs) {
        ret = 1;
        config.logger.info('MythX Logs:');
        logGroups.forEach(logGroup => {
            config.logger.info(`\n${logGroup.sourcePath}`.yellow);
            config.logger.info(`UUID: ${logGroup.uuid}`.yellow);
            logGroup.logs.forEach(log => {
                if (showLog(log)) {
                    config.logger.info(`${log.level}: ${log.msg}`);
                }
            });
        });
    }
    if (errors.length > 0) {
        ret = 2;
        config.logger.error('Internal MythX errors encountered:'.red);
        errors.forEach(err => {
            config.logger.error(err.error || err);
            if (config.debug > 1 && err.stack) {
                config.logger.info(err.stack);
            }
        });
    }
    return ret;
}
function printSummary(objects, uniqueIssues, logger) {
    // Print a per-group summary with a pass/fail marker and issue count for
    // each analysed contract.
    if (!objects || !objects.length) {
        return;
    }
    logger.info('\nMythX Report Summary'.underline.bold);
    const groupBy = 'groupId';
    const groups = objects.reduce((accum, curr) => {
        const issue = uniqueIssues.find(candidate => candidate.filePath === curr.buildObj.mainSource);
        // BUG FIX: a failed lookup previously crashed on `issue.errorCount`;
        // skip entries that have no matching report.
        if (!issue) {
            return accum;
        }
        const issueCount = issue.errorCount + issue.warningCount;
        const marking = issueCount > 0 ? '✖'.red : '✔︎'.green;
        (accum[curr[groupBy]] = accum[curr[groupBy]] || []).push(
            ` ${marking} ${issue.filePath.cyan}: ${issueCount} issues ${curr.uuid.dim.bold}`);
        return accum;
    }, {});
    let count = 0;
    Object.keys(groups).forEach(groupId => {
        logger.info(` ${++count}. Group ${groupId.bold.dim}:`);
        Object.values(groups[groupId]).forEach(contract => {
            logger.info(contract);
        });
    });
}
function getFormatter(style) {
    // Resolve an ESLint formatter module by name (default: "stylish").
    const formatterName = style || 'stylish';
    // BUG FIX: ESLint moved its bundled formatters from lib/formatters to
    // lib/cli-engine/formatters in v6 (this package now pins eslint 6.8.0).
    // Try the new location first, then fall back to the pre-v6 path.
    const candidates = [
        `eslint/lib/cli-engine/formatters/${formatterName}`,
        `eslint/lib/formatters/${formatterName}`,
    ];
    let lastError;
    for (const candidate of candidates) {
        try {
            return require(candidate);
        } catch (ex) {
            lastError = ex;
        }
    }
    lastError.message = `\nThere was a problem loading formatter option: ${style} \nError: ${
        lastError.message
    }`;
    throw lastError;
}
const groupEslintIssuesByBasename = issues => {
    // Merge results whose file paths share a basename: counts accumulate,
    // messages concatenate, and each group's messages end up sorted by
    // source location.
    const path = require('path');
    const grouped = issues.reduce((accum, issue) => {
        const basename = path.basename(issue.filePath);
        // First occurrence of a basename creates the group and fixes filePath.
        const group = accum[basename] || (accum[basename] = {
            errorCount: 0,
            warningCount: 0,
            fixableErrorCount: 0,
            fixableWarningCount: 0,
            filePath: issue.filePath,
            messages: [],
        });
        group.errorCount += issue.errorCount;
        group.warningCount += issue.warningCount;
        group.fixableErrorCount += issue.fixableErrorCount;
        group.fixableWarningCount += issue.fixableWarningCount;
        group.messages = group.messages.concat(issue.messages);
        return accum;
    }, {});
    const issueGroups = Object.values(grouped);
    for (const group of issueGroups) {
        group.messages.sort(compareMessLCRange);
    }
    return issueGroups;
};
function compareMessLCRange(mess1, mess2) {
    // Order messages by start position, breaking ties on the end position.
    const byStart = compareLineCol(mess1.line, mess1.column, mess2.line, mess2.column);
    if (byStart !== 0) {
        return byStart;
    }
    return compareLineCol(mess1.endLine, mess1.endCol, mess2.endLine, mess2.endCol);
}
function compareLineCol(line1, column1, line2, column2) {
    // Lexicographic comparison: by line first, by column on equal lines.
    if (line1 !== line2) {
        return line1 - line2;
    }
    return column1 - column2;
}
// Public surface of the issue-conversion/reporting module.
module.exports = {
    MythXIssues,
    keepIssueInResults,
    getUniqueIssues,
    getUniqueMessages,
    isFatal,
    doReport
};

View File

@ -1,185 +0,0 @@
'use strict';
const armlet = require('armlet');
const fs = require('fs');
const path = require('path');
const util = require('util');
const srcmap = require('./srcmap');
const getContractFiles = directory => {
    // List the compiled-contract JSON artifacts in `directory`, excluding
    // Embark's bundled ENS registry/resolver artifacts, which are not user
    // contracts. NOTE: this function depends on `path`, which was not
    // required by this module's header (fixed alongside this change).
    const excluded = ['ENSRegistry.json', 'FIFSRegistrar.json', 'Resolver.json'];
    return fs
        .readdirSync(directory)
        .filter(f => !excluded.includes(f))
        .map(f => path.join(directory, f));
};
function getFoundContractNames(contracts, contractNames) {
    // Collect the names present in `contracts`; when a `contractNames`
    // filter is supplied, keep only the names it mentions.
    const found = [];
    for (const { contractName } of contracts) {
        if (!contractNames || contractNames.indexOf(contractName) >= 0) {
            found.push(contractName);
        }
    }
    return found;
}
const getNotFoundContracts = (allContractNames, foundContracts) => {
    // Requested names that did not turn up in the compilation output.
    // An absent request list means nothing can be missing.
    if (!allContractNames) {
        return [];
    }
    return allContractNames.filter(name => foundContracts.indexOf(name) < 0);
};
const buildRequestData = contractObjects => {
    // Flatten Embark's compilation output into one record per contract,
    // attaching only the sources each contract's solc metadata references.
    // Reads every source file from disk and keys it by basename.
    const allSources = {};
    for (const [sourcePath, data] of Object.entries(contractObjects.sources)) {
        allSources[path.basename(sourcePath)] = {
            ast: data.ast,
            legacyAST: data.legacyAST,
            source: fs.readFileSync(sourcePath, 'utf8')
        };
    }
    let allContracts = [];
    for (const [fileKey, contractFile] of Object.entries(contractObjects.contracts)) {
        for (const [contractKey, contractJSON] of Object.entries(contractFile)) {
            // The metadata's "sources" keys are absolute paths; keep only the
            // sources this contract actually depends on.
            const sourcesToInclude = Object.keys(JSON.parse(contractJSON.metadata).sources);
            const sources = {};
            for (const [key, value] of Object.entries(allSources)) {
                if (sourcesToInclude.includes(value.ast.absolutePath)) {
                    sources[key] = value;
                }
            }
            allContracts = allContracts.concat({
                contractName: contractKey,
                bytecode: contractJSON.evm.bytecode.object,
                deployedBytecode: contractJSON.evm.deployedBytecode.object,
                sourceMap: contractJSON.evm.bytecode.sourceMap,
                deployedSourceMap: contractJSON.evm.deployedBytecode.sourceMap,
                sources,
                sourcePath: fileKey
            });
        }
    }
    return allContracts;
};
const embark2MythXJSON = function(embarkJSON, toolId = 'embark-mythx') {
    // Translate one Embark compiled-contract record into the request shape
    // expected by the MythX API.
    const {
        contractName,
        bytecode,
        deployedBytecode,
        sourceMap,
        deployedSourceMap,
        sourcePath,
        sources
    } = embarkJSON;
    // The source list carries each dependency's absolute path from its AST.
    const sourceList = Object.values(sources).map(entry => entry.ast.absolutePath);
    return {
        contractName,
        bytecode,
        deployedBytecode,
        sourceMap,
        deployedSourceMap,
        mainSource: path.basename(sourcePath),
        sourceList: sourceList,
        sources,
        toolId
    };
};
const remapMythXOutput = mythObject => {
    // Re-shape a raw MythX response into one bucket per source file.
    const mapped = mythObject.sourceList.map(source => ({
        source,
        sourceType: mythObject.sourceType,
        sourceFormat: mythObject.sourceFormat,
        issues: [],
    }));
    if (mythObject.issues) {
        mythObject.issues.forEach(issue => {
            issue.locations.forEach(({ sourceMap }) => {
                // The third srcmap field is the file index. BUG FIX: split()
                // yields strings, so the original `=== -1` (number) could
                // never match; compare against '-1'.
                let sourceListIndex = sourceMap.split(':')[2];
                if (sourceListIndex === '-1') {
                    // FIXME: We need to decide where to attach issues
                    // that don't have any file associated with them.
                    // For now we'll pick 0 which is probably the main starting point
                    sourceListIndex = 0;
                }
                // FIXME: all issues are currently attached to the first
                // source; honouring sourceListIndex needs correct filepaths
                // from the backend first.
                mapped[0].issues.push({
                    swcID: issue.swcID,
                    swcTitle: issue.swcTitle,
                    description: issue.description,
                    extra: issue.extra,
                    severity: issue.severity,
                    sourceMap,
                });
            });
        });
    }
    return mapped;
};
const cleanAnalyzeDataEmptyProps = (data, debug, logger) => {
    // Drop empty/absent bytecode and source-map fields before submission;
    // in debug mode, report which fields were dropped.
    const { bytecode, deployedBytecode, sourceMap, deployedSourceMap, ...result } = data;
    const unusedFields = [];
    const keepIf = (name, value, usable) => {
        if (usable) {
            result[name] = value;
        } else {
            unusedFields.push(name);
        }
    };
    // '0x' is solc's "no bytecode" placeholder (e.g. interfaces/abstracts).
    keepIf('bytecode', bytecode, Boolean(bytecode) && bytecode !== '0x');
    keepIf('deployedBytecode', deployedBytecode, Boolean(deployedBytecode) && deployedBytecode !== '0x');
    keepIf('sourceMap', sourceMap, Boolean(sourceMap));
    keepIf('deployedSourceMap', deployedSourceMap, Boolean(deployedSourceMap));
    if (debug && unusedFields.length > 0) {
        logger.debug(`${result.contractName}: Empty JSON data fields from compilation - ${unusedFields.join(', ')}`);
    }
    return result;
};
// Helpers shared by the Embark-to-MythX translation layer.
module.exports = {
    remapMythXOutput,
    embark2MythXJSON,
    buildRequestData,
    getNotFoundContracts,
    getFoundContractNames,
    getContractFiles,
    cleanAnalyzeDataEmptyProps
}

View File

@ -1,78 +0,0 @@
'use strict';
const assert = require('assert');
const remixUtil = require('remix-lib/src/util');
const SourceMappingDecoder = require('../compat/remix-lib/sourceMappingDecoder.js');
const opcodes = require('remix-lib/src/code/opcodes');
module.exports = {
isVariableDeclaration: function (srcmap, ast) {
const sourceMappingDecoder = new SourceMappingDecoder();
const sourceLocation = sourceMappingDecoder.decode(srcmap);
return sourceMappingDecoder.findNodeAtSourceLocation('VariableDeclaration',
sourceLocation, ast);
},
isDynamicArray: function (node) {
return (node.stateVariable &&
node.visibility === 'public' &&
node.typeName.nodeType === 'ArrayTypeName');
},
makeOffset2InstNum: function(hexstr) {
const bytecode = remixUtil.hexToIntArray(hexstr);
const instMap = {};
let j = -1;
for (let i = 0; i < bytecode.length; i++) {
j++;
const opcode = opcodes(bytecode[i], true);
if (opcode.name.slice(0, 4) === 'PUSH') {
let length = bytecode[i] - 0x5f;
i += length;
}
instMap[i] = j;
}
return instMap;
},
seenIndices: function(sourceMap) {
const seen = new Set();
const srcArray = sourceMap.split(';');
for (const src of srcArray) {
const fields = src.split(':');
if (fields.length >= 3) {
const index = fields[2];
// File index -1 means no file exists.
// Value '' means that the field is empty but present
// to be able to give a 4th value.
// Skip either of these.
if (index !== '-1' && index !== '') {
seen.add(index);
}
}
}
return seen;
},
zeroedSourceMap: function(sourceMap) {
const srcArray = sourceMap.split(';');
let modArray = [];
let indexSeen = -2;
for (const src of srcArray) {
const fields = src.split(':');
if (fields.length >= 3) {
const index = fields[2];
if (index !== '-1' && index !== '') {
if (indexSeen !== -2) {
assert(indexSeen === index,
`assuming only one index ${indexSeen} needs moving; saw ${index} as well`);
}
fields[2] = '0';
}
}
const modFields = fields.join(':');
modArray.push(modFields);
}
return modArray.join(';');
},
};

201
mythx.js
View File

@ -1,201 +0,0 @@
require('dotenv').config()
const armlet = require('armlet')
const fs = require('fs')
const yaml = require('js-yaml');
const mythXUtil = require('./lib/mythXUtil');
const asyncPool = require('tiny-async-pool');
const { MythXIssues, doReport } = require('./lib/issues2eslint');
const defaultConcurrentAnalyses = 4
function checkEnvVariables(embark) {
    // Migrate the deprecated MYTHX_ETH_ADDRESS variable to MYTHX_USERNAME,
    // warning the user about the rename.
    if (process.env.MYTHX_ETH_ADDRESS) {
        process.env.MYTHX_USERNAME = process.env.MYTHX_ETH_ADDRESS;
        embark.logger.warn("The environment variable MYTHX_ETH_ADDRESS has been deprecated in favour of MYTHX_USERNAME and will be removed in future versions. Please update your .env file or your environment variables accordingly.");
    }
    // Both credentials are required to connect to MythX via armlet.
    // BUG FIX: the message previously contained the raw HTML entity "&ETH;"
    // instead of the intended "Ð" character (ÐApp).
    if (!process.env.MYTHX_USERNAME || !process.env.MYTHX_PASSWORD) {
        throw new Error("Environment variables 'MYTHX_USERNAME' and 'MYTHX_PASSWORD' not found. Place these in a .env file in the root of your ÐApp, add them in the CLI command, ie 'MYTHX_USERNAME=xyz MYTHX_PASSWORD=123 embark run', or add them to your system's environment variables.");
    }
}
async function analyse(contracts, cfg, embark) {
    // Submit the selected compiled contracts to MythX and report the results.
    // Returns 0 (nothing to do / clean), 1 (bad arguments), or doReport's code.
    cfg.logger = embark.logger;
    // Validate the concurrency limit before doing any network work.
    const limit = cfg.limit || defaultConcurrentAnalyses;
    if (isNaN(limit)) {
        embark.logger.info(`limit parameter should be a number; got ${limit}.`);
        return 1;
    }
    if (limit < 0 || limit > defaultConcurrentAnalyses) {
        embark.logger.info(`limit should be between 0 and ${defaultConcurrentAnalyses}.`);
        return 1;
    }
    checkEnvVariables(embark);
    const armletClient = new armlet.Client({
        clientToolName: "embark-mythx",
        password: process.env.MYTHX_PASSWORD,
        ethAddress: process.env.MYTHX_USERNAME,
    });
    // Honour the plugin-level ignore list and, when given, the explicit
    // cfg.contracts selection.
    if (!("ignore" in embark.pluginConfig)) {
        embark.pluginConfig.ignore = [];
    }
    const ignored = embark.pluginConfig.ignore;
    const selected = ("contracts" in cfg) ? cfg.contracts : null;
    const toSubmit = { "contracts": {}, "sources": contracts.sources };
    for (const [filename, contractObjects] of Object.entries(contracts.contracts)) {
        for (const [contractName, contract] of Object.entries(contractObjects)) {
            if (ignored.indexOf(contractName) !== -1) {
                continue;
            }
            if (selected && selected.indexOf(contractName) < 0) {
                continue;
            }
            if (!toSubmit.contracts[filename]) {
                toSubmit.contracts[filename] = {};
            }
            toSubmit.contracts[filename][contractName] = contract;
        }
    }
    // Stop here if no contracts are left after filtering.
    if (Object.keys(toSubmit.contracts).length === 0) {
        embark.logger.info("No contracts to submit.");
        return 0;
    }
    const submitObjects = mythXUtil.buildRequestData(toSubmit);
    const { objects, errors } = await doAnalysis(armletClient, cfg, submitObjects, null, limit);
    return doReport(cfg, objects, errors);
}
async function getStatus(uuid, embark) {
    // Look up a previously submitted analysis by UUID and print its issues.
    checkEnvVariables(embark);
    // Connect to MythX via armlet using the credentials from the environment.
    const client = new armlet.Client({
        clientToolName: "embark-mythx",
        password: process.env.MYTHX_PASSWORD,
        ethAddress: process.env.MYTHX_USERNAME,
    });
    await client.login();
    try {
        // The API expects lower-case UUIDs.
        const results = await client.getIssues(uuid.toLowerCase());
        return ghettoReport(embark.logger, results);
    } catch (err) {
        embark.logger.warn(err);
        return 1;
    }
}
const doAnalysis = async (armletClient, config, contracts, contractNames = null, limit) => {
    // Submit each contract for analysis with at most `limit` jobs in flight,
    // returning { errors, objects } with per-contract failures and results.
    // BUG FIX: `util` is used below (util.inspect) but was never required by
    // this module; require it locally to avoid a ReferenceError.
    const util = require('util');
    const timeout = (config.timeout || 300) * 1000;
    const initialDelay = ('initial-delay' in config) ? config['initial-delay'] * 1000 : undefined;
    const results = await asyncPool(limit, contracts, async buildObj => {
        const obj = new MythXIssues(buildObj, config);
        let analyzeOpts = {
            clientToolName: 'embark-mythx',
            timeout,
            initialDelay
        };
        analyzeOpts.data = mythXUtil.cleanAnalyzeDataEmptyProps(obj.buildObj, config.debug, config.logger);
        analyzeOpts.data.analysisMode = config.full ? "full" : "quick";
        if (config.debug > 1) {
            config.logger.debug("analyzeOpts: " + `${util.inspect(analyzeOpts, { depth: null })}`);
        }
        // request analysis to armlet.
        try {
            //TODO: Call analyze/analyzeWithStatus asynchronously
            config.logger.info("Submitting '" + obj.contractName + "' for " + analyzeOpts.data.analysisMode + " analysis...")
            const { issues, status } = await armletClient.analyzeWithStatus(analyzeOpts);
            obj.uuid = status.uuid;
            obj.groupId = status.groupId;
            if (status.status === 'Error') {
                return [status, null];
            } else {
                obj.setIssues(issues);
            }
            return [null, obj];
        } catch (err) {
            // Normalise whatever was thrown into a printable string.
            let errStr;
            if (typeof err === 'string') {
                errStr = `${err}`;
            } else if (typeof err.message === 'string') {
                errStr = err.message;
            } else {
                errStr = `${util.inspect(err)}`;
            }
            // Timeouts are reported in yellow, other failures in red.
            if (errStr.includes('User or default timeout reached after')
                || errStr.includes('Timeout reached after')) {
                return [(buildObj.contractName + ": ").yellow + errStr, null];
            } else {
                return [(buildObj.contractName + ": ").red + errStr, null];
            }
        }
    });
    // Split the [error, object] pairs into their respective collections.
    return results.reduce((accum, curr) => {
        const [err, obj] = curr;
        if (err) {
            accum.errors.push(err);
        } else if (obj) {
            accum.objects.push(obj);
        }
        return accum;
    }, { errors: [], objects: [] });
};
function ghettoReport(logger, results) {
    // Minimal plain-text report: dump every issue as YAML per source group.
    // Returns 1 when any issue exists, 0 otherwise.
    const issuesCount = results.reduce((count, group) => count + group.issues.length, 0);
    if (issuesCount === 0) {
        logger.info('No issues found');
        return 0;
    }
    for (const group of results) {
        logger.info(group.sourceList.join(', ').underline);
        for (const issue of group.issues) {
            logger.info(yaml.safeDump(issue, { 'skipInvalid': true }));
        }
    }
    return 1;
}
// Plugin entry points: `analyse` submits contracts, `getStatus` queries a UUID.
module.exports = {
    analyse,
    getStatus
}

2819
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -1,29 +1,58 @@
{
"name": "embark-mythx",
"version": "1.0.4",
"description": "MythX plugin for Status Embark",
"repository": "github:flex-dapps/embark-mythx",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [
"embark",
"embark-plugin",
"mythx",
"smart contract",
"security analysis",
"solidity"
],
"author": "sebastian@flexdapps.com",
"version": "2.0.0",
"license": "MIT",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
"files": [
"formatters",
"dist",
"src"
],
"engines": {
"node": ">=10"
},
"scripts": {
"start": "tsdx watch",
"build": "tsdx build",
"test": "tsdx test",
"lint": "tsdx lint",
"prepare": "tsdx build"
},
"peerDependencies": {},
"husky": {
"hooks": {
"pre-commit": "tsdx lint"
}
},
"prettier": {
"printWidth": 80,
"semi": true,
"singleQuote": true
},
"name": "embark-mythx",
"author": "emizzle",
"module": "dist/embark-mythx.esm.js",
"devDependencies": {
"@types/command-line-args": "5.0.0",
"@types/date-fns": "2.6.0",
"@types/jest": "^25.1.4",
"husky": "^4.2.3",
"tsdx": "^0.13.0",
"tslib": "^1.11.1",
"typescript": "^3.8.3"
},
"dependencies": {
"armlet": "^2.7.0",
"command-line-args": "^5.1.1",
"dotenv": "^7.0.0",
"eslint": "^5.16.0",
"minimist": "^1.2.0",
"remix-lib": "^0.4.6",
"tiny-async-pool": "^1.0.4"
"ascii-table": "0.0.9",
"chalk": "3.0.0",
"command-line-args": "5.1.1",
"command-line-usage": "6.1.0",
"date-fns": "2.11.1",
"dotenv": "8.2.0",
"embark-core": "5.3.0-nightly.13",
"embark-logger": "5.3.0-nightly.12",
"eslint": "6.8.0",
"mythxjs": "1.3.11",
"remix-lib": "0.4.23",
"tiny-async-pool": "1.1.0"
}
}

BIN
screenshot.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 157 KiB

79
src/analysis.ts Normal file
View File

@ -0,0 +1,79 @@
import {
Args,
CompiledContract,
CompiledSources,
CompilationInputs,
FunctionHashes
} from './types';
import { replaceLinkedLibs } from './utils';
import * as path from 'path';
import { Data } from './client';
const TOOL_NAME = 'embark-mythx';
export default class Analysis {
  /**
   * Bundles one contract's compilation artifacts with the metadata needed
   * to build a MythX analysis request.
   */
  constructor(
    public contract: CompiledContract,
    public sources: CompiledSources,
    public inputs: CompilationInputs,
    public contractName: string,
    public contractFileName: string
  ) {}

  /**
   * Formats data for the MythX API request body.
   */
  public getRequestData(args: Args) {
    const { bytecode, deployedBytecode } = this.contract.evm;
    const data: Data = {
      contractName: this.contractName,
      bytecode: replaceLinkedLibs(bytecode.object),
      sourceMap: bytecode.sourceMap,
      deployedBytecode: replaceLinkedLibs(deployedBytecode.object),
      deployedSourceMap: deployedBytecode.sourceMap,
      sourceList: [],
      analysisMode: args.options?.mode,
      toolName: TOOL_NAME,
      noCacheLookup: args.options?.noCacheLookup,
      sources: {},
      mainSource: path.basename(this.contractFileName)
    };
    // Register each compiled source under its basename, pairing the AST with
    // the original input text.
    Object.keys(this.sources).forEach(key => {
      const basename = path.basename(key);
      data.sourceList.push(basename);
      data.sources[basename] = {
        ast: this.sources[key].ast,
        source: this.inputs[key].content
      };
    });
    return data;
  }

  /**
   * Returns a dictionary mapping keccak256 selector hashes to function
   * signatures for this contract.
   *
   * Identical signatures overwrite each other, as their hashes are the same
   * regardless of which contract defines them.
   *
   * @returns Dictionary keyed by the 4-byte hex selector, valued with the
   * corresponding function signature.
   */
  public getFunctionHashes() {
    const hashes: FunctionHashes = {};
    Object.entries(this.contract.evm.methodIdentifiers).forEach(
      ([signature, hash]) => {
        hashes[hash] = signature;
      }
    );
    return hashes;
  }
}

180
src/cli.ts Normal file
View File

@ -0,0 +1,180 @@
import * as chalk from 'chalk';
import { Format, Mode, ALL_CONTRACTS } from './types';
// command-line-args definition for the shared `--format,-o` output option;
// reused by the `verify report` sub-command as well as `verify` itself.
export const FORMAT_OPT = {
  name: 'format',
  alias: 'o',
  type: String,
  defaultValue: Format.Stylish,
  typeLabel: '{underline string}',
  description:
    'Output format. Options: text, stylish, compact, table, html, json (default: stylish).',
  group: 'options'
};
// All `verify` command options in command-line-args format, tagged with a
// `group` ('options', 'deprecated' or 'obsolete') so the usage screen can
// list them separately.
export const CLI_OPTS = [
  // tslint:disable-next-line: max-line-length
  {
    name: 'mode',
    alias: 'm',
    type: String,
    defaultValue: Mode.Quick,
    typeLabel: '{underline string}',
    description:
      'Analysis mode. Options: quick, standard, deep (default: quick).',
    group: 'options'
  },
  FORMAT_OPT,
  {
    name: 'no-cache-lookup',
    alias: 'c',
    type: Boolean,
    defaultValue: false,
    description: 'Deactivate MythX cache lookups (default: false).',
    group: 'options'
  },
  {
    name: 'debug',
    alias: 'd',
    type: Boolean,
    defaultValue: false,
    description: 'Print MythX API request and response.',
    group: 'options'
  },
  {
    name: 'limit',
    alias: 'l',
    type: Number,
    defaultValue: 10,
    description: 'Maximum number of concurrent analyses (default: 10).',
    group: 'options'
  },
  // Positional default option: bare contract names select what to analyse.
  {
    name: 'contracts',
    type: String,
    multiple: true,
    defaultValue: ALL_CONTRACTS,
    defaultOption: true,
    description: 'List of contracts to submit for analysis (default: all).',
    group: 'options'
  },
  {
    name: 'timeout',
    alias: 't',
    type: Number,
    description:
      'Timeout in secs to wait for analysis to finish (default: smart default based on mode).',
    group: 'options'
  },
  // deprecated
  {
    name: 'initial-delay',
    alias: 'i',
    type: Number,
    defaultValue: 0,
    description:
      '[DEPRECATED] Time in seconds before first analysis status check (default: 0).',
    group: 'deprecated'
  },
  // obsolete
  {
    name: 'full',
    alias: 'f',
    type: Boolean,
    description:
      '[OBSOLETE] Perform full instead of quick analysis (not available on free MythX tier).',
    group: 'obsolete'
  }
];
// Sub-commands of `verify`, used to render the "Available Commands" section
// of the usage screen.
export const CLI_COMMANDS = [
  {
    name: 'verify',
    typeLabel: '{italic <options> [contracts]}',
    description:
      'Runs MythX verification. If array of contracts are specified, only those contracts will be analysed.'
  },
  {
    name: 'verify report',
    type: String,
    typeLabel: '{italic [--format] uuid}',
    description: 'Get the report of a completed analysis.'
  },
  {
    name: 'verify status',
    type: String,
    typeLabel: '{italic uuid}',
    description: 'Get the status of an already submitted analysis.'
  },
  {
    name: 'verify list',
    description: 'Displays a list of the last 20 submitted analyses in a table.'
  },
  {
    name: 'verify help',
    type: Boolean,
    defaultValue: false,
    description: 'Display this usage guide.'
  }
];
// ASCII-art banner rendered at the top of the CLI usage screen.
export const header =
'Smart contract security analysis with MythX\n\n' +
// tslint:disable: no-trailing-whitespace
chalk.blueBright(` ::::::: \` :::::::\` \`\`\` \`\`\` \`\` \`\` \`\`\`
+++++++\` +++++++\` ...\` \`... .\` .. \`.\` \`.\`
\`\`\`:+++///: -///+++/\`\`\` ..\`. .\`.. \`\` \`\` \`\`..\`\` ..\`\`\`\` \`.. \`.\`
-++++++/ :++++++: .. .\`. .. \`.\` .\` \`.\` ..\` \`.. ...
/++/ :+++ .. ..\` .. .. .. .\` .. \`. \`...\`
\`\`\`\`////\`\`\`:///.\`\`\` .. .. .\` .\` .\` .. \`. \`.\` \`.\`
-+++\` \`+++- +++: .. .. \`... ..\`\` .. \`. \`.\` \`..
.:::\` \`:::. :::. \`\` \`\` ..\` \`\`\`\` \`\` \`\` \`\` \`\`\`
\`\`..
\`
`);
// tslint:enable: no-trailing-whitespace
// Sections for command-line-usage: banner, commands, examples and options.
export const CLI_USAGE = [
  {
    header: 'embark-mythx',
    content: header,
    raw: true
  },
  {
    header: 'Available Commands',
    // BUG FIX: the original wrapped CLI_COMMANDS in `new Set(...values())`,
    // which dedupes by object reference and so never removes anything;
    // a plain map() is equivalent and clearer.
    content: CLI_COMMANDS.map(command => ({
      name: `${command.name} ${command.typeLabel || ''}`,
      summary: command.description
    }))
  },
  {
    header: 'Examples',
    content: [
      {
        name: 'verify --mode full SimpleStorage ERC20',
        summary:
          'Runs a full MythX verification for the SimpleStorage and ERC20 contracts only.'
      },
      {
        name: 'verify status 0d60d6b3-e226-4192-b9c6-66b45eca3746',
        summary:
          'Gets the status of the MythX analysis with the specified uuid.'
      },
      {
        name:
          'verify report --format stylish 0d60d6b3-e226-4192-b9c6-66b45eca3746',
        // BUG FIX: this summary was copy-pasted from the "status" example.
        summary:
          'Gets the report of the MythX analysis with the specified uuid.'
      }
    ]
  },
  {
    header: 'Verify options',
    hide: ['contracts'],
    optionList: CLI_OPTS,
    group: ['options']
  }
];

123
src/client.ts Normal file
View File

@ -0,0 +1,123 @@
import { Environment, CompilationInputs, FunctionHashes } from './types';
import { Client as MythXClient } from 'mythxjs';
/**
 * Request payload submitted to the MythX analyze endpoint.
 */
export interface Data {
  contractName: string;
  bytecode: string;
  sourceMap: any;
  deployedBytecode: string;
  deployedSourceMap: any;
  // Basenames of every source that takes part in the compilation.
  sourceList: string[];
  analysisMode: string;
  toolName: string;
  noCacheLookup: boolean;
  sources: Sources | CompilationInputs;
  // Basename of the contract file being analysed.
  mainSource: string;
  functionHashes?: FunctionHashes;
}
/** Per-source map pairing each file's AST with its original text. */
interface Sources {
  [key: string]: {
    ast: any;
    source: string;
  };
}
export default class Client {
  private mythXClient: MythXClient;

  /**
   * Builds the underlying mythxjs client from the environment's credentials
   * (username/password and/or API key) and API URL.
   */
  constructor(env: Environment) {
    const { apiUrl, username, password, apiKey } = env;
    this.mythXClient = new MythXClient(
      username,
      password,
      undefined,
      apiUrl,
      apiKey
    );
  }

  // Always throws: aborts polling with a human-readable reason plus the
  // job's last known status.
  public failAnalysis(reason: string, status: string) {
    throw new Error(
      reason +
        ' ' +
        'The analysis job state is ' +
        status.toLowerCase() +
        ' and the result may become available later.'
    );
  }

  /**
   * Polls the analysis status until it reaches 'Error' or 'Finished', or
   * throws via failAnalysis when the time/request budget is exhausted.
   *
   * @param uuid - analysis job identifier
   * @param initialDelay - wait in ms before the first follow-up poll
   * @param timeout - overall budget in ms for the whole polling loop
   */
  public async awaitAnalysisFinish(
    uuid: string,
    initialDelay: number,
    timeout: number
  ) {
    const statuses = ['Error', 'Finished'];
    let state = await this.mythXClient.getAnalysisStatus(uuid);
    if (statuses.includes(state.status)) {
      return state;
    }
    const timer = (interval: number) =>
      new Promise(resolve => setTimeout(resolve, interval));
    const maxRequests = 10;
    const start = Date.now();
    const remaining = Math.max(timeout - initialDelay, 0);
    // 285 = 1^2 + ... + 9^2, so the quadratic delays (inverted * r)^2 of the
    // nine follow-up polls sum to roughly `remaining`.
    const inverted = Math.sqrt(remaining) / Math.sqrt(285);
    for (let r = 0; r < maxRequests; r++) {
      // First wait uses initialDelay; later waits grow quadratically but
      // never sleep past the overall deadline.
      const idle = Math.min(
        r === 0 ? initialDelay : (inverted * r) ** 2,
        start + timeout - Date.now()
      );
      // eslint-disable-next-line no-await-in-loop
      await timer(idle);
      if (Date.now() - start >= timeout) {
        this.failAnalysis(
          `User or default timeout reached after ${timeout / 1000} sec(s).`,
          state.status
        );
      }
      // eslint-disable-next-line no-await-in-loop
      state = await this.mythXClient.getAnalysisStatus(uuid);
      if (statuses.includes(state.status)) {
        return state;
      }
    }
    this.failAnalysis(
      `Allowed number (${maxRequests}) of requests was reached.`,
      state.status
    );
  }

  /** Logs in to the MythX API. */
  public async authenticate() {
    return this.mythXClient.login();
  }

  /** Submits one contract's data for analysis. */
  public async submitDataForAnalysis(data: Data) {
    return this.mythXClient.analyze(data);
  }

  /** Fetches the detected issues for a finished analysis. */
  public async getReport(uuid: string) {
    return this.mythXClient.getDetectedIssues(uuid);
  }

  /** Returns MythX API version information. */
  public async getApiVersion() {
    return this.mythXClient.getVersion();
  }

  /** Lists previously submitted analyses. */
  public async getAnalysesList() {
    return this.mythXClient.getAnalysesList();
  }

  /** Returns the raw status object for one analysis. */
  public async getAnalysisStatus(uuid: string) {
    return this.mythXClient.getAnalysisStatus(uuid);
  }
}

250
src/controllers/analyze.ts Normal file
View File

@ -0,0 +1,250 @@
import { Logger } from 'embark-logger';
import {
CompilationInputs,
CompilationResult,
Args,
ALL_CONTRACTS,
Environment,
Mode,
Format,
CompiledSource,
CompiledContract
} from '../types';
import * as chalk from 'chalk';
import Analysis from '../analysis';
import Controller from '.';
import * as util from 'util';
import ReportController from './report';
const asyncPool = require('tiny-async-pool');
export default class AnalyzeController extends Controller {
// Wires up shared controller state (env + logger) and keeps the plugin
// configuration (e.g. the `ignore` list) for use in runAll().
constructor(
  private env: Environment,
  protected logger: Logger,
  private pluginConfig: any
) {
  super(env, logger);
}
public async runAll(
  compilationResult: CompilationResult,
  compilationInputs: CompilationInputs,
  args: Args
) {
  // Validate CLI arguments and authenticate before submitting anything.
  this.checkArgs(args);
  await this.login();
  this.logger.info('Running MythX analysis...');
  const ignore = this.pluginConfig.ignore ?? [];
  const requested = args.options?.contracts as string[];
  // A single sentinel entry means "analyse everything".
  const wantsAll =
    args.options?.contracts?.length === 1 &&
    args.options?.contracts[0] === ALL_CONTRACTS;
  // Drop ignored contracts, then apply the user's contract selection.
  const analyses = this.splitCompilationResult(
    compilationInputs,
    compilationResult
  ).filter(
    analysis =>
      !ignore.includes(analysis.contractName) &&
      (wantsAll || requested.includes(analysis.contractName))
  );
  if (analyses.length === 0) {
    return this.logger.warn(
      'No contracts to analyse. Check command contract filter and plugin ignore (in embark.json).'
    );
  }
  // Run concurrent analyses based on limit arg
  await asyncPool(args.options.limit, analyses, (analysis: Analysis) =>
    this.run(analysis, args)
  );
  this.logger.info('Done!');
}
private async run(analysis: Analysis, args: Args) {
  // Submit one contract, wait for the job to finish, then fetch and print
  // its report. Errors are logged, not rethrown (see catch below).
  try {
    const data = analysis.getRequestData(args);
    if (args.options?.debug) {
      this.logger.info('-------------------');
      this.logger.info('MythX Request Body:\n');
      this.logger.info(util.inspect(data, false, null, true));
    }
    const { uuid } = await this.client.submitDataForAnalysis(data);
    this.logger.info(
      'Analysis job submitted: ' +
        chalk.yellow('https://dashboard.mythx.io/#/console/analyses/' + uuid)
    );
    this.logger.info(
      `Analyzing ${analysis.contractName} in ${args.options.mode} mode...`
    );
    // Polling windows (in ms) scale with the requested analysis depth.
    let initialDelay;
    let timeout;
    if (args.options.mode === 'quick') {
      initialDelay = 20 * 1000;
      timeout = 180 * 1000;
    } else if (
      args.options.mode === 'standard' ||
      args.options.mode === 'full'
    ) {
      initialDelay = 900 * 1000;
      timeout = 1800 * 1000;
    } else {
      initialDelay = 2700 * 1000;
      timeout = 5400 * 1000;
    }
    if (args.options?.timeout) {
      // BUG FIX: --timeout is documented in seconds, but the deadlines above
      // are milliseconds; convert before overriding.
      timeout = args.options.timeout * 1000;
    }
    await this.client.awaitAnalysisFinish(uuid, initialDelay, timeout);
    this.logger.info(
      `Retrieving ${analysis.contractName} analysis results...`
    );
    const reportController = new ReportController(this.env, this.logger);
    return reportController.run(
      uuid,
      args?.options?.format,
      analysis.inputs,
      analysis,
      false
    );
  } catch (err) {
    // cannot rethrow here as we are stuck in a concurrent pool of parallel
    // API requests that may potentially all fail after the initial error
    this.logger.error(`Error analyzing contract: ${err.message}`);
  }
}
private checkArgs(args: Args) {
  // Reject obsolete/deprecated flags and validate enum-valued options early,
  // before any network traffic happens.
  if (args.obsolete?.full) {
    throw new Error(
      'The --full,f option is now OBSOLETE. Please use --mode full instead.'
    );
  }
  if (args.deprecated?.initialDelay) {
    this.logger.warn(
      'The --initial-delay,i option is DEPRECATED and will be removed in future versions.'
    );
  }
  const validModes = Object.values(Mode);
  if (!validModes.includes(args.options.mode)) {
    throw new Error(
      'Invalid analysis mode. Available modes: quick, standard, deep.'
    );
  }
  const validFormats = Object.values(Format);
  if (!validFormats.includes(args.options.format)) {
    throw new Error(
      `Invalid output format. Available formats: ${validFormats.join(', ')}.`
    );
  }
}
/**
 * Splits a whole-project compilation result into one Analysis per input
 * source file, so each MythX submission contains only the target contract
 * plus the sources its solc metadata lists as dependencies.
 *
 * Files already included as a dependency of an earlier analysis are
 * skipped. When a file defines multiple contracts (a case MythX may not
 * support), a warning is logged and the contract with the longest bytecode
 * is chosen as the analysis target.
 *
 * @param {CompilationInputs} compilationInputs - raw source contents keyed by file path
 * @param {CompilationResult} compilationResult - solc output for the whole project
 * @returns {Analysis[]} one analysis unit per (non-dependency) input file
 */
private splitCompilationResult(
  compilationInputs: CompilationInputs,
  compilationResult: CompilationResult
): Analysis[] {
  const compilationResults: Analysis[] = [];
  const inputFilePaths = Object.keys(compilationInputs ?? {});
  for (const inputFilePath of inputFilePaths) {
    // skip files already pulled in as a dependency of a previous analysis
    if (
      compilationResults.some(analysis =>
        Object.keys(analysis.sources).includes(inputFilePath)
      )
    ) {
      continue;
    }
    let contractName;
    let contract;
    const contractList = compilationResult.contracts[inputFilePath];
    const contractListNames = Object.keys(contractList);
    const sources: { [key: string]: CompiledSource } = {};
    // when there are multiple contract definitions in one contract file,
    // warn the user up front that MythX may not support this
    if (contractListNames.length > 1) {
      this.logger.warn(
        `Contract file '${inputFilePath}' contains multiple contract definitions ('${contractListNames.join(
          "', '"
        )}'). MythX may not support this case and therefore the results produced may not be correct.`
      );
    }
    for (const [compiledContractName, compiledContract] of Object.entries(
      contractList
    )) {
      // solc metadata lists exactly the sources this contract depends on
      const sourcesToInclude = Object.keys(
        JSON.parse(compiledContract.metadata).sources
      );
      const sourcesFiltered = Object.entries(
        compilationResult.sources
      ).filter(([, { ast }]) => sourcesToInclude.includes(ast.absolutePath));
      // TODO: Use Object.fromEntries when lib can target CommonJS or min node
      // version supports ES6
      sourcesFiltered.forEach(([key, value]) => {
        sources[key] = value;
      });
      if (
        // in the case of only 1 contract (this is the only supported MythX case anyway)
        !contract ||
        // in the case where there are multiple contracts are defined in one contract file
        // this is currently NOT supported by MythX, but we can try to handle it
        compiledContract.evm?.bytecode?.object?.length >
          contract.evm?.bytecode?.object?.length
      ) {
        contract = compiledContract;
        contractName = compiledContractName;
      }
    }
    compilationResults.push(
      new Analysis(
        contract as CompiledContract,
        sources,
        compilationInputs,
        contractName as string,
        inputFilePath
      )
    );
  }
  // NOTE: a `multipleContractDefs` dictionary used to be declared here and
  // iterated at the end, but it was never populated (the warning above is
  // emitted inline instead), so that dead code has been removed.
  return compilationResults;
}
}

15
src/controllers/index.ts Normal file
View File

@ -0,0 +1,15 @@
import { Logger } from 'embark-logger';
import { Environment } from '../types';
import Client from '../client';
/**
 * Common base for all console-command controllers. Owns the MythX API
 * client instance and provides the shared authentication step used by
 * subclasses before issuing API calls.
 */
export default abstract class Controller {
  protected client: Client;

  constructor(env: Environment, protected logger: Logger) {
    this.client = new Client(env);
  }

  /**
   * Authenticates the MythX user via the API client, logging progress.
   * @returns the client's authentication result.
   */
  protected async login() {
    this.logger.info('Authenticating MythX user...');
    const authentication = this.client.authenticate();
    return authentication;
  }
}

36
src/controllers/list.ts Normal file
View File

@ -0,0 +1,36 @@
import Controller from '.';
import { Environment } from '../types';
import { Logger } from 'embark-logger';
import { formatDistance } from 'date-fns';
const AsciiTable = require('ascii-table');
/**
 * Console command controller for `verify list`: fetches the user's past
 * MythX analyses and renders them as an ASCII table.
 */
export default class ListController extends Controller {
  /**
   * Authenticates, fetches the analyses list, and returns a printable
   * table (mode, contract, vulnerability counts, relative submission
   * time, and UUID per row).
   * @returns {string} rendered ASCII table, or a notice when empty.
   */
  public async run() {
    await this.login();
    const list = await this.client.getAnalysesList();
    const analyses = list.analyses.map((a: any) => {
      return {
        Mode: a.analysisMode,
        Contract: a.mainSource,
        Vulnerabilities: Object.entries(a.numVulnerabilities)
          .map(([level, num]) => `${level}: ${num}`)
          .join(', '),
        Submitted: formatDistance(new Date(a.submittedAt), new Date()) + ' ago',
        UUID: a.uuid
      };
    });
    // guard: deriving the table heading from analyses[0] below would throw
    // a TypeError when the user has no past analyses
    if (!analyses.length) {
      return 'No past analyses found.';
    }
    const table = AsciiTable.factory({
      title: 'Past analyses',
      heading: Object.keys(analyses[0]),
      rows: Object.values(analyses).map(analysis =>
        Object.values(analysis as any[])
      )
    });
    return table.toString();
  }
}

331
src/controllers/report.ts Normal file
View File

@ -0,0 +1,331 @@
import Controller from '.';
import { Environment, Format } from '../types';
import { Logger } from 'embark-logger';
import { CompilationInputs } from '../types';
import * as path from 'path';
import chalk from 'chalk';
import Analysis from '../analysis';
const eslintCliEngine = require('eslint').CLIEngine;
const SourceMappingDecoder = require('remix-lib/src/sourceMappingDecoder');
// Maps MythX severity names to ESLint severity codes (2 = error,
// 1 = warning). Looked up by string key — e.g. Severity['High'] === 2 —
// in issue2EsLint, where unknown severities fall back to 1 via `|| 1`.
enum Severity {
  High = 2,
  Medium = 1
}
/**
 * Retrieves MythX analysis reports by UUID and renders their issues
 * through an ESLint-compatible formatter.
 */
export default class ReportController extends Controller {
  // remix-lib SourceMappingDecoder instance; converts solc source-map
  // entries into line/column locations.
  private decoder: any;

  constructor(env: Environment, logger: Logger) {
    super(env, logger);
    this.decoder = new SourceMappingDecoder();
  }

  /**
   * Fetches the issue report for an analysis and renders it.
   *
   * @param {string} uuid - UUID of the MythX analysis
   * @param {Format} format - output format name
   * @param {object} inputs - compilation inputs keyed by source file path
   * @param {object} analysis - analysis context when run right after submission
   * @param {boolean} doLogin - whether to authenticate before fetching
   * @throws when uuid is missing
   */
  public async run(
    uuid: string,
    format: Format,
    inputs: CompilationInputs,
    analysis: Analysis | null = null,
    doLogin = true
  ) {
    if (!uuid) {
      throw new Error("Argument 'uuid' must be provided.");
    }
    if (doLogin) {
      await this.login();
    }
    const issues = await this.client.getReport(uuid);
    // await so rendering failures propagate to the caller instead of
    // surfacing as unhandled promise rejections
    await this.render(issues, format, inputs, analysis);
  }

  /**
   * Converts raw MythX issues to ESLint-style results, deduplicates them,
   * and logs the formatted report (or a success message when clean).
   */
  public async render(
    issues: any,
    format: Format,
    inputs: CompilationInputs,
    analysis: Analysis | null = null
  ) {
    this.logger.info(
      `Rendering ${analysis?.contractName ?? ''} analysis report...`
    );
    const functionHashes = analysis?.getFunctionHashes() ?? {};
    const data = { functionHashes, sources: { ...inputs } };
    const uniqueIssues = this.formatIssues(data, issues);
    if (uniqueIssues.length === 0) {
      this.logger.info(
        chalk.green(
          `✔ No errors/warnings found for contract: ${analysis?.contractName}`
        )
      );
    } else {
      const formatter = this.getFormatter(format);
      const output = formatter(uniqueIssues);
      this.logger.info(output);
    }
  }

  /**
   * @param {string} name - formatter name
   * @returns {object} - ESLint formatter module; names in `custom` resolve
   *                     to this package's own formatters directory
   */
  private getFormatter(name: Format) {
    const custom = ['text'];
    let format: string = name;
    if (custom.includes(name)) {
      format = path.join(__dirname, '../formatters/', name + '.js');
    }
    return eslintCliEngine.getFormatter(format);
  }

  /**
   * Turn a srcmap entry (the thing between semicolons) into a line and
   * column location.
   * We make use of this.sourceMappingDecoder of this class to make
   * the conversion.
   *
   * @param {string} srcEntry - a single entry of solc sourceMap
   * @param {Array} lineBreakPositions - array returned by the function 'mapLineBreakPositions'
   * @returns {object} - line and column location
   */
  private textSrcEntry2lineColumn(srcEntry: string, lineBreakPositions: any) {
    const ary = srcEntry.split(':');
    const sourceLocation = {
      length: parseInt(ary[1], 10),
      start: parseInt(ary[0], 10)
    };
    const loc = this.decoder.convertOffsetToLineColumn(
      sourceLocation,
      lineBreakPositions
    );
    // FIXME: note we are lossy in that we don't return the end location
    if (loc.start) {
      // Adjust because routines starts lines at 0 rather than 1.
      loc.start.line++;
    }
    if (loc.end) {
      loc.end.line++;
    }
    return [loc.start, loc.end];
  }

  /**
   * Convert a MythX issue into an ESLint-style issue.
   * The eslint report format which we use, has these fields:
   *
   * - column,
   * - endCol,
   * - endLine,
   * - fatal,
   * - line,
   * - message,
   * - ruleId,
   * - severity
   *
   * but a MythX JSON report has these fields:
   *
   * - description.head
   * - description.tail,
   * - locations
   * - severity
   * - swcId
   * - swcTitle
   *
   * @param {object} issue - the MythX issue we want to convert
   * @param {string} sourceCode - holds the contract code
   * @param {object[]} locations - array of text-only MythX API issue locations
   * @returns {object} eslint - issue object
   */
  private issue2EsLint(issue: any, sourceCode: string, locations: any) {
    const swcLink = issue.swcID
      ? 'https://swcregistry.io/SWC-registry/docs/' + issue.swcID
      : 'N/A';
    const esIssue = {
      mythxIssue: issue,
      mythxTextLocations: locations,
      sourceCode,
      fatal: false,
      ruleId: swcLink,
      message: issue.description.head,
      severity: Severity[issue.severity] || 1,
      line: -1,
      column: 0,
      endLine: -1,
      endCol: 0
    };
    let startLineCol;
    let endLineCol;
    const lineBreakPositions = this.decoder.getLinebreakPositions(sourceCode);
    if (locations.length) {
      [startLineCol, endLineCol] = this.textSrcEntry2lineColumn(
        locations[0].sourceMap,
        lineBreakPositions
      );
    }
    if (startLineCol) {
      esIssue.line = startLineCol.line;
      esIssue.column = startLineCol.column;
      esIssue.endLine = endLineCol.line;
      esIssue.endCol = endLineCol.column;
    }
    return esIssue;
  }

  /**
   * Gets the source index from the issue sourcemap
   *
   * @param {object} location - MythX API issue location object
   * @returns {number} - source index
   */
  private getSourceIndex(location: any) {
    const sourceMapRegex = /(\d+):(\d+):(\d+)/g;
    const match = sourceMapRegex.exec(location.sourceMap);
    // Ignore `-1` source index for compiler generated code
    return match ? match[3] : '0';
  }

  /**
   * Converts MythX analyze API output item to Eslint compatible object
   * @param {object} report - issue item from the collection MythX analyze API output
   * @param {object} data - Contains array of solidity contracts source code and the input filepath of contract
   * @returns {object} - Eslint compatible object
   */
  private convertMythXReport2EsIssue(report: any, data: any) {
    const { sources, functionHashes } = data;
    const results: { [key: string]: any } = {};
    /**
     * Filters locations only for source files.
     * Other location types are not supported to detect code.
     *
     * @param {object} location - locations to filter
     * @returns {object} - filtered locations
     */
    const textLocationFilterFn = (location: any) =>
      location.sourceType === 'solidity-file' &&
      location.sourceFormat === 'text';
    report.issues.forEach((issue: any) => {
      const locations = issue.locations.filter(textLocationFilterFn);
      const location = locations.length ? locations[0] : undefined;
      let sourceCode = '';
      let sourcePath = '<unknown>';
      if (location) {
        const sourceIndex = parseInt(this.getSourceIndex(location) ?? 0, 10);
        // if DApp's contracts have changed, we can no longer guarantee our sources will be the
        // same as at the time of submission. This should only be an issue when getting a past
        // analysis report (ie verify report uuid), and not during a just-completed analysis (ie verify)
        const fileName = Object.keys(sources)[sourceIndex];
        if (fileName) {
          sourcePath = path.basename(fileName);
          sourceCode = sources[fileName].content;
        }
      }
      if (!results[sourcePath]) {
        results[sourcePath] = {
          errorCount: 0,
          warningCount: 0,
          fixableErrorCount: 0,
          fixableWarningCount: 0,
          filePath: sourcePath,
          functionHashes,
          sourceCode,
          messages: []
        };
      }
      results[sourcePath].messages.push(
        this.issue2EsLint(issue, sourceCode, locations)
      );
    });
    // tally error/warning counts per file after all messages are collected
    for (const key of Object.keys(results)) {
      const result = results[key];
      for (const { fatal, severity } of result.messages) {
        if (this.isFatal(fatal, severity)) {
          result.errorCount++;
        } else {
          result.warningCount++;
        }
      }
    }
    return Object.values(results);
  }

  /**
   * Converts all MythX reports to ESLint results and deduplicates them.
   */
  private formatIssues(data: any, issues: any) {
    const eslintIssues = issues
      .map((report: any) => this.convertMythXReport2EsIssue(report, data))
      .reduce((acc: any, curr: any) => acc.concat(curr), []);
    return this.getUniqueIssues(eslintIssues);
  }

  // A message is fatal if flagged so or has error-level (2) severity.
  private isFatal(fatal: any, severity: any) {
    return fatal || severity === 2;
  }

  // Deduplicates messages by their JSON serialization.
  private getUniqueMessages(messages: any) {
    const jsonValues = messages.map((m: any) => JSON.stringify(m));
    const uniqueValues = jsonValues.reduce((acc: any, curr: any) => {
      if (acc.indexOf(curr) === -1) {
        acc.push(curr);
      }
      return acc;
    }, []);
    return uniqueValues.map((v: any) => JSON.parse(v));
  }

  // Counts error-level messages.
  private calculateErrors(messages: any) {
    return messages.reduce(
      (acc: any, { fatal, severity }: any) =>
        this.isFatal(fatal, severity) ? acc + 1 : acc,
      0
    );
  }

  // Counts warning-level messages.
  private calculateWarnings(messages: any) {
    return messages.reduce(
      (acc: any, { fatal, severity }: any) =>
        !this.isFatal(fatal, severity) ? acc + 1 : acc,
      0
    );
  }

  // Deduplicates each result's messages and recomputes its counts.
  private getUniqueIssues(issues: any) {
    return issues.map(({ messages, ...restProps }: any) => {
      const uniqueMessages = this.getUniqueMessages(messages);
      const warningCount = this.calculateWarnings(uniqueMessages);
      const errorCount = this.calculateErrors(uniqueMessages);
      return {
        ...restProps,
        messages: uniqueMessages,
        errorCount,
        warningCount
      };
    });
  }
}

19
src/controllers/status.ts Normal file
View File

@ -0,0 +1,19 @@
import Controller from '.';
import { Environment } from '../types';
import { Logger } from 'embark-logger';
/**
 * Console command controller for `verify status <uuid>`: fetches the
 * current status of a MythX analysis job.
 */
export default class StatusController extends Controller {
  // NOTE: the former no-op constructor (plus a file-wide eslint-disable)
  // was removed; the inherited Controller constructor has the identical
  // (env, logger) signature, so construction sites are unaffected.

  /**
   * @param {string} uuid - UUID of the analysis to query
   * @returns the status object returned by the MythX API
   * @throws when uuid is missing
   */
  public async run(uuid: string) {
    if (!uuid) {
      throw new Error("Argument 'uuid' must be provided.");
    }
    await this.login();
    return this.client.getAnalysisStatus(uuid);
  }
}

177
src/index.ts Normal file
View File

@ -0,0 +1,177 @@
import { Logger } from 'embark-logger';
import { Callback, Embark } from 'embark-core';
import AnalyzeController from './controllers/analyze';
import StatusController from './controllers/status';
import ListController from './controllers/list';
import ReportController from './controllers/report';
import * as fs from 'fs';
import {
CompilationInputs,
CompilationResult,
Environment,
UuidArgs,
ReportArgs
} from './types';
import { OptionDefinition } from 'command-line-args';
import * as util from 'util';
import { FORMAT_OPT, CLI_USAGE, CLI_OPTS } from './cli';
const commandLineArgs = require('command-line-args');
const commandLineUsage = require('command-line-usage');
require('dotenv').config();
const COMMAND_REGEX = /(?<=verify ?)(.*|\S+)/g;
/**
 * Embark plugin entry point. Captures solc compilation results as they
 * happen and registers the `verify` console command family (analyze,
 * report, list, status, help).
 */
export default class EmbarkMythX {
  // Latest compilation artifacts, captured from 'contracts:compiled:solc'.
  private compilationInputs: CompilationInputs = {};
  private compilationResult?: CompilationResult;
  private logger: Logger;

  constructor(private embark: Embark) {
    this.logger = embark.logger;
    // Register for compilation results
    embark.events.on(
      'contracts:compiled:solc',
      (compilationResult: CompilationResult) => {
        for (const sourcePath of Object.keys(compilationResult.sources)) {
          this.compilationInputs[sourcePath] = {
            content: fs.readFileSync(sourcePath, 'utf8')
          };
        }
        this.compilationResult = compilationResult;
      }
    );
    this.registerConsoleCommands();
  }

  /**
   * Builds the MythX environment from process env vars and validates that
   * usable credentials exist. Only MYTHX_API_KEY authentication is
   * supported; username/password is rejected with guidance.
   * @throws when no credentials are set, or only username/password is set.
   */
  private determineEnv(): Environment {
    const env: Environment = {
      apiKey: process.env.MYTHX_API_KEY,
      username: process.env.MYTHX_USERNAME,
      password: process.env.MYTHX_PASSWORD,
      apiUrl: process.env.MYTHX_API_URL
    };
    if (!env.username) {
      env.username = process.env.MYTHX_ETH_ADDRESS; // for backwards compatibility
    }
    const { username, password, apiKey } = env;
    if (!(username && password) && !apiKey) {
      throw new Error(
        'No authentication credentials could be found. Unauthenticated use of MythX has been discontinued. Sign up for a free account at https://mythx.io/ and set the MYTHX_API_KEY environment variable.'
      );
    }
    if (username && password && !apiKey) {
      throw new Error(
        'You are attempting to authenticate with username/password auth which is no longer supported by mythxjs. Please use MYTHX_API_KEY instead.'
      );
    }
    // At this point an API key is guaranteed to be set (both key-less
    // combinations were rejected above), which is all mythxjs requires.
    // The previous extra check demanded username AND password AND apiKey,
    // which wrongly rejected the documented API-key-only configuration.
    return env;
  }

  // Extracts the (sub)command token, leaving the rest for later parsing.
  private determineArgs(argv: string[]) {
    const mainDefinitions = [{ name: 'command', defaultOption: true }];
    return commandLineArgs(mainDefinitions, { stopAtFirstUnknown: true, argv });
  }

  /**
   * Registers the `verify` console command and dispatches its
   * subcommands (report/list/status/help/analyze).
   */
  private registerConsoleCommands() {
    this.embark.registerConsoleCommand({
      description:
        "Run MythX smart contract analysis. Run 'verify help' for command usage.",
      matches: (cmd: string) => {
        // COMMAND_REGEX carries the 'g' flag, which makes test() stateful
        // through lastIndex; reset it so repeated matching is deterministic.
        COMMAND_REGEX.lastIndex = 0;
        return COMMAND_REGEX.test(cmd);
      },
      usage: 'verify [options] [contracts]',
      process: async (cmd: string, callback: Callback<string>) => {
        // null-safe: match() can return null when the command text changed
        // between matches() and process()
        const cmdName = (cmd.match(COMMAND_REGEX) ?? [''])[0]
          .split(' ')
          .filter(a => a);
        try {
          const env = this.determineEnv();
          const main = this.determineArgs(cmdName);
          const argv = main._unknown ?? main.command ?? [];
          const statusDefinitions: OptionDefinition[] = [
            {
              name: 'uuid',
              type: String,
              defaultOption: true,
              group: 'options'
            }
          ];
          switch (main.command) {
            case 'report': {
              statusDefinitions.push(FORMAT_OPT);
              const reportArgs = commandLineArgs(statusDefinitions, {
                argv
              }) as ReportArgs;
              const reportController = new ReportController(env, this.logger);
              await reportController.run(
                reportArgs?.options?.uuid?.toLowerCase(),
                reportArgs?.options?.format,
                this.compilationInputs
              );
              break;
            }
            case 'list': {
              const listController = new ListController(env, this.logger);
              const list = await listController.run();
              this.logger.info(list);
              break;
            }
            case 'status': {
              const statusArgs = commandLineArgs(statusDefinitions, {
                argv
              }) as UuidArgs;
              const statusController = new StatusController(env, this.logger);
              const status = await statusController.run(
                statusArgs?.options?.uuid?.toLowerCase()
              );
              this.logger.info(util.inspect(status));
              break;
            }
            case 'help':
              this.logger.info(commandLineUsage(CLI_USAGE));
              break;
            default: {
              // no subcommand: treat everything as analyze options/contracts
              const args = commandLineArgs(CLI_OPTS, { argv, camelCase: true });
              const analyzeController = new AnalyzeController(
                env,
                this.logger,
                this.embark.pluginConfig
              );
              await analyzeController.runAll(
                this.compilationResult as CompilationResult,
                this.compilationInputs,
                args
              );
              break;
            }
          }
        } catch (e) {
          return callback(e);
        }
        return callback(null);
      }
    });
  }
}

124
src/types.ts Normal file
View File

@ -0,0 +1,124 @@
// Sentinel value for the `contracts` CLI option meaning "analyze every
// compiled contract".
export const ALL_CONTRACTS = '_ALL_';
// MythX connection settings, sourced from environment variables
// (MYTHX_API_KEY et al.) — see index.ts#determineEnv.
export interface Environment {
  apiKey?: string;
  username?: string;
  password?: string;
  apiUrl?: string;
}
// Analysis depth accepted by `--mode`. 'full' is still accepted; per
// analyze.ts it shares timing with 'standard' — TODO confirm intended
// semantics against the MythX API.
export enum Mode {
  Quick = 'quick',
  Full = 'full',
  Standard = 'standard',
  Deep = 'deep'
}
// Report output formats. 'text' is this package's own formatter; the
// others resolve through ESLint's formatter lookup (see report.ts).
export enum Format {
  Text = 'text',
  Stylish = 'stylish',
  Compact = 'compact',
  Table = 'table',
  Html = 'html',
  Json = 'json'
}
// Parsed CLI arguments for the analyze command, grouped into
// options/deprecated/obsolete by command-line-args groups.
export interface Args {
  options: {
    mode: Mode;
    format: Format;
    noCacheLookup: boolean;
    debug: boolean;
    limit: number;
    contracts: string | string[];
    uuid: string;
    timeout: number; // analysis polling timeout — presumably milliseconds (analyze.ts scales defaults by 1000); TODO confirm
  };
  deprecated?: {
    initialDelay: number;
  };
  obsolete?: {
    full: boolean;
  };
}
// Arguments for commands that take only an analysis UUID (e.g. `status`).
export interface UuidArgs {
  options: {
    uuid: string;
  };
}
// Arguments for the `report` command.
export interface ReportArgs {
  options: {
    uuid: string;
    format: Format;
  };
}
// A single source file's raw contents.
export interface CompilationInput {
  content: string;
}
// Map of source file path -> file contents.
export interface CompilationInputs {
  [filePath: string]: CompilationInput;
}
// Output of the solc compilation step ('contracts:compiled:solc' event).
export interface CompilationResult {
  contracts: CompiledContracts;
  sources: CompiledSources;
  solidityFileName: string;
  compiledContractName?: string;
}
// Map of source file path -> contracts defined in that file.
export interface CompiledContracts {
  [filePath: string]: CompiledContractList;
}
// Map of contract name -> compiled artifact, for one source file.
export interface CompiledContractList {
  [className: string]: CompiledContract;
}
// Subset of the solc standard-JSON output for a single contract.
export interface CompiledContract {
  abi: any[];
  devdoc: {
    methods: object;
  };
  evm: {
    bytecode: {
      sourceMap: string;
      object: string;
    };
    deployedBytecode: {
      sourceMap: string;
      object: string;
    };
    methodIdentifiers: {
      [signature: string]: string;
    };
  };
  // JSON string; parsed for its `sources` dependency list (see
  // splitCompilationResult in analyze.ts)
  metadata: string;
  userdoc: {
    methods: object;
  };
}
// Map of source file path -> per-source compiler output.
export interface CompiledSources {
  [filePath: string]: CompiledSource;
}
// Per-source compiler output: ASTs plus the solc source id.
export interface CompiledSource {
  ast: any;
  id: number;
  legacyAST: any;
}
// Function selector hash -> signature map — TODO confirm key/value
// orientation against Analysis.getFunctionHashes.
export interface FunctionHashes {
  [hash: string]: string;
}
// Bundle tying a compilation result to one selected contract.
export interface CompiledData {
  compiled: CompilationResult;
  contract: CompiledContract;
  contractName: string;
  functionHashes: FunctionHashes;
}

10
src/utils.ts Normal file
View File

@ -0,0 +1,10 @@
/**
 * Strips relative-path prefixes from a source URL/path: removes everything
 * up to and including the last embedded "./" (which also covers "../"),
 * then drops a remaining leading "./" if present.
 */
export function removeRelativePathFromUrl(url: string) {
  const withoutEmbeddedRelative = url.replace(/^.+\.\//, '');
  const withoutLeadingDotSlash = withoutEmbeddedRelative.replace('./', '');
  return withoutLeadingDotSlash;
}
/* Dynamic linking is not supported. */
// solc emits placeholders of the form __$<hash>$__ for unresolved library
// links; substitute each with a 40-hex-digit zero address so the bytecode
// is well-formed for analysis.
const LINK_PLACEHOLDER = /__\$\w+\$__/g;
const ZERO_ADDRESS = '0'.repeat(40);
export function replaceLinkedLibs(byteCode: string) {
  return byteCode.replace(LINK_PLACEHOLDER, ZERO_ADDRESS);
}

5
test/blah.test.ts Normal file
View File

@ -0,0 +1,5 @@
describe('blah', () => {
  it('works', () => {
    // `expect(true)` alone builds an expectation but asserts nothing;
    // attach a matcher so the test actually verifies a value
    expect(true).toBe(true);
  });
});

42
tsconfig.json Normal file
View File

@ -0,0 +1,42 @@
{
"include": [
"src",
"types",
"test", "formatters"
],
"compilerOptions": {
"module": "esnext",
"lib": [
"dom",
"esnext"
],
"importHelpers": true,
"declaration": true,
"sourceMap": true,
"rootDirs": [
"./src",
"./test"
],
"strict": true,
"noImplicitAny": true,
"strictNullChecks": true,
"strictFunctionTypes": true,
"strictPropertyInitialization": true,
"noImplicitThis": true,
"alwaysStrict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"noImplicitReturns": true,
"noFallthroughCasesInSwitch": true,
"moduleResolution": "node",
"baseUrl": "./",
"paths": {
"*": [
"src/*",
"node_modules/*"
]
},
"jsx": "react",
"esModuleInterop": true
}
}

9533
yarn.lock Normal file

File diff suppressed because it is too large Load Diff