mirror of https://github.com/embarklabs/embark.git
feat: import resolver
parent 0d8f23303a
commit 29db66be23
@@ -206,6 +206,7 @@
"@types/node": "10.11.7",
"@types/os-locale": "2.1.0",
"@types/pretty-ms": "3.2.0",
"@types/request": "2.48.1",
"@types/web3": "1.0.12",
"babel-plugin-dynamic-import-node": "2.2.0",
"chai": "4.1.2",
@@ -652,7 +652,7 @@ class EmbarkController {
engine.startService("deployment", {
trackContracts: false,
compileOnceOnly: true,
disableOptimizations: options.coverage
isCoverage: options.coverage
});
engine.startService("storage");
engine.startService("codeGenerator");
@@ -1,5 +1,4 @@
const fs = require('./fs.js');
const File = require('./file.js');
const Plugins = require('./plugins.js');
const utils = require('../utils/utils.js');
const path = require('path');

@@ -11,6 +10,7 @@ const cloneDeep = require('lodash.clonedeep');
import { replaceZeroAddressShorthand } from '../utils/addressUtils';
import { unitRegex } from "../utils/regexConstants";
import * as utilsContractsConfig from "../utils/contractsConfig";
import { File, Types } from "./file";

const DEFAULT_CONFIG_PATH = 'config/';

@@ -59,13 +59,13 @@ var Config = function(options) {
resolver = resolver || function(callback) {
callback(fs.readFileSync(filename).toString());
};
self.contractsFiles.push(new File({filename, type: File.types.custom, path: filename, resolver}));
self.contractsFiles.push(new File({path: filename, type: Types.custom, resolver}));
});

self.events.on('file-remove', (fileType, removedPath) => {
if(fileType !== 'contract') return;
const normalizedPath = path.normalize(removedPath);
self.contractsFiles = self.contractsFiles.filter(file => path.normalize(file.filename) !== normalizedPath);
self.contractsFiles = self.contractsFiles.filter(file => path.normalize(file.path) !== normalizedPath);
});
};
@@ -127,7 +127,7 @@ Config.prototype.loadContractFiles = function() {
if (!this.contractFiles || newContractsFiles.length !== this.contractFiles.length || !deepEqual(newContractsFiles, this.contractFiles)) {
this.contractsFiles = this.contractsFiles.concat(newContractsFiles).filter((file, index, arr) => {
return !arr.some((file2, index2) => {
return file.filename === file2.filename && index < index2;
return file.path === file2.path && index < index2;
});
});
}

@@ -369,11 +369,11 @@ Config.prototype.loadExternalContractsFiles = function() {
return this.logger.error(__("HTTP contract file not found") + ": " + contract.file);
}
const localFile = fileObj.filePath;
this.contractsFiles.push(new File({filename: localFile, type: File.types.http, basedir: '', path: fileObj.url, storageConfig: storageConfig}));
this.contractsFiles.push(new File({path: localFile, type: Types.http, basedir: '', externalUrl: fileObj.url, storageConfig: storageConfig}));
} else if (fs.existsSync(contract.file)) {
this.contractsFiles.push(new File({filename: contract.file, type: File.types.dapp_file, basedir: '', path: contract.file, storageConfig: storageConfig}));
this.contractsFiles.push(new File({path: contract.file, type: Types.dappFile, basedir: '', storageConfig: storageConfig}));
} else if (fs.existsSync(path.join('./node_modules/', contract.file))) {
this.contractsFiles.push(new File({filename: path.join('./node_modules/', contract.file), type: File.types.dapp_file, basedir: '', path: path.join('./node_modules/', contract.file), storageConfig: storageConfig}));
this.contractsFiles.push(new File({path: path.join('./node_modules/', contract.file), type: Types.dappFile, basedir: '', storageConfig: storageConfig}));
} else {
this.logger.error(__("contract file not found") + ": " + contract.file);
}

@@ -571,7 +571,7 @@ Config.prototype.loadFiles = function(files) {
return (file[0] === '$' || file.indexOf('.') >= 0);
}).filter(function(file) {
let basedir = findMatchingExpression(file, files);
readFiles.push(new File({filename: file, type: File.types.dapp_file, basedir: basedir, path: file, storageConfig: storageConfig}));
readFiles.push(new File({path: file, type: Types.dappFile, basedir: basedir, storageConfig: storageConfig}));
});

var filesFromPlugins = [];

@@ -605,7 +605,7 @@ Config.prototype.loadPluginContractFiles = function() {
contractsPlugins.forEach(function(plugin) {
plugin.contractsFiles.forEach(function(file) {
var filename = file.replace('./','');
self.contractsFiles.push(new File({filename: filename, pluginPath: plugin.pluginPath, type: File.types.custom, path: filename, storageConfig: storageConfig, resolver: function(callback) {
self.contractsFiles.push(new File({path: filename, pluginPath: plugin.pluginPath, type: Types.custom, storageConfig: storageConfig, resolver: function(callback) {
callback(plugin.loadPluginFile(file));
}}));
});
@@ -212,7 +212,7 @@ class Engine {
}

setupCompilerAndContractsManagerService(options) {
this.registerModule('compiler', {plugins: this.plugins, disableOptimizations: options.disableOptimizations});
this.registerModule('compiler', {plugins: this.plugins, isCoverage: options.isCoverage});
this.registerModule('solidity', {ipc: this.ipc, useDashboard: this.useDashboard});
this.registerModule('vyper');
this.registerModule('contracts_manager', {plugins: this.plugins, compileOnceOnly: options.compileOnceOnly});
@@ -1,210 +0,0 @@
const async = require('async');
const fs = require('./fs.js');
const path = require('path');
const request = require('request');
const utils = require('../utils/utils');

class File {

constructor(options) {
this.filename = options.filename.replace(/\\/g, '/');
this.type = options.type;
this.path = options.path;
this.basedir = options.basedir;
this.resolver = options.resolver;
this.pluginPath = options.pluginPath ? options.pluginPath : '';
this.downloadedImports = false;
this.importRemappings = []; // mapping downloaded imports to local file
this.storageConfig = options.storageConfig;
this.providerUrl = null;
}

addRemappings(prefix, httpFileObj, level, callback) {
let target = prefix;
if (httpFileObj) {
target = httpFileObj.filePath;
} else if (fs.existsSync(path.join(path.dirname(this.filename), prefix))) {
target = path.join(path.dirname(this.filename), prefix);
} else if (fs.existsSync(path.join("node_modules", prefix))) {
target = path.join("node_modules", prefix);
}

if (target === prefix) return callback();

target = fs.dappPath(target);

const remapping = {
prefix,
target
};

if (httpFileObj) return callback();

if (!this.importRemappings.some(existing => existing.prefix === remapping.prefix)) {
this.importRemappings.push(remapping);
}

fs.readFile(target, (err, importedContract) => {
if (err) return callback(err);
if (!importedContract) return callback(`File not found: ${target}`);
this._parseFileForImport(importedContract.toString(), false, ++level, callback);
});
}
_parseFileForImport(content, isHttpContract, level, callback) {
const self = this;
if (self.filename.indexOf('.sol') < 0) {
// Only supported in Solidity
return callback(null, content);
}
const regex = /import ["']([-a-zA-Z0-9@:%_+.~#?&\/=]+)["'];/g;
const filesToDownload = [];
const pathWithoutFile = path.dirname(self.path);
let newContent = content;
let storageConfig = self.storageConfig;
if (storageConfig && storageConfig.upload && storageConfig.upload.getUrl) {
self.providerUrl = storageConfig.upload.getUrl;
}
let m, matches = [];
while ((m = regex.exec(content))) {
matches.push(m[1]);
}
async.each(matches, (match, next) => {
const httpFileObj = utils.getExternalContractUrl(match, self.providerUrl);
const fileObj = {
fileRelativePath: path.join(path.dirname(self.filename), match),
url: `${pathWithoutFile}/${match}`
};

self.addRemappings(match, httpFileObj, level, (err) => {
if (err) return next(err);
if (httpFileObj) {
newContent = newContent.replace(match, httpFileObj.filePath);

fileObj.fileRelativePath = httpFileObj.filePath;
fileObj.url = httpFileObj.url;
} else if (!isHttpContract) {
// Just a normal import
return next();
}
filesToDownload.push(fileObj);
next();
});
}, (err) => {
callback(err, newContent, filesToDownload);
});
}

parseFileForImport(content, isHttpContract, callback) {
const self = this;
if (typeof isHttpContract === 'function') {
callback = isHttpContract;
isHttpContract = false;
}

this._parseFileForImport(content, isHttpContract, 0, (err, newContent, filesToDownload) => {
if (err) return callback(err);

if (self.downloadedImports) {
// We already parsed this file
return callback(null, newContent);
}
async.each(filesToDownload, ((fileObj, eachCb) => {
self.downloadFile(fileObj.fileRelativePath, fileObj.url, (_content) => {
eachCb();
});
}), (err) => {
self.downloadedImports = true;
callback(err, newContent);
});
});
}
downloadFile(filename, url, callback) {
const self = this;
async.waterfall([
function makeTheDir(next) {
fs.mkdirp(path.dirname(filename), (err) => {
if (err) {
return next(err);
}
next();
});
},
function downloadTheFile(next) {
let alreadyCalledBack = false;
function doCallback(err) {
if (alreadyCalledBack) {
return;
}
alreadyCalledBack = true;
next(err);
}
request(url)
.on('response', function (response) {
if (response.statusCode !== 200) {
doCallback('Getting file returned code ' + response.statusCode);
}
})
.on('error', doCallback)
.pipe(fs.createWriteStream(filename))
.on('finish', () => {
doCallback();
});
},
function readFile(next) {
fs.readFile(filename, next);
},
function parseForImports(content, next) {
self.parseFileForImport(content.toString(), true, (err) => {
next(err, content);
});
}
], (err, content) => {
if (err) {
console.error(__('Error while downloading the file'), url, err);
return callback('');
}
callback(content.toString());
});
}
content(callback) {
let content;
if (this.type === File.types.embark_internal) {
content = fs.readFileSync(fs.embarkPath(utils.joinPath('dist', this.path))).toString();
} else if (this.type === File.types.dapp_file) {
content = fs.readFileSync(this.path).toString();
} else if (this.type === File.types.custom) {
return this.resolver((theContent) => {
this.parseFileForImport(theContent, (err, newContent) => {
callback(newContent);
});
});
} else if (this.type === File.types.http) {
return this.downloadFile(this.filename, this.path, (content) => {
if (!content) {
return callback(content);
}
this.path = this.filename;
this.type = File.types.dapp_file;
callback(content);
});
} else {
throw new Error("unknown file: " + this.filename);
}
return this.parseFileForImport(content, (err, newContent) => {
callback(newContent);
});
}

}

File.types = {
embark_internal: 'embark_internal',
dapp_file: 'dapp_file',
custom: 'custom',
http: 'http'
};

module.exports = File;
@@ -0,0 +1,77 @@
import * as path from "path";

const fs = require("./fs.js");
const utils = require("../utils/utils");

export enum Types {
embarkInternal = "embark_internal",
dappFile = "dapp_file",
custom = "custom",
http = "http",
}

interface ImportRemapping {
prefix: string;
target: string;
}

export class File {
public type: Types;
public externalUrl: string = "";
public path: string;
public basedir: string;
public resolver: (callback: (content: string) => void) => void;
public pluginPath: string;
public storageConfig: any;
public providerUrl: string;
public importRemappings: ImportRemapping[] = [];

constructor(options: any) {
this.type = options.type;

this.basedir = options.basedir;
this.resolver = options.resolver;
this.pluginPath = options.pluginPath ? options.pluginPath : "";
this.storageConfig = options.storageConfig;
this.providerUrl = "";

if (this.type === Types.http) {
const external = utils.getExternalContractUrl(options.externalUrl, this.providerUrl);
this.externalUrl = external.url;
this.path = external.filePath;
} else {
this.path = options.path.replace(/\\/g, "/");
}
}
public get content(): Promise<string> {
return new Promise<string>((resolve) => {
switch (this.type) {
case Types.embarkInternal: {
const content = fs.readFileSync(fs.embarkPath(path.join("dist", this.path)), "utf-8");
return resolve(content);
}

case Types.dappFile: {
const content = fs.readFileSync(this.path, "utf-8").toString();
return resolve(content);
}

case Types.custom: {
return this.resolver((content: string) => {
resolve(content);
});
}

case Types.http: {
fs.ensureFileSync(this.path);
return utils.downloadFile(this.externalUrl, this.path, () => {
const content = fs.readFileSync(this.path, "utf-8");
resolve(content);
});
}
}
});
}

}
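For illustration only (not part of this diff): with the new `File` class above, reading a contract's source is promise-based rather than callback-based. A minimal sketch, assuming the module is imported from the new core file module:

```typescript
import { File, Types } from "./file";

// Hypothetical usage of the new API: `content` is a Promise-returning getter,
// so callers can await it instead of passing a callback.
async function readContractSource(): Promise<string> {
  const file = new File({ path: "app/contracts/simple_storage.sol", type: Types.dappFile });
  return await file.content; // resolves with the file's source code
}
```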
@@ -107,10 +107,14 @@ class BlockchainConnector {

if (type === 'vm') {
const sim = self._getSimulator();
const options = Object.assign({}, self.config.contractsConfig.deployment, {gasPrice: "0x01", gasLimit: "0xfffffffffffff"});
self.provider = sim.provider(options);
const options = Object.assign({}, self.config.contractsConfig.deployment, {
gasPrice: "0x01",
gasLimit: "0xfffffffffffff"
});

if (coverage) {
options.allowUnlimitedContractSize = true;
self.provider = sim.provider(options);
// Here we patch the sendAsync method on the provider. The goal behind this is to force pure/constant/view calls to become
// transactions, so that we can pull in execution traces and account for those executions in code coverage.
//

@@ -143,6 +147,8 @@ class BlockchainConnector {
});
});
};
} else {
self.provider = sim.provider(options);
}

self.web3.setProvider(self.provider);
@@ -9,7 +9,7 @@ This module abstracts the compiler interface. It exposes a plugin api to registe
arguments:

* `contractFiles` - <array of embark File Objects>
* `options` - <object> config object `{disableOptimizations: boolean (default: false)}`
* `options` - <object> config object `{isCoverage: boolean (default: false)}`

response:

@@ -33,8 +33,8 @@ response:
example:

```
const File = require('src/lib/core/file.js');
const contractFiles = [(new File({filename: "simplestorage.sol", type: "custom", path: "simplestorage.sol", resolver: (cb) => { return cb(".. contract code...") }}))];
import { File } from 'src/lib/core/file.js';
const contractFiles = [(new File({path: "simplestorage.sol", type: "custom", resolver: (cb) => { return cb(".. contract code...") }}))];

embark.events.request("compiler:contracts", contractFiles, {}, (err, compiledObject) => {
})
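For illustration only (not part of this diff): elsewhere in this commit the `compiler:contracts` handler loses its separate options argument (see the compiler and contracts_manager hunks below), so a request now looks roughly like this sketch:

```typescript
// Sketch based on the updated handler signature compile_contracts(contractFiles, cb):
// no options object is passed along with the "compiler:contracts" request anymore.
embark.events.request("compiler:contracts", contractFiles, (err: any, compiledObject: any) => {
  // compiledObject maps contract names to their compiled output
});
```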
@@ -7,17 +7,17 @@ import { CompilerPluginObject, Plugins } from "../../../typings/plugins";
class Compiler {
private logger: any;
private plugins: Plugins;
private disableOptimizations: any;
private isCoverage: boolean;

constructor(embark: Embark, options: any) {
this.logger = embark.logger;
this.plugins = options.plugins;
this.disableOptimizations = options.disableOptimizations;
this.isCoverage = options.isCoverage;

embark.events.setCommandHandler("compiler:contracts", this.compile_contracts.bind(this));
}

private compile_contracts(contractFiles: any[], options: any, cb: any) {
private compile_contracts(contractFiles: any[], cb: any) {
if (contractFiles.length === 0) {
return cb(null, {});
}

@@ -25,7 +25,7 @@ class Compiler {
const compiledObject: {[index: string]: any} = {};

const compilerOptions = {
disableOptimizations: this.disableOptimizations || options.disableOptimizations,
isCoverage: this.isCoverage,
};

async.eachObject(this.getAvailableCompilers(),

@@ -60,7 +60,7 @@ class Compiler {
},
(err: any) => {
contractFiles.filter((f: any) => !f.compiled).forEach((file: any) => {
this.logger.warn(__("%s doesn't have a compatible contract compiler. Maybe a plugin exists for it.", file.filename));
this.logger.warn(__("%s doesn't have a compatible contract compiler. Maybe a plugin exists for it.", file.path));
});

cb(err, compiledObject);

@@ -81,7 +81,7 @@ class Compiler {

private filesMatchingExtension(extension: string) {
return (file: any) => {
const fileMatch = file.filename.match(/\.[0-9a-z]+$/);
const fileMatch = file.path.match(/\.[0-9a-z]+$/);
if (fileMatch && (fileMatch[0] === extension)) {
file.compiled = true;
return true;
@@ -19,7 +19,6 @@ class ContractsManager {
this.deployOnlyOnConfig = false;
this.compileError = false;
this.compileOnceOnly = options.compileOnceOnly;
this.disableOptimizations = options.disableOptimizations;

self.events.setCommandHandler('contracts:list', (cb) => {
cb(self.compileError, self.listContracts());

@@ -270,8 +269,6 @@ class ContractsManager {
let self = this;
self.contracts = {};

let compilerOptions = {disableOptimizations: this.disableOptimizations};

if(resetContracts) self.contracts = {};
async.waterfall([
function beforeBuild(callback) {

@@ -308,7 +305,7 @@ class ContractsManager {
if (self.compileOnceOnly && hasCompiledContracts && allContractsCompiled) {
return callback();
}
self.events.request("compiler:contracts", self.contractsFiles, compilerOptions, function (err, compiledObject) {
self.events.request("compiler:contracts", self.contractsFiles, function (err, compiledObject) {
self.compiledContracts = compiledObject;
callback(err);
});
@@ -7,9 +7,9 @@ import { Injector } from "./injector";
import { Instrumenter } from "./instrumenter";
import { InstrumentWalker } from "./instrumentWalker";
import { coverageContractsPath } from "./path";
import { Suppressor } from "./suppressor";
import { BranchType, Coverage } from "./types";

const File = require("../../core/file");
const fs = require("../../core/fs");

const STATEMENT_EVENT = "__StatementCoverage";

@@ -24,10 +24,10 @@ function nextId() {
export class ContractEnhanced {
public id: number;
public coverage: Coverage;
public coverageFilepath: string;
public originalSource: string;
public source: string;
private ast: parser.ASTNode;
private coverageFilepath: string;
private functionsBodyLocation: {[id: number]: Location} = {};

constructor(public filepath: string, public solcVersion: string) {

@@ -52,7 +52,6 @@ export class ContractEnhanced {
}

public instrument() {
new Suppressor(this).process();
const instrumenter = new Instrumenter(this);
const instrumentWalker = new InstrumentWalker(instrumenter);
instrumentWalker.walk(this.ast);
@@ -4,6 +4,7 @@ import Web3Contract from "web3/eth/contract";

import { Contract } from "../../../typings/contract";
import { Embark } from "../../../typings/embark";
import { removePureView } from "../../utils/solidity/code";
import { ContractEnhanced } from "./contractEnhanced";
import { coverageContractsPath } from "./path";
import { Coverage as ICoverage } from "./types";

@@ -23,8 +24,11 @@ export default class Coverage {

this.contracts = this.getContracts();

this.instrumentContracts();
this.swapContracts();
this.embark.events.setCommandHandler("coverage:prepareContracts", async (done) => {
await this.prepareContracts();
this.swapContracts();
done();
});

this.embark.events.on("tests:ready", this.pushDeployedContracts.bind(this));
this.embark.events.on("tests:finished", this.produceCoverageReport.bind(this));

@@ -41,15 +45,16 @@ export default class Coverage {
.map((filepath) => new ContractEnhanced(filepath, solcVersion));
}

private instrumentContracts() {
this.contracts.forEach((contract) => contract.instrument());
private async prepareContracts() {
const promises = this.contracts.map(async (contract) => {
contract.instrument();
contract.save();
});
await Promise.all(promises);
removePureView(coverageContractsPath());
}

private swapContracts() {
this.contracts.forEach((contract) => {
contract.save();
});

this.embark.config.embarkConfig.contracts = this.contractsDir.reduce((acc: string[], value: string) => (
acc.concat(path.join(coverageContractsPath(), value))
), []);
@@ -71,7 +71,7 @@ export class Instrumenter {
this.contract.addBranch(node.loc.start.line, "if", locations);
}

private addInjectionPoints(type: InjectionPointType, id: number, location: Location, locationIdx?: number) {
this.injectionPoints.push({type, id, location, locationIdx});
private addInjectionPoints(type: InjectionPointType, id: number, location: Location) {
this.injectionPoints.push({type, id, location});
}
}
@@ -2,4 +2,4 @@ import * as path from "path";

const fs = require("../../core/fs");

export const coverageContractsPath = () => path.join(fs.dappPath(), "coverage", "instrumentedContracts");
export const coverageContractsPath = () => path.join("coverage", "instrumentedContracts");
@@ -1,12 +0,0 @@
import { ContractEnhanced } from "./contractEnhanced";

export class Suppressor {

constructor(private contract: ContractEnhanced) {
}

public process() {
this.contract.source = this.contract.source.replace(/pure/g, "");
this.contract.source = this.contract.source.replace(/view/g, "");
}
}
@@ -7,7 +7,6 @@ export interface InjectionPoint {
type: InjectionPointType;
id: number;
location: Location;
locationIdx?: number;
}

export interface Coverage {
@@ -287,11 +287,10 @@ class Pipeline {
}
async.map(
files,
function (file, fileCb) {
self.logger.trace("reading " + file.filename);
return file.content(fileContent => {
self.runPlugins(file, fileContent, fileCb);
});
async function (file, fileCb) {
self.logger.trace("reading " + file.path);
const fileContent = await file.content;
self.runPlugins(file, fileContent, fileCb);
},
function (err, contentFiles) {
if (err) {

@@ -310,7 +309,7 @@ class Pipeline {
}

async.each(contentFiles, function (file, eachCb) {
let filename = file.filename.replace(file.basedir + '/', '');
let filename = file.path.replace(file.basedir + '/', '');
self.logger.info(`${'Pipeline:'.cyan} writing file ` + (utils.joinPath(self.buildDir, targetDir, filename)).bold.dim);

fs.copy(file.path, utils.joinPath(self.buildDir, targetDir, filename), {overwrite: true}, eachCb);

@@ -387,21 +386,21 @@ class Pipeline {
runPlugins(file, fileContent, fileCb) {
const self = this;
if (self.pipelinePlugins.length <= 0) {
return fileCb(null, {content: fileContent, filename: file.filename, path: file.path, basedir: file.basedir, modified: true});
return fileCb(null, {content: fileContent, path: file.path, basedir: file.basedir, modified: true});
}
async.eachSeries(self.pipelinePlugins, (plugin, pluginCB) => {
if (file.options && file.options.skipPipeline) {
return pluginCB();
}

fileContent = plugin.runPipeline({targetFile: file.filename, source: fileContent});
fileContent = plugin.runPipeline({targetFile: file.path, source: fileContent});
file.modified = true;
pluginCB();
}, err => {
if (err) {
self.logger.error(err.message);
}
return fileCb(null, {content: fileContent, filename: file.filename, path: file.path, basedir: file.basedir, modified: true});
return fileCb(null, {content: fileContent, path: file.path, basedir: file.basedir, modified: true});
});
}
@@ -1,5 +1,6 @@
let async = require('../../utils/async_extend.js');
let SolcW = require('./solcW.js');
const remapImports = require('../../utils/solidity/remapImports');

class Solidity {

@@ -21,7 +22,7 @@ class Solidity {
'post',
'/embark-api/contract/compile',
(req, res) => {
if(typeof req.body.code !== 'string'){
if (typeof req.body.code !== 'string') {
return res.send({error: 'Body parameter \'code\' must be a string'});
}
const input = {[req.body.name]: {content: req.body.code.replace(/\r\n/g, '\n')}};

@@ -39,7 +40,7 @@ class Solidity {
self.solcW.compile(jsonObj, function (err, output) {
self.events.emit('contracts:compile:solc', jsonObj);

if(err){
if (err) {
return callback(err);
}
if (output.errors && returnAllErrors) {

@@ -47,7 +48,7 @@ class Solidity {
}

if (output.errors) {
for (let i=0; i<output.errors.length; i++) {
for (let i = 0; i < output.errors.length; i++) {
if (output.errors[i].type === 'Warning') {
self.logger.warn(output.errors[i].formattedMessage);
}
@@ -70,10 +71,10 @@ class Solidity {
if (self.solcAlreadyLoaded) {
return callback();
}

let storageConfig = self.storageConfig;
if (storageConfig && storageConfig.upload && storageConfig.upload.getUrl) {
self.providerUrl = storageConfig.upload.getUrl;
self.providerUrl = storageConfig.upload.getUrl;
}
self.solcW = new SolcW(self.embark, {logger: self.logger, events: self.events, ipc: self.ipc, useDashboard: self.useDashboard, providerUrl: self.providerUrl});

@@ -90,7 +91,7 @@ class Solidity {
sources: codeInputs,
settings: {
optimizer: {
enabled: (!options.disableOptimizations && self.options.optimize),
enabled: (!options.isCoverage && self.options.optimize),
runs: self.options["optimize-runs"]
},
outputSelection: {

@@ -168,23 +169,23 @@ class Solidity {
function prepareInput(callback) {
async.each(contractFiles,
function (file, fileCb) {
let filename = file.filename;
let filename = file.path;

for (let directory of self.embark.config.contractDirectories) {
let match = new RegExp("^" + directory);
filename = filename.replace(match, '');
}

originalFilepath[filename] = file.filename;

file.content(function (fileContent) {
if (!fileContent) {
originalFilepath[filename] = file.path;

remapImports.prepareForCompilation(file, options.isCoverage)
.then(fileContent => {
input[filename] = {content: fileContent.replace(/\r\n/g, '\n')};
fileCb();
}).catch((_e) => {
self.logger.error(__('Error while loading the content of ') + filename);
return fileCb();
}
input[filename] = {content: fileContent.replace(/\r\n/g, '\n')};
fileCb();
});
fileCb();
});
},
function (err) {
callback(err);
@@ -192,6 +192,13 @@ class Test {
function checkDeploymentOpts(next) {
self.checkDeploymentOptions(options, next);
},
function prepareContracts(next) {
if (!self.firstDeployment || !self.options.coverage) {
return next();
}
console.info('Preparing contracts for coverage'.cyan);
self.events.request("coverage:prepareContracts", next);
},
function compileContracts(next) {
if (!self.firstDeployment) {
return next();

@@ -262,6 +269,13 @@ class Test {
return instance;
}

track(jsonInterface, address) {
this.events.request('blockchain:get', (web3) => {
const instance = new web3.eth.Contract(jsonInterface, address);
this.events.emit("tests:manualDeploy", instance);
});
}

async _deploy(config, callback) {
const self = this;
async.waterfall([
@@ -0,0 +1,16 @@
import * as globule from "globule";
import * as path from "path";

const fs = require("../../core/fs");

export const removePureView = (dir: string) => {
globule.find(path.join(dir, "**/*.sol")).forEach((filepath) => {
let source = fs.readFileSync(filepath, "utf-8");
source = replacePureView(source);
fs.writeFileSync(filepath, source);
});
};

export const replacePureView = (source: string) => {
return source.replace(/pure/g, "").replace(/view/g, "");
};
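For illustration only (not part of this diff): `replacePureView` strips the `pure`/`view` keywords so that, under coverage, such calls can be turned into transactions and traced. A small sketch of its effect:

```typescript
import { replacePureView } from "./code";

// Illustrative input/output; the keyword is removed, leaving its surrounding spaces.
replacePureView("function get() public view returns (uint retVal) { return storedData; }");
// -> "function get() public  returns (uint retVal) { return storedData; }"
```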
@@ -0,0 +1,144 @@
import * as path from "path";
import { File, Types } from "../../core/file";
import { removePureView, replacePureView } from "./code";

const { urlJoin, groupBy } = require("../../utils/utils");
const fs = require("../../core/fs");

const FIND_IMPORTS_REGEX = /^import[\s]*(['"])(.*)\1;/gm;
const FIND_FILE_REGEX = /import[\s]*(['"])(.*)\1;/;

interface RemapImport {
path: string;
searchValue: string;
replaceValue: string;
}

const getImports = (source: string) => {
const importStatements = source.match(FIND_IMPORTS_REGEX) || [];

return importStatements.map((importStatement) => {
const fileStatement = FIND_FILE_REGEX.exec(importStatement) || [];
if (fileStatement.length < 3) {
return "";
}

return fileStatement[2];
}).filter((fileImport) => fileImport.length);
};

const prepareInitialFile = async (file: File) => {
if (file.type === Types.http) {
return await file.content;
}

const destination = path.join(".embark", file.path);
if (file.type === Types.dappFile) {
fs.copySync(file.path, destination);
}

if (file.type === Types.custom) {
fs.writeFileSync(destination);
}

file.path = destination;
};
const buildNewFile = (file: File, importPath: string) => {
let from: string;
let to: string;

// started with HTTP file that then further imports local paths in
// it's own repo/directory
if (file.type === Types.http && !isHttp(importPath)) {
const externalUrl = urlJoin(file.externalUrl, importPath);
return new File({ externalUrl, type: Types.http });
}

// http import
if (isHttp(importPath)) {
return new File({ externalUrl: importPath, type: Types.http });
}

// imported from node_modules, ie import "@aragon/os/contracts/acl/ACL.sol"
if (isNodeModule(importPath)) {
from = path.join("node_modules", importPath);
to = path.join(".embark", from);
fs.copySync(from, to);
return new File({ path: to, type: Types.dappFile });
}

// started with node_modules then further imports local paths in it's own repo/directory
if (isEmbarkNodeModule(file.path)) {
from = path.join(path.dirname(file.path.replace(".embark", ".")), importPath);
to = path.join(path.dirname(file.path), importPath);
fs.copySync(from, to);
return new File({ path: to, type: Types.dappFile });
}

// local import, ie import "../path/to/contract" or "./path/to/contract"
from = path.join(path.dirname(file.path.replace(".embark", ".")), importPath);
to = path.join(".embark", from);

fs.copySync(from, to);
return new File({ path: to, type: Types.dappFile });
};

const rescursivelyFindRemapImports = async (file: File) => {
let remapImports: RemapImport[] = [];
const content = await file.content;
const imports = getImports(content);

// if no imports, break recursion
if (!imports.length) {
return [];
}

for (const importPath of imports) {
const newFile = buildNewFile(file, importPath);
file.importRemappings.push({prefix: importPath, target: newFile.path});
remapImports.push({path: file.path, searchValue: importPath, replaceValue: newFile.path});
remapImports = remapImports.concat(
await rescursivelyFindRemapImports(newFile),
);
}

return remapImports;
};
const isEmbarkNodeModule = (input: string) => {
return input.startsWith(".embark/node_modules");
};

const isNodeModule = (input: string) => {
return !input.startsWith("..") && fs.existsSync(path.join("./node_modules/", input));
};

const isHttp = (input: string) => {
return input.startsWith("https://") || input.startsWith("http://");
};

const replaceImports = (remapImports: RemapImport[]) => {
const byPath: {[path: string]: [{searchValue: string, replaceValue: string}]} = groupBy(remapImports, "path");
Object.keys(byPath).forEach((p) => {
let source = fs.readFileSync(p, "utf-8");
byPath[p].forEach(({searchValue, replaceValue}) => {
source = source.replace(`import "${searchValue}";`, `import "${replaceValue}";`);
});
fs.writeFileSync(p, source);
});
};

export const prepareForCompilation = async (file: File, isCoverage = false) => {
await prepareInitialFile(file);
const remapImports = await rescursivelyFindRemapImports(file);
replaceImports(remapImports);

const content = await file.content;
if (!isCoverage) {
return content;
}

removePureView(path.join(".embark"));
return replacePureView(content);
};
@@ -624,6 +624,22 @@ function isEs6Module(module) {
return typeof module === 'object' && typeof module.default === 'function' && module.__esModule;
}

function urlJoin(url, path) {
let urlChunks = url.split('/');
let levels = path.split('../');

// remove relative path parts from end of url
urlChunks = urlChunks.slice(0, urlChunks.length - levels.length);

// remove relative path parts from start of match
levels.splice(0, levels.length - 1);

// add on our match so we can join later
urlChunks = urlChunks.concat(levels.join().replace('./', ''));

return urlChunks.join('/');
}

module.exports = {
joinPath,
dirname,

@@ -674,5 +690,6 @@ module.exports = {
jsonFunctionReplacer,
getWindowSize,
toposort,
isEs6Module
isEs6Module,
urlJoin
};
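For illustration only (not part of this diff): the new `urlJoin` helper resolves a relative Solidity import against the URL of the file that imports it. A sketch with made-up values:

```typescript
const { urlJoin } = require("./utils");

// "../ownable.sol" is resolved against the directory of the importing file.
const base = "https://raw.githubusercontent.com/org/repo/master/contracts/token/Token.sol";
urlJoin(base, "../ownable.sol");
// -> "https://raw.githubusercontent.com/org/repo/master/contracts/ownable.sol"
```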
@@ -189,40 +189,37 @@ describe('embark.Config', function () {
];
const expected = [
{
"filename": ".embark/contracts/embark-framework/embark/master/test_app/app/contracts/simple_storage.sol",
"type": "http",
"path": "https://raw.githubusercontent.com/embark-framework/embark/master/test_app/app/contracts/simple_storage.sol",
"externalUrl": "https://raw.githubusercontent.com/embark-framework/embark/master/test_app/app/contracts/simple_storage.sol",
"path": ".embark/contracts/embark-framework/embark/master/test_app/app/contracts/simple_storage.sol",
"pluginPath": '',
"importRemappings": [],
"basedir": "",
"importRemappings": [],
"resolver": undefined,
"storageConfig": undefined,
"providerUrl": undefined,
"downloadedImports": false
"providerUrl": ""
},
{
"filename": ".embark/contracts/status-im/contracts/master/contracts/identity/ERC725.sol",
"type": "http",
"path": "https://raw.githubusercontent.com/status-im/contracts/master/contracts/identity/ERC725.sol",
"externalUrl": "https://raw.githubusercontent.com/status-im/contracts/master/contracts/identity/ERC725.sol",
"path": ".embark/contracts/status-im/contracts/master/contracts/identity/ERC725.sol",
"pluginPath": '',
"importRemappings": [],
"basedir": "",
"importRemappings": [],
"resolver": undefined,
"storageConfig": undefined,
"providerUrl": undefined,
"downloadedImports": false
"providerUrl": ""
},
{
"filename": ".embark/contracts/bzz:/1ffe993abc835f480f688d07ad75ad1dbdbd1ddb368a08b7ed4d3e400771dd63",
"externalUrl": "https://swarm-gateways.net/bzz:/1ffe993abc835f480f688d07ad75ad1dbdbd1ddb368a08b7ed4d3e400771dd63",
"path": ".embark/contracts/bzz:/1ffe993abc835f480f688d07ad75ad1dbdbd1ddb368a08b7ed4d3e400771dd63",
"type": "http",
"path": "https://swarm-gateways.net/bzz:/1ffe993abc835f480f688d07ad75ad1dbdbd1ddb368a08b7ed4d3e400771dd63",
"pluginPath": '',
"importRemappings": [],
"basedir": "",
"importRemappings": [],
"resolver": undefined,
"storageConfig": undefined,
"providerUrl": undefined,
"downloadedImports": false
"providerUrl": ""
}
];
config.loadExternalContractsFiles();
@@ -1,8 +1,9 @@
/*globals describe, it*/
import { File, Types } from "../lib/core/file";

let ContractsManager = require('../lib/modules/contracts_manager/index.js');
let Compiler = require('../lib/modules/compiler/');
let Logger = require('../lib/core/logger.js');
let File = require('../lib/core/file.js');
let TestLogger = require('../lib/utils/test_logger');
let Events = require('../lib/core/events');
let Ipc = require('../lib/core/ipc.js');

@@ -12,7 +13,7 @@ let assert = require('assert');
let Plugins = require('../lib/core/plugins.js');

let readFile = function(file) {
return new File({filename: file, type: File.types.dapp_file, path: file});
return new File({filename: file, type: Types.dappFile, path: file});
};

const currentSolcVersion = require('../../package.json').dependencies.solc;
src/test/file.js
@@ -1,111 +0,0 @@
/*globals describe, it*/
const File = require('../lib/core/file');
const fs = require('fs-extra');
const path = require('path');
const assert = require('assert');
const sinon = require('sinon');

describe('embark.File', function () {
describe('parseFileForImport', () => {
it('should find all the imports', function (done) {
const contract = fs.readFileSync('./dist/test/contracts/contract_with_import.sol').toString();
const file = new File({filename: '.embark/contracts/embark-framework/embark/master/test_app/app/contracts/simple_storage.sol',
path: 'https://raw.githubusercontent.com/embark-framework/embark/master/test_apps/test_app/app/contracts/simple_storage.sol'});
const downloadFileStub = sinon.stub(file, 'downloadFile')
.callsFake((path, url, cb) => {
cb();
});

file.parseFileForImport(contract, true, () => {
assert.strictEqual(downloadFileStub.callCount, 1);
assert.strictEqual(downloadFileStub.firstCall.args[0],
path.normalize('.embark/contracts/embark-framework/embark/master/test_app/app/contracts/ownable.sol'));
assert.strictEqual(downloadFileStub.firstCall.args[1],
'https://raw.githubusercontent.com/embark-framework/embark/master/test_apps/test_app/app/contracts/./ownable.sol');
done();
});
});

it('should find and add remappings for all recursive imports', function (done) {
const contract = fs.readFileSync('./dist/test/contracts/recursive_test_0.sol').toString();
const file = new File({filename: './dist/test/contracts/recursive_test_0.sol',
path: path.join(__dirname, './contracts/recursive_test_0.sol')});

file.parseFileForImport(contract, () => {
assert.deepEqual(file.importRemappings[0], {
prefix: "./recursive_test_1.sol",
target: path.join(__dirname, "./contracts/recursive_test_1.sol")
});
assert.deepEqual(file.importRemappings[1], {
prefix: "./recursive_test_2.sol",
target: path.join(__dirname, "./contracts/recursive_test_2.sol")
});
assert.deepEqual(file.importRemappings[2], {
prefix: "embark-test-contract-0/recursive_test_3.sol",
target: path.resolve(path.join("node_modules", "./embark-test-contract-0/recursive_test_3.sol"))
});
assert.deepEqual(file.importRemappings[3], {
prefix: "embark-test-contract-1/recursive_test_4.sol",
target: path.resolve(path.join("node_modules", "./embark-test-contract-1/recursive_test_4.sol"))
});
done();
});
});
it('should find all the imports but not call download because not a http contract', function (done) {
const contract = fs.readFileSync('./dist/test/contracts/contract_with_import.sol').toString();
const file = new File({filename: '.embark/contracts/embark-framework/embark/master/test_app/app/contracts/simple_storage.sol',
path: 'https://raw.githubusercontent.com/embark-framework/embark/master/test_apps/test_app/app/contracts/simple_storage.sol'});
const downloadFileStub = sinon.stub(file, 'downloadFile')
.callsFake((path, url, cb) => {
cb();
});

file.parseFileForImport(contract, () => {
assert.strictEqual(downloadFileStub.callCount, 0);
done();
});
});

it('should find all the imports and call downlaod because it is an http import', function (done) {
const contract = fs.readFileSync('./dist/test/contracts/contract_with_http_import.sol').toString();
const file = new File({filename: '.embark/contracts/embark-framework/embark/master/test_app/app/contracts/simple_storage.sol',
path: 'https://raw.githubusercontent.com/embark-framework/embark/master/test_apps/test_app/app/contracts/simple_storage.sol'});
const downloadFileStub = sinon.stub(file, 'downloadFile')
.callsFake((path, url, cb) => {
cb();
});

file.parseFileForImport(contract, () => {
assert.strictEqual(downloadFileStub.callCount, 1);
assert.strictEqual(downloadFileStub.firstCall.args[0],
'.embark/contracts/embark-framework/embark/master/test_apps/contracts_app/contracts/contract_args.sol');
assert.strictEqual(downloadFileStub.firstCall.args[1],
'https://raw.githubusercontent.com/embark-framework/embark/master/test_apps/contracts_app/contracts/contract_args.sol');
done();
});
});

it('should find all the imports but only once if called twice', function (done) {
const contract = fs.readFileSync('./dist/test/contracts/contract_with_http_import.sol').toString();
const file = new File({filename: '.embark/contracts/embark-framework/embark/master/test_app/app/contracts/simple_storage.sol',
path: 'https://raw.githubusercontent.com/embark-framework/embark/master/test_apps/test_app/app/contracts/simple_storage.sol'});
const downloadFileStub = sinon.stub(file, 'downloadFile')
.callsFake((path, url, cb) => {
cb();
});

file.parseFileForImport(contract, () => {
// Parse again
file.parseFileForImport(contract, () => {
assert.strictEqual(downloadFileStub.callCount, 1);
assert.strictEqual(downloadFileStub.firstCall.args[0],
'.embark/contracts/embark-framework/embark/master/test_apps/contracts_app/contracts/contract_args.sol');
assert.strictEqual(downloadFileStub.firstCall.args[1],
'https://raw.githubusercontent.com/embark-framework/embark/master/test_apps/contracts_app/contracts/contract_args.sol');
done();
});
});
});
});
});
@@ -1,17 +1,18 @@
/*globals describe, it*/
import { File, Types } from "../../../lib/core/file";

const assert = require('assert');

// TODO: need to rethink i18n and how that is required in each module
require('../../../lib/core/i18n/i18n');

const Compiler = require('../../../lib/modules/compiler');
const File = require('../../../lib/core/file.js');
const Plugins = require('../../../lib/core/plugins.js');
const TestLogger = require('../../../lib/utils/test_logger');
const Events = require('../../../lib/core/events');

const readFile = function(file) {
return new File({filename: file, type: File.types.dapp_file, path: file});
return new File({filename: file, type: Types.dappFile, path: file});
};

const currentSolcVersion = require('../../../../package.json').dependencies.solc;

@@ -67,7 +68,7 @@ describe('embark.Compiler', function() {
readFile('dist/test/contracts/simple_storage.sol'),
readFile('dist/test/contracts/token.sol'),
readFile('dist/test/contracts/erc20.vy')
], {}, (err, compiledObject) => {
], (err, compiledObject) => {
assert.deepEqual(compiledObject, { contractA: 'solResult', contractB: 'vyResult' })
done();
})
@@ -1,12 +1,12 @@
/*globals describe, it*/
import { File, Types } from "../../../lib/core/file.js";

let SolidityCompiler = require('../../../lib/modules/solidity');
let TestLogger = require('../../../lib/utils/test_logger');
let File = require('../../../lib/core/file.js');
let Ipc = require('../../../lib/core/ipc.js');
let assert = require('assert');

let readFile = function(file) {
return new File({filename: file, type: File.types.dapp_file, path: file});
return new File({filename: file, type: Types.dappFile, path: file});
};

let ipcObject = new Ipc({
@@ -14,5 +14,4 @@ contract SimpleStorage {
function get() public view returns (uint retVal) {
return storedData;
}

}
@@ -1,6 +1,6 @@
pragma solidity ^0.4.17;

import "another_folder/another_test.sol";
import "../another_folder/another_test.sol";
import "zeppelin-solidity/contracts/ownership/Ownable.sol";

contract SimpleStorageTest is Ownable {
@@ -3,7 +3,6 @@ const fs = require('fs-extra');
const assert = require('assert');

describe('http contracts', () => {

it('should have downloaded the file in .embark/contracts', (done) => {
const contractPath = '.embark/contracts/status-im/contracts/151-embark31/contracts/token/StandardToken.sol';
fs.access(contractPath, (err) => {
yarn.lock
@@ -846,6 +846,11 @@
"@types/connect" "*"
"@types/node" "*"

"@types/caseless@*":
version "0.12.1"
resolved "https://registry.yarnpkg.com/@types/caseless/-/caseless-0.12.1.tgz#9794c69c8385d0192acc471a540d1f8e0d16218a"
integrity sha512-FhlMa34NHp9K5MY1Uz8yb+ZvuX0pnvn3jScRSNAb75KHGB8d3rEU6hqMs3Z2vjuytcMfRg6c5CHMc3wtYyD2/A==

"@types/connect@*":
version "3.4.32"
resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.32.tgz#aa0e9616b9435ccad02bc52b5b454ffc2c70ba28"

@@ -904,6 +909,13 @@
"@types/express-serve-static-core" "*"
"@types/serve-static" "*"

"@types/form-data@*":
version "2.2.1"
resolved "https://registry.yarnpkg.com/@types/form-data/-/form-data-2.2.1.tgz#ee2b3b8eaa11c0938289953606b745b738c54b1e"
integrity sha512-JAMFhOaHIciYVh8fb5/83nmuO/AHwmto+Hq7a9y8FzLDcC1KCU344XDOMEmahnrTFlHjgh4L0WJFczNIX2GxnQ==
dependencies:
"@types/node" "*"

"@types/fs-extra@^5.0.2":
version "5.0.4"
resolved "https://registry.yarnpkg.com/@types/fs-extra/-/fs-extra-5.0.4.tgz#b971134d162cc0497d221adde3dbb67502225599"

@@ -992,6 +1004,16 @@
resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.3.tgz#7ee330ba7caafb98090bece86a5ee44115904c2c"
integrity sha512-ewFXqrQHlFsgc09MK5jP5iR7vumV/BYayNC6PgJO2LPe8vrnNFyjQjSppfEngITi0qvfKtzFvgKymGheFM9UOA==

"@types/request@2.48.1":
version "2.48.1"
resolved "https://registry.yarnpkg.com/@types/request/-/request-2.48.1.tgz#e402d691aa6670fbbff1957b15f1270230ab42fa"
integrity sha512-ZgEZ1TiD+KGA9LiAAPPJL68Id2UWfeSO62ijSXZjFJArVV+2pKcsVHmrcu+1oiE3q6eDGiFiSolRc4JHoerBBg==
dependencies:
"@types/caseless" "*"
"@types/form-data" "*"
"@types/node" "*"
"@types/tough-cookie" "*"

"@types/semver@^5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@types/semver/-/semver-5.5.0.tgz#146c2a29ee7d3bae4bf2fcb274636e264c813c45"

@@ -1012,6 +1034,11 @@
dependencies:
"@types/node" "*"

"@types/tough-cookie@*":
version "2.3.4"
resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-2.3.4.tgz#821878b81bfab971b93a265a561d54ea61f9059f"
integrity sha512-Set5ZdrAaKI/qHdFlVMgm/GsAv/wkXhSTuZFkJ+JI7HK+wIkIlOaUXSXieIvJ0+OvGIqtREFoE+NHJtEq0gtEw==

"@types/underscore@*":
version "1.8.9"
resolved "https://registry.yarnpkg.com/@types/underscore/-/underscore-1.8.9.tgz#fef41f800cd23db1b4f262ddefe49cd952d82323"