mirror of https://github.com/embarklabs/embark.git
Merge branch 'develop' of https://github.com/embark-framework/embark into develop
This commit is contained in: commit 502befc453
@@ -7,3 +7,5 @@

### Review
<use @mentions for quick questions, specific feedback, and progress updates.>

### Cool Spaceship Picture

20 bin/embark
@@ -10,26 +10,6 @@ try {
  }
}

function launchEmbark() {
  var Cmd = require('../cmd/cmd');
  var cli = new Cmd();
  cli.process(process.argv);
}

const path = require('path');
try {
  const dappPackage = require(path.join(process.cwd(), 'package.json'));
  require(path.join(process.cwd(), 'embark.json')); // Make sure we are in a Dapp
  require('check-dependencies')(dappPackage, (state) => {
    if (state.status) {
      require('colors');
      console.error('\nMissing dependencies. Please run npm install'.red);
      process.exit();
    }
    launchEmbark();
  });
} catch (_e) {
  // We are not in a Dapp
  launchEmbark();
}

31 cmd/cmd.js
@@ -30,6 +30,25 @@ process.env.NODE_PATH = utils.joinPath(process.env.EMBARK_PATH, 'node_modules')
  (process.env.NODE_PATH ? require('path').delimiter : '') +
  (process.env.NODE_PATH || '');

function checkDeps() {
  const path = require('path');
  try {
    const dappPackage = require(path.join(process.cwd(), 'package.json'));
    require(path.join(process.cwd(), 'embark.json')); // Make sure we are in a Dapp
    require('check-dependencies')(dappPackage, (state) => {
      if (state.status) {
        require('colors');
        console.error('\nMissing dependencies. Please run npm install'.red);
        process.exit();
      }
      return true;
    });
  } catch (_e) {
    // We are not in a Dapp
    return true;
  }
}

class Cmd {
  constructor() {
    program.version(embark.version);

@@ -129,6 +148,7 @@ class Cmd {
      .option('--pipeline [pipeline]', __('webpack config to use (default: production)'))
      .description(__('deploy and build dapp at ') + 'dist/ (default: development)')
      .action(function(env, _options) {
        checkDeps();
        i18n.setOrDetectLocale(_options.locale);
        _options.env = env || 'development';
        _options.logFile = _options.logfile; // fix casing

@ -155,6 +175,7 @@ class Cmd {
|
|||
.option('--pipeline [pipeline]', __('webpack config to use (default: development)'))
|
||||
.description(__('run dapp (default: %s)', 'development'))
|
||||
.action(function(env, options) {
|
||||
checkDeps();
|
||||
i18n.setOrDetectLocale(options.locale);
|
||||
embark.run({
|
||||
env: env || 'development',
|
||||
|
@ -181,6 +202,7 @@ class Cmd {
|
|||
.option('--pipeline [pipeline]', __('webpack config to use (default: development)'))
|
||||
.description(__('Start the Embark console'))
|
||||
.action(function(env, options) {
|
||||
checkDeps();
|
||||
i18n.setOrDetectLocale(options.locale);
|
||||
embark.console({
|
||||
env: env || 'development',
|
||||
|
@ -200,6 +222,7 @@ class Cmd {
|
|||
.option('--locale [locale]', __('language to use (default: en)'))
|
||||
.description(__('run blockchain server (default: %s)', 'development'))
|
||||
.action(function(env, options) {
|
||||
checkDeps();
|
||||
i18n.setOrDetectLocale(options.locale);
|
||||
embark.initConfig(env || 'development', {
|
||||
embarkConfig: 'embark.json',
|
||||
|
@ -222,6 +245,7 @@ class Cmd {
|
|||
.option('--locale [locale]', __('language to use (default: en)'))
|
||||
|
||||
.action(function(env, options) {
|
||||
checkDeps();
|
||||
i18n.setOrDetectLocale(options.locale);
|
||||
embark.initConfig(env || 'development', {
|
||||
embarkConfig: 'embark.json',
|
||||
|
@ -246,6 +270,7 @@ class Cmd {
|
|||
.option('--loglevel [loglevel]', __('level of logging to display') + ' ["error", "warn", "info", "debug", "trace"]', /^(error|warn|info|debug|trace)$/i, 'warn')
|
||||
.description(__('run tests'))
|
||||
.action(function(file, options) {
|
||||
checkDeps();
|
||||
i18n.setOrDetectLocale(options.locale);
|
||||
embark.runTests({file, loglevel: options.loglevel, gasDetails: options.gasDetails, node: options.node});
|
||||
});
|
||||
|
@ -262,6 +287,7 @@ class Cmd {
|
|||
.option('--pipeline [pipeline]', __('webpack config to use (default: production)'))
|
||||
.description(__('Upload your dapp to a decentralized storage') + '.')
|
||||
.action(function(env, _options) {
|
||||
checkDeps();
|
||||
i18n.setOrDetectLocale(_options.locale);
|
||||
if (env === "ipfs" || env === "swarm") {
|
||||
console.warn(("did you mean " + "embark upload".bold + " ?").underline);
|
||||
|
@ -286,6 +312,7 @@ class Cmd {
|
|||
.option('--locale [locale]', __('language to use (default: en)'))
|
||||
.description(__('generates documentation based on the smart contracts configured'))
|
||||
.action(function(env, options) {
|
||||
checkDeps();
|
||||
i18n.setOrDetectLocale(options.locale);
|
||||
embark.graph({
|
||||
env: env || 'development',
|
||||
|
@ -316,6 +343,10 @@ class Cmd {
|
|||
.command('eject-webpack')
|
||||
.description(__('copy the default webpack config into your dapp for customization'))
|
||||
.action(function() {
|
||||
embark.initConfig('development', {
|
||||
embarkConfig: 'embark.json',
|
||||
interceptLogs: false
|
||||
});
|
||||
embark.ejectWebpack();
|
||||
});
|
||||
}
|
||||
|
|
|
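Note: the cmd/cmd.js changes above call the new checkDeps() helper at the start of each command. A minimal standalone sketch of the same pattern, mirroring the check-dependencies calls exactly as they appear in the diff (the surrounding function and the option handling are illustrative, not Embark's API):

// sketch.js -- illustrative only; mirrors the checkDeps() pattern from cmd/cmd.js above
const path = require('path');

function checkDeps(done) {
  const dappPackage = require(path.join(process.cwd(), 'package.json'));
  require(path.join(process.cwd(), 'embark.json')); // throws if we are not in a Dapp
  require('check-dependencies')(dappPackage, (state) => {
    // state.status is truthy when installed modules do not match package.json
    done(!state.status);
  });
}

checkDeps((ok) => console.log(ok ? 'dependencies ok' : 'please run npm install'));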
@ -79,7 +79,8 @@ class EmbarkController {
|
|||
context: self.context,
|
||||
useDashboard: options.useDashboard,
|
||||
webServerConfig: webServerConfig,
|
||||
webpackConfigName: options.webpackConfigName
|
||||
webpackConfigName: options.webpackConfigName,
|
||||
ipcRole: 'server'
|
||||
});
|
||||
|
||||
async.waterfall([
|
||||
|
@ -127,6 +128,7 @@ class EmbarkController {
|
|||
engine.startService("codeGenerator");
|
||||
engine.startService("namingSystem");
|
||||
engine.startService("console");
|
||||
engine.startService("pluginCommand");
|
||||
|
||||
engine.events.on('check:backOnline:Ethereum', function () {
|
||||
engine.logger.info(__('Ethereum node detected') + '..');
|
||||
|
@ -204,8 +206,10 @@ class EmbarkController {
|
|||
engine.startService("pipeline");
|
||||
}
|
||||
engine.startService("deployment", {onlyCompile: options.onlyCompile});
|
||||
if (!options.onlyCompile) {
|
||||
engine.startService("storage");
|
||||
engine.startService("codeGenerator");
|
||||
}
|
||||
|
||||
callback();
|
||||
},
|
||||
|
@ -266,8 +270,11 @@ class EmbarkController {
|
|||
engine.logger.info(__("loaded plugins") + ": " + pluginList.join(", "));
|
||||
}
|
||||
|
||||
engine.ipc.connect((err) => {
|
||||
if (err) {
|
||||
if (engine.ipc.connected) {
|
||||
engine.startService("codeRunner");
|
||||
engine.startService("console");
|
||||
return callback();
|
||||
}
|
||||
engine.startService("processManager");
|
||||
engine.startService("serviceMonitor");
|
||||
engine.startService("libraryManager");
|
||||
|
@ -277,17 +284,10 @@ class EmbarkController {
|
|||
engine.startService("deployment");
|
||||
engine.startService("storage");
|
||||
engine.startService("codeGenerator");
|
||||
engine.startService("webServer");
|
||||
engine.startService("namingSystem");
|
||||
engine.startService("console");
|
||||
|
||||
return callback();
|
||||
}
|
||||
|
||||
engine.startService("codeRunner");
|
||||
engine.startService("console");
|
||||
engine.startService("pluginCommand");
|
||||
callback();
|
||||
});
|
||||
},
|
||||
function web3IPC(callback) {
|
||||
// Do specific work in case we are connected to a socket:
|
||||
|
|
|
@ -282,9 +282,7 @@ Config.prototype.loadNameSystemConfigFile = function() {
|
|||
// todo: spec out names for registration in the file itself for a dev chain
|
||||
var configObject = {
|
||||
"default": {
|
||||
"available_providers": ["ens"],
|
||||
"provider": "ens",
|
||||
"enabled": true
|
||||
"enabled": false
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -18,6 +18,7 @@ class Engine {
|
|||
this.useDashboard = options.useDashboard;
|
||||
this.webServerConfig = options.webServerConfig;
|
||||
this.webpackConfigName = options.webpackConfigName;
|
||||
this.ipcRole = options.ipcRole || 'client';
|
||||
}
|
||||
|
||||
init(_options, callback) {
|
||||
|
@ -38,14 +39,16 @@ class Engine {
|
|||
utils.interceptLogs(console, this.logger);
|
||||
}
|
||||
|
||||
this.ipc = new IPC({logger: this.logger, ipcRole: 'client'});
|
||||
this.ipc.connect((err) => {
|
||||
if(err) {
|
||||
this.ipc = new IPC({logger: this.logger, ipcRole: 'server'});
|
||||
this.ipc.serve();
|
||||
}
|
||||
this.ipc = new IPC({logger: this.logger, ipcRole: this.ipcRole});
|
||||
if (this.ipc.isClient()) {
|
||||
return this.ipc.connect((_err) => {
|
||||
callback();
|
||||
});
|
||||
} else if (this.ipc.isServer()) {
|
||||
this.ipc.serve();
|
||||
return callback();
|
||||
}
|
||||
callback();
|
||||
}
|
||||
|
||||
registerModule(moduleName, options) {
|
||||
|
@ -70,7 +73,8 @@ class Engine {
|
|||
"processManager": this.processManagerService,
|
||||
"storage": this.storageService,
|
||||
"graph": this.graphService,
|
||||
"codeCoverage": this.codeCoverageService
|
||||
"codeCoverage": this.codeCoverageService,
|
||||
"pluginCommand": this.pluginCommandService
|
||||
};
|
||||
|
||||
let service = services[serviceName];
|
||||
|
@ -130,6 +134,10 @@ class Engine {
|
|||
this.servicesMonitor.startMonitor();
|
||||
}
|
||||
|
||||
pluginCommandService() {
|
||||
this.registerModule('plugin_cmd', {embarkConfigFile: this.embarkConfig, embarkConfig: this.config.embarkConfig, packageFile: 'package.json'});
|
||||
}
|
||||
|
||||
namingSystem(_options) {
|
||||
this.registerModule('ens');
|
||||
}
|
||||
|
|
|
@ -24,10 +24,11 @@ class File {
|
|||
// Only supported in Solidity
|
||||
return callback(null, content);
|
||||
}
|
||||
const regex = /import ["|']([-a-zA-Z0-9@:%_+.~#?&\/=]+)["|'];/g;
|
||||
const regex = /import ["']([-a-zA-Z0-9@:%_+.~#?&\/=]+)["'];/g;
|
||||
let matches;
|
||||
const filesToDownload = [];
|
||||
const pathWithoutFile = path.dirname(self.path);
|
||||
let newContent = content;
|
||||
while ((matches = regex.exec(content))) {
|
||||
const httpFileObj = utils.getExternalContractUrl(matches[1]);
|
||||
const fileObj = {
|
||||
|
@ -36,7 +37,7 @@ class File {
|
|||
};
|
||||
if (httpFileObj) {
|
||||
// Replace http import by filePath import in content
|
||||
content = content.replace(matches[1], httpFileObj.filePath);
|
||||
newContent = newContent.replace(matches[1], httpFileObj.filePath);
|
||||
|
||||
fileObj.fileRelativePath = httpFileObj.filePath;
|
||||
fileObj.url = httpFileObj.url;
|
||||
|
@ -49,7 +50,7 @@ class File {
|
|||
|
||||
if (self.downloadedImports) {
|
||||
// We already parsed this file
|
||||
return callback(null, content);
|
||||
return callback(null, newContent);
|
||||
}
|
||||
self.downloadedImports = true;
|
||||
async.each(filesToDownload, ((fileObj, eachCb) => {
|
||||
|
@ -57,7 +58,7 @@ class File {
|
|||
eachCb();
|
||||
});
|
||||
}), (err) => {
|
||||
callback(err, content);
|
||||
callback(err, newContent);
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -8,7 +8,7 @@ class CodeRunner {
|
|||
this.events = options.events;
|
||||
this.ipc = options.ipc;
|
||||
this.commands = [];
|
||||
this.runCode = new RunCode();
|
||||
this.runCode = new RunCode({logger: this.logger});
|
||||
this.registerIpcEvents();
|
||||
this.IpcClientListen();
|
||||
this.registerEvents();
|
||||
|
|
|
@@ -1,15 +1,19 @@
const vm = require('vm');

class RunCode {
  constructor() {
    this.context = Object.assign({}, {global, console, exports, require, module, __filename, __dirname});
  constructor({logger}) {
    this.logger = logger;
    this.context = Object.assign({}, {
      global, console, exports, require, module, __filename, __dirname, process,
      setTimeout, setInterval, clearTimeout, clearInterval
    });
  }

  doEval(code) {
    try {
      return vm.runInNewContext(code, this.context);
    } catch(e) {
      console.error(e.message);
      this.logger.error(e.message);
    }
  }

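Note: the RunCode change above extends the sandbox context with process and the timer functions, and routes errors through the injected logger. A minimal sketch of the underlying Node vm API, independent of Embark (names here are illustrative):

const vm = require('vm');

// Build a sandbox that shares selected globals with the host process.
const context = Object.assign({}, {console, process, setTimeout, require});

function doEval(code, logger) {
  try {
    return vm.runInNewContext(code, context);
  } catch (e) {
    logger.error(e.message); // route errors through the injected logger, as the commit does
  }
}

doEval("console.log(typeof setTimeout)", console); // prints "function"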
@ -29,6 +29,7 @@ class Console {
|
|||
// TODO: only if the blockchain is actually active!
|
||||
// will need to pass the current embark state here
|
||||
'ipfs - ' + __('instantiated js-ipfs object configured to the current environment (available if ipfs is enabled)'),
|
||||
'swarm - ' + __('instantiated swarm-api object configured to the current environment (available if swarm is enabled)'),
|
||||
'web3 - ' + __('instantiated web3.js object configured to the current environment'),
|
||||
'EmbarkJS - ' + __('EmbarkJS static functions for Storage, Messages, Names, etc.'),
|
||||
'quit - ' + __('to immediately exit (alias: exit)'),
|
||||
|
|
|
@ -117,7 +117,7 @@ class ContractsManager {
|
|||
|
||||
contract.code = compiledContract.code;
|
||||
contract.runtimeBytecode = compiledContract.runtimeBytecode;
|
||||
contract.realRuntimeBytecode = (contract.realRuntimeBytecode || contract.runtimeBytecode);
|
||||
contract.realRuntimeBytecode = (compiledContract.realRuntimeBytecode || compiledContract.runtimeBytecode);
|
||||
contract.swarmHash = compiledContract.swarmHash;
|
||||
contract.gasEstimates = compiledContract.gasEstimates;
|
||||
contract.functionHashes = compiledContract.functionHashes;
|
||||
|
|
|
@ -38,7 +38,7 @@ class ContractDeployer {
|
|||
const match = arg.match(/\$accounts\[([0-9]+)]/);
|
||||
if (match) {
|
||||
if (!accounts[match[1]]) {
|
||||
return cb(__('No corresponding account at index $d', match[1]));
|
||||
return cb(__('No corresponding account at index %d', match[1]));
|
||||
}
|
||||
return cb(null, accounts[match[1]]);
|
||||
}
|
||||
|
|
|
@ -52,6 +52,7 @@ class DeployManager {
|
|||
self.events.emit("deploy:beforeAll");
|
||||
|
||||
const contractDeploys = {};
|
||||
const errors = [];
|
||||
contracts.forEach(contract => {
|
||||
function deploy(result, callback) {
|
||||
if (typeof result === 'function') {
|
||||
|
@ -59,7 +60,12 @@ class DeployManager {
|
|||
}
|
||||
contract._gasLimit = self.gasLimit;
|
||||
self.events.request('deploy:contract', contract, (err) => {
|
||||
callback(err);
|
||||
if (err) {
|
||||
contract.error = err.message || err;
|
||||
self.logger.error(err.message || err);
|
||||
errors.push(err);
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -72,11 +78,12 @@ class DeployManager {
|
|||
contractDeploys[className].push(deploy);
|
||||
});
|
||||
|
||||
async.auto(contractDeploys, function(err, _results) {
|
||||
if (err) {
|
||||
self.logger.error(__("error deploying contracts"));
|
||||
self.logger.error(err.message);
|
||||
self.logger.debug(err.stack);
|
||||
try {
|
||||
async.auto(contractDeploys, function(_err, _results) {
|
||||
if (errors.length) {
|
||||
_err = __("Error deploying contracts. Please fix errors to continue.");
|
||||
self.logger.error(_err);
|
||||
return done(_err);
|
||||
}
|
||||
if (contracts.length === 0) {
|
||||
self.logger.info(__("no contracts found"));
|
||||
|
@ -85,6 +92,10 @@ class DeployManager {
|
|||
self.logger.info(__("finished deploying contracts"));
|
||||
done(err);
|
||||
});
|
||||
} catch (e) {
|
||||
self.logger.error(e.message || e);
|
||||
done(__('Error deploying'));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
|
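Note: the DeployManager change above collects per-contract errors in an array and keeps deploying instead of aborting async.auto on the first failure. A minimal sketch of that pattern with the async library (task names and messages are illustrative):

const async = require('async');

const errors = [];
const tasks = {
  TokenA: (cb) => { errors.push(new Error('TokenA failed')); cb(); }, // record the error, keep going
  TokenB: (cb) => cb(null, 'deployed')
};

async.auto(tasks, (_err, results) => {
  if (errors.length) {
    return console.error('Error deploying contracts. Please fix errors to continue.');
  }
  console.log('finished deploying contracts', results);
});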
|
@ -36,7 +36,7 @@ function registerSubDomain(ens, registrar, resolver, defaultAccount, subdomain,
|
|||
callback(null, transaction);
|
||||
})
|
||||
.catch(err => {
|
||||
logger.error(err);
|
||||
logger.error(err.message || err);
|
||||
callback('Failed to register with error: ' + (err.message || err));
|
||||
});
|
||||
}
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
/*global IpfsApi*/
|
||||
|
||||
let __embarkIPFS = {};
|
||||
const __embarkIPFS = {};
|
||||
|
||||
const NoConnectionError = 'No IPFS connection. Please ensure to call Embark.Storage.setProvider()';
|
||||
|
||||
__embarkIPFS.setProvider = function (options) {
|
||||
var self = this;
|
||||
const self = this;
|
||||
return new Promise(function (resolve, reject) {
|
||||
try {
|
||||
if (!options) {
|
||||
|
@ -13,7 +13,7 @@ __embarkIPFS.setProvider = function (options) {
|
|||
self._ipfsConnection = IpfsApi('localhost', '5001');
|
||||
self._getUrl = "http://localhost:8080/ipfs/";
|
||||
} else {
|
||||
var ipfsOptions = {host: options.host || options.server, protocol: 'http'};
|
||||
const ipfsOptions = {host: options.host || options.server, protocol: 'http'};
|
||||
if (options.protocol) {
|
||||
ipfsOptions.protocol = options.protocol;
|
||||
}
|
||||
|
@ -50,10 +50,9 @@ __embarkIPFS.isAvailable = function () {
|
|||
|
||||
__embarkIPFS.saveText = function (text) {
|
||||
const self = this;
|
||||
var promise = new Promise(function (resolve, reject) {
|
||||
return new Promise(function (resolve, reject) {
|
||||
if (!self._ipfsConnection) {
|
||||
var connectionError = new Error(NoConnectionError);
|
||||
return reject(connectionError);
|
||||
return reject(new Error(NoConnectionError));
|
||||
}
|
||||
self._ipfsConnection.add(self._ipfsConnection.Buffer.from(text), function (err, result) {
|
||||
if (err) {
|
||||
|
@ -63,15 +62,13 @@ __embarkIPFS.saveText = function (text) {
|
|||
resolve(result[0].path);
|
||||
});
|
||||
});
|
||||
|
||||
return promise;
|
||||
};
|
||||
|
||||
__embarkIPFS.get = function (hash) {
|
||||
const self = this;
|
||||
// TODO: detect type, then convert if needed
|
||||
//var ipfsHash = web3.toAscii(hash);
|
||||
var promise = new Promise(function (resolve, reject) {
|
||||
return new Promise(function (resolve, reject) {
|
||||
if (!self._ipfsConnection) {
|
||||
var connectionError = new Error(NoConnectionError);
|
||||
return reject(connectionError);
|
||||
|
@ -83,27 +80,23 @@ __embarkIPFS.get = function (hash) {
|
|||
resolve(files[0].content.toString());
|
||||
});
|
||||
});
|
||||
|
||||
return promise;
|
||||
};
|
||||
|
||||
__embarkIPFS.uploadFile = function (inputSelector) {
|
||||
const self = this;
|
||||
var file = inputSelector[0].files[0];
|
||||
const file = inputSelector[0].files[0];
|
||||
|
||||
if (file === undefined) {
|
||||
throw new Error('no file found');
|
||||
}
|
||||
|
||||
var promise = new Promise(function (resolve, reject) {
|
||||
return new Promise(function (resolve, reject) {
|
||||
if (!self._ipfsConnection) {
|
||||
var connectionError = new Error(NoConnectionError);
|
||||
return reject(connectionError);
|
||||
return reject(new Error(NoConnectionError));
|
||||
}
|
||||
var reader = new FileReader();
|
||||
const reader = new FileReader();
|
||||
reader.onloadend = function () {
|
||||
var fileContent = reader.result;
|
||||
var buffer = self._ipfsConnection.Buffer.from(fileContent);
|
||||
const buffer = self._ipfsConnection.Buffer.from(reader.result);
|
||||
self._ipfsConnection.add(buffer, function (err, result) {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
|
@ -114,8 +107,6 @@ __embarkIPFS.uploadFile = function (inputSelector) {
|
|||
};
|
||||
reader.readAsArrayBuffer(file);
|
||||
});
|
||||
|
||||
return promise;
|
||||
};
|
||||
|
||||
__embarkIPFS.getUrl = function (hash) {
|
||||
|
@ -125,8 +116,7 @@ __embarkIPFS.getUrl = function (hash) {
|
|||
__embarkIPFS.resolve = function (name, callback) {
|
||||
callback = callback || function () {};
|
||||
if (!this._ipfsConnection) {
|
||||
var connectionError = new Error(NoConnectionError);
|
||||
return callback(connectionError);
|
||||
return callback(new Error(NoConnectionError));
|
||||
}
|
||||
|
||||
this._ipfsConnection.name.resolve(name)
|
||||
|
@ -144,6 +134,10 @@ __embarkIPFS.register = function(addr, callback) {
|
|||
return new Error(NoConnectionError);
|
||||
}
|
||||
|
||||
if (addr.length !== 46 || !addr.startsWith('Qm')) {
|
||||
return callback('String is not an IPFS hash');
|
||||
}
|
||||
|
||||
this._ipfsConnection.name.publish("/ipfs/" + addr)
|
||||
.then(res => {
|
||||
callback(null, res.Name);
|
||||
|
|
|
@ -0,0 +1,58 @@
|
|||
let fs = require('./../../core/fs.js');
|
||||
let utils = require('./../../utils/utils.js');
|
||||
let async = require('async');
|
||||
class PluginCommand {
|
||||
constructor(embark) {
|
||||
this.embark = embark;
|
||||
this.config = this.embark.pluginConfig;
|
||||
this.embarkConfig = this.config.embarkConfig;
|
||||
this.registerCommand();
|
||||
}
|
||||
registerCommand() {
|
||||
const self = this;
|
||||
self.embark.registerConsoleCommand((cmd, _options) => {
|
||||
let cmdArray = cmd.split(' ');
|
||||
cmdArray = cmdArray.filter(cmd => cmd.trim().length > 0);
|
||||
let cmdName = cmdArray[0];
|
||||
return {
|
||||
match: () => cmdName === 'plugin',
|
||||
process: (callback) => {
|
||||
if(cmdArray.length < 3 || cmdArray[1] !== 'install' || typeof cmdArray[2] === 'undefined') {
|
||||
return callback('invalid use of plugin command. Please use plugin install <package>');
|
||||
}
|
||||
let npmInstall = ['npm', 'install', '--save'];
|
||||
npmInstall = npmInstall.concat(cmdArray.slice(2));
|
||||
let npmPackage = npmInstall[3];
|
||||
self.embark.logger.info(`Installing npm package ${npmPackage} ...`);
|
||||
async.waterfall([
|
||||
function npmInstallAsync(cb) {
|
||||
utils.runCmd(npmInstall.join(' '), {silent: false, exitOnError: false}, (err) => {
|
||||
if(err) {
|
||||
return cb(err);
|
||||
}
|
||||
cb();
|
||||
});
|
||||
},
|
||||
function addToEmbarkConfig(cb) {
|
||||
// get the installed package from package.json
|
||||
let packageFile = fs.readJSONSync(self.config.packageFile);
|
||||
let dependencies = Object.keys(packageFile.dependencies);
|
||||
let installedPackage = dependencies.filter((dep) => npmPackage.indexOf(dep) >=0);
|
||||
self.embarkConfig.plugins[installedPackage[0]] = {};
|
||||
fs.writeFile(self.config.embarkConfigFile, JSON.stringify(self.embarkConfig, null, 2), cb);
|
||||
}
|
||||
], (err) => {
|
||||
if(err) {
|
||||
let errorMessage = `Error installing npm package ${npmPackage}. Please visit https://embark.status.im/plugins/ for all supported plugins`;
|
||||
self.embark.logger.error(errorMessage);
|
||||
return callback('Error occurred');
|
||||
}
|
||||
callback(null, `npm package ${npmPackage} successfully installed as a plugin`);
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = PluginCommand;
|
|
@ -1,6 +1,6 @@
|
|||
/*global web3 */
|
||||
let __embarkSwarm = {};
|
||||
const bytes = require("eth-lib/lib/bytes");
|
||||
let __embarkSwarm = {_swarmConnection: undefined};
|
||||
import SwarmAPI from 'swarm-api';
|
||||
|
||||
__embarkSwarm.setProvider = function (options) {
|
||||
let protocol = options.protocol || 'http';
|
||||
|
@ -13,10 +13,10 @@ __embarkSwarm.setProvider = function (options) {
|
|||
return new Promise((resolve, reject) => {
|
||||
try {
|
||||
if (!web3.bzz.currentProvider && !options.useOnlyGivenProvider) {
|
||||
web3.bzz.setProvider(this._connectUrl);
|
||||
this._swarmConnection = new SwarmAPI({gateway: this._connectUrl});
|
||||
}
|
||||
else if (options.useOnlyGivenProvider && web3.bzz.givenProvider !== null) {
|
||||
web3.bzz.setProvider(web3.bzz.givenProvider);
|
||||
this._swarmConnection = new SwarmAPI({gateway: web3.bzz.givenProvider});
|
||||
}
|
||||
resolve(this);
|
||||
} catch (err) {
|
||||
|
@ -29,21 +29,20 @@ __embarkSwarm.setProvider = function (options) {
|
|||
__embarkSwarm.isAvailable = function () {
|
||||
return new Promise((resolve, reject) => {
|
||||
// if web3 swarm object doesn't exist
|
||||
if (!web3.bzz) {
|
||||
if (!this._swarmConnection) {
|
||||
return resolve(false);
|
||||
}
|
||||
// swarm obj exists, but has no provider set (seems to happen a LOT!),
|
||||
// try setting the provider to our currently set provider again
|
||||
else if(!web3.bzz.currentProvider && this._config.host){
|
||||
web3.bzz.setProvider(this._connectUrl);
|
||||
else if (!this._swarmConnection.gateway && this._config.host) {
|
||||
this._swarmConnection.gateway = this._connectUrl;
|
||||
}
|
||||
if (!web3.bzz.currentProvider) {
|
||||
if (!this._swarmConnection.gateway) {
|
||||
return resolve(false);
|
||||
}
|
||||
web3.bzz.isAvailable()
|
||||
.then(resolve)
|
||||
.catch(() => {
|
||||
reject(this._connectError);
|
||||
this._swarmConnection.isAvailable((err, isAvailable) => {
|
||||
if (err) return reject(err);
|
||||
resolve(isAvailable);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
@ -54,10 +53,11 @@ __embarkSwarm.saveText = function (text) {
|
|||
if (!isAvailable) {
|
||||
return reject(this._connectError);
|
||||
}
|
||||
web3.bzz.upload(text)
|
||||
.then(resolve)
|
||||
.catch(reject);
|
||||
}).catch(reject);
|
||||
this._swarmConnection.uploadRaw(text, (err, hash) => {
|
||||
if (err) return reject(err);
|
||||
resolve(hash);
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -67,10 +67,11 @@ __embarkSwarm.get = function (hash) {
|
|||
if (!isAvailable) {
|
||||
return reject(this._connectError);
|
||||
}
|
||||
web3.bzz.download(hash)
|
||||
.then((uint8Array) => resolve(bytes.toString(bytes.fromUint8Array(uint8Array))))
|
||||
.catch(reject);
|
||||
}).catch(reject);
|
||||
this._swarmConnection.downloadRaw(hash, (err, content) => {
|
||||
if (err) return reject(err);
|
||||
resolve(content);
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -89,10 +90,11 @@ __embarkSwarm.uploadFile = function (inputSelector) {
|
|||
if (!isAvailable) {
|
||||
return reject(this._connectError);
|
||||
}
|
||||
web3.bzz.upload(fileContent)
|
||||
.then(resolve)
|
||||
.catch(reject);
|
||||
}).catch(reject);
|
||||
this._swarmConnection.uploadRaw(fileContent, (err, hash) => {
|
||||
if (err) return reject(err);
|
||||
resolve(hash);
|
||||
});
|
||||
});
|
||||
};
|
||||
reader.onerror = reject;
|
||||
reader.readAsArrayBuffer(file);
|
||||
|
@ -100,7 +102,7 @@ __embarkSwarm.uploadFile = function (inputSelector) {
|
|||
};
|
||||
|
||||
__embarkSwarm.getUrl = function (hash) {
|
||||
return `${this._config.getUrl || this._connectUrl + '/bzz:/'}${hash}`;
|
||||
return `${this._connectUrl}/bzz-raw:/${hash}`;
|
||||
};
|
||||
|
||||
const NotAvailable = "Not available with Swarm";
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
const UploadSwarm = require('./upload.js');
|
||||
const utils = require('../../utils/utils.js');
|
||||
const fs = require('../../core/fs.js');
|
||||
const Web3Bzz = require('web3-bzz');
|
||||
const SwarmAPI = require('swarm-api');
|
||||
// TODO: not great, breaks module isolation
|
||||
const StorageProcessesLauncher = require('../storage/storageProcessesLauncher');
|
||||
const constants = require('../../constants.json');
|
||||
|
||||
class Swarm {
|
||||
|
||||
|
@ -27,21 +28,28 @@ class Swarm {
|
|||
return;
|
||||
}
|
||||
|
||||
this.bzz = new Web3Bzz(this.providerUrl);
|
||||
this.swarm = new SwarmAPI({gateway: this.providerUrl});
|
||||
|
||||
this.setServiceCheck();
|
||||
this.addProviderToEmbarkJS();
|
||||
// TODO add check to see if we need to start process
|
||||
this.startProcess(() => {});
|
||||
this.addObjectToConsole();
|
||||
this.registerUploadCommand();
|
||||
|
||||
this._checkService((err) => {
|
||||
if (!err) {
|
||||
// swarm needs geth to be running first
|
||||
this.events.once(constants.blockchain.blockchainReady, () => {
|
||||
this.swarm.isAvailable((err, isAvailable) => {
|
||||
if (!err || isAvailable) {
|
||||
this.logger.info("Swarm node found, using currently running node");
|
||||
return;
|
||||
}
|
||||
self.logger.info("Swarm node not found, attempting to start own node");
|
||||
self.startProcess(() => {});
|
||||
this.logger.info("SWARM: Swarm node not found, attempting to start own node");
|
||||
return this.startProcess(() => {});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
addObjectToConsole() {
|
||||
this.events.emit("runcode:register", "swarm", this.swarm);
|
||||
}
|
||||
|
||||
setServiceCheck() {
|
||||
|
@ -56,22 +64,20 @@ class Swarm {
|
|||
});
|
||||
|
||||
self.events.request("services:register", 'Swarm', function (cb) {
|
||||
self.logger.trace(`Checking Swarm availability on ${self.bzz.currentProvider}...`);
|
||||
self.logger.trace(`Checking Swarm availability on ${self.providerUrl}...`);
|
||||
self._checkService((err, result) => {
|
||||
if (err) {
|
||||
self.logger.trace("Check Swarm availability error: " + err);
|
||||
return cb({name: "Swarm ", status: 'off'});
|
||||
}
|
||||
self.logger.trace("Swarm " + (result ? '':'on') + "available");
|
||||
self.logger.trace("Swarm " + (result ? '' : 'un') + "available");
|
||||
return cb({name: "Swarm ", status: result ? 'on' : 'off'});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
_checkService(cb) {
|
||||
this.bzz.isAvailable().then(result => {
|
||||
cb(null, result);
|
||||
}).catch(cb);
|
||||
this.swarm.isAvailable(cb);
|
||||
}
|
||||
|
||||
addProviderToEmbarkJS() {
|
||||
|
@ -101,8 +107,8 @@ class Swarm {
|
|||
let upload_swarm = new UploadSwarm({
|
||||
buildDir: self.buildDir || 'dist/',
|
||||
storageConfig: self.storageConfig,
|
||||
getUrl: self.getUrl,
|
||||
bzz: self.bzz
|
||||
providerUrl: self.providerUrl,
|
||||
swarm: self.swarm
|
||||
});
|
||||
|
||||
upload_swarm.deploy(cb);
|
||||
|
|
|
@ -5,33 +5,26 @@ class Swarm {
|
|||
constructor(options) {
|
||||
this.options = options;
|
||||
this.buildDir = options.buildDir || 'dist/';
|
||||
this.bzz = options.bzz;
|
||||
this.getUrl = options.getUrl;
|
||||
this.swarm = options.swarm;
|
||||
this.providerUrl = options.providerUrl;
|
||||
}
|
||||
|
||||
deploy(cb) {
|
||||
console.log(__("deploying to swarm!"));
|
||||
let self = this;
|
||||
let bzz = this.bzz;
|
||||
const self = this;
|
||||
const swarm = this.swarm;
|
||||
async.waterfall([
|
||||
function runCommand(callback) {
|
||||
console.log(("=== " + __("adding %s to swarm", self.buildDir)).green);
|
||||
bzz.upload({
|
||||
path: self.buildDir, // path to data / file / directory
|
||||
kind: "directory", // could also be "file" or "data"
|
||||
defaultFile: "index.html" // optional, and only for kind === "directory"
|
||||
})
|
||||
.then((success) => {
|
||||
callback(null, success);
|
||||
})
|
||||
.catch(callback);
|
||||
swarm.uploadDirectory(self.buildDir, 'index.html', callback);
|
||||
},
|
||||
function printUrls(dir_hash, callback) {
|
||||
if (!dir_hash) {
|
||||
return callback('No directory hash was returned');
|
||||
}
|
||||
console.log(("=== " + __("DApp available at") + ` ${self.getUrl}${dir_hash}/`).green);
|
||||
console.log(("=== " + __("DApp available at") + ` https://swarm-gateways.net/bzz:/${dir_hash}`).green);
|
||||
console.log(("=== " + __("DApp available at") + ` ${self.providerUrl}/bzz:/${dir_hash}/index.html`).green);
|
||||
console.log(("=== " + __("DApp available at") + ` https://swarm-gateways.net/bzz:/${dir_hash}/index.html`).green);
|
||||
console.log(("=== " + __("NOTE: Swarm AND a blockchain node must be running for the dApp to work correctly (ie 'embark run')").yellow));
|
||||
|
||||
callback(null, dir_hash);
|
||||
}
|
||||
|
|
|
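Note: for reference, the swarm-api calls used in the new upload.js above, in a standalone sketch. The method names mirror the diff; the gateway URL and directory are placeholders and the package API has not been verified beyond what the diff shows:

const SwarmAPI = require('swarm-api');

const swarm = new SwarmAPI({gateway: 'http://localhost:8500'}); // placeholder gateway

swarm.isAvailable((err, isAvailable) => {
  if (err || !isAvailable) return console.error('Swarm node not reachable');
  // uploadDirectory(path, defaultFile, callback), as used by the new upload.js
  swarm.uploadDirectory('dist/', 'index.html', (err, dirHash) => {
    if (err) return console.error(err);
    console.log(`DApp available at https://swarm-gateways.net/bzz:/${dirHash}/index.html`);
  });
});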
@ -99,7 +99,42 @@ class Test {
|
|||
if (!this.sim) {
|
||||
this.sim = getSimulator();
|
||||
}
|
||||
this.web3.setProvider(this.sim.provider(this.simOptions));
|
||||
|
||||
let simProvider = this.sim.provider(this.simOptions);
|
||||
|
||||
// Here we patch the sendAsync method on the provider. The goal behind this is to force pure/constant/view calls to become
|
||||
// transactions, so that we can pull in execution traces and account for those executions in code coverage.
|
||||
//
|
||||
// Instead of a simple call, here's what happens:
|
||||
//
|
||||
// 1) A transaction is sent with the same payload, and a pre-defined gas price;
|
||||
// 2) We wait for the transaction to be mined by asking for the receipt;
|
||||
// 3) Once we get the receipt back, we dispatch the real call and pass the original callback;
|
||||
//
|
||||
// This will still allow tests to get the return value from the call and run contracts unmodified.
|
||||
simProvider.realSendAsync = simProvider.sendAsync.bind(simProvider);
|
||||
simProvider.sendAsync = function(payload, cb) {
|
||||
if(payload.method !== 'eth_call') {
|
||||
return simProvider.realSendAsync(payload, cb);
|
||||
}
|
||||
|
||||
let newParams = Object.assign({}, payload.params[0], {gasPrice: '0x77359400'});
|
||||
let newPayload = {
|
||||
id: payload.id + 1,
|
||||
method: 'eth_sendTransaction',
|
||||
params: [newParams],
|
||||
jsonrpc: payload.jsonrpc
|
||||
};
|
||||
|
||||
simProvider.realSendAsync(newPayload, (_err, response) => {
|
||||
let txHash = response.result;
|
||||
self.web3.eth.getTransactionReceipt(txHash, (_err, _res) => {
|
||||
simProvider.realSendAsync(payload, cb);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
this.web3.setProvider(simProvider);
|
||||
callback();
|
||||
}
|
||||
|
||||
|
@ -144,26 +179,28 @@ class Test {
|
|||
self.engine.startService("codeCoverage");
|
||||
|
||||
if (self.options.node === 'embark') {
|
||||
return self.engine.ipc.connect((err) => {
|
||||
if (err) {
|
||||
this.engine.logger.error(err.message || err);
|
||||
this.engine.logger.error("Could not connect to Embark's IPC. Is embark running?");
|
||||
if (!self.engine.ipc.connected) {
|
||||
self.engine.logger.error("Could not connect to Embark's IPC. Is embark running?");
|
||||
process.exit(1);
|
||||
}
|
||||
self.engine.ipc.request('blockchain:node', {}, (err, node) => {
|
||||
if (err) {
|
||||
return self.engine.logger.error(err.message || err);
|
||||
}
|
||||
self.options.node = node;
|
||||
cb();
|
||||
});
|
||||
});
|
||||
return self.connectToIpcNode(cb);
|
||||
}
|
||||
cb();
|
||||
}
|
||||
], callback);
|
||||
}
|
||||
|
||||
connectToIpcNode(cb) {
|
||||
this.engine.ipc.request('blockchain:node', {}, (err, node) => {
|
||||
if (err) {
|
||||
this.engine.logger.error(err.message || err);
|
||||
return cb();
|
||||
}
|
||||
this.options.node = node;
|
||||
cb();
|
||||
});
|
||||
}
|
||||
|
||||
onReady(callback) {
|
||||
const self = this;
|
||||
if (this.ready) {
|
||||
|
|
|
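Note: the test provider patch above forces eth_call payloads through a transaction first so execution traces exist for code coverage. A stripped-down sketch of the same wrapping idea; the provider and web3 objects are assumed from the surrounding test setup, and the gas price constant is the one used in the diff:

// sketch of the sendAsync wrapper from the Test class above (illustrative)
function patchProvider(provider, web3) {
  provider.realSendAsync = provider.sendAsync.bind(provider);
  provider.sendAsync = function (payload, cb) {
    if (payload.method !== 'eth_call') {
      return provider.realSendAsync(payload, cb); // pass everything else straight through
    }
    const txPayload = {
      id: payload.id + 1,
      jsonrpc: payload.jsonrpc,
      method: 'eth_sendTransaction',
      params: [Object.assign({}, payload.params[0], {gasPrice: '0x77359400'})]
    };
    provider.realSendAsync(txPayload, (_err, response) => {
      // wait for the mirrored transaction to be mined, then run the original call
      web3.eth.getTransactionReceipt(response.result, () => provider.realSendAsync(payload, cb));
    });
  };
}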
@ -22,7 +22,13 @@ class TemplateGenerator {
|
|||
console.log(__('Installing Template from ' + uri + '....').green);
|
||||
|
||||
fs.mkdirpSync(utils.dirname(tmpFilePath));
|
||||
utils.downloadFile(url, tmpFilePath, () => {
|
||||
utils.downloadFile(url, tmpFilePath, (err) => {
|
||||
if (err) {
|
||||
console.error(err.red);
|
||||
console.error('Does the template really exist?'.red);
|
||||
console.error(`Embark's supported templates: https://embark.status.im/templates/`.green);
|
||||
process.exit(1);
|
||||
}
|
||||
utils.extractZip(tmpFilePath, fspath, {
|
||||
map: file => {
|
||||
let fixed_path = file.path.split('/');
|
||||
|
|
|
@ -129,17 +129,37 @@ function pingEndpoint(host, port, type, protocol, origin, callback) {
|
|||
});
|
||||
}
|
||||
|
||||
function runCmd(cmd, options) {
|
||||
function runCmd(cmd, options, callback) {
|
||||
const shelljs = require('shelljs');
|
||||
let result = shelljs.exec(cmd, options || {silent: true});
|
||||
if (result.code !== 0) {
|
||||
console.log("error doing.. " + cmd);
|
||||
console.log(result.output);
|
||||
if (result.stderr !== undefined) {
|
||||
console.log(result.stderr);
|
||||
options = Object.assign({silent: true, exitOnError: true, async: true}, options || {});
|
||||
const outputToConsole = !options.silent;
|
||||
options.silent = true;
|
||||
let result = shelljs.exec(cmd, options, function (code, stdout) {
|
||||
if(code !== 0) {
|
||||
if (options.exitOnError) {
|
||||
return exit();
|
||||
}
|
||||
exit();
|
||||
if(typeof callback === 'function') {
|
||||
callback(`shell returned code ${code}`);
|
||||
}
|
||||
} else {
|
||||
if(typeof callback === 'function') {
|
||||
return callback(null, stdout);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
result.stdout.on('data', function(data) {
|
||||
if(outputToConsole) {
|
||||
console.log(data);
|
||||
}
|
||||
});
|
||||
|
||||
result.stderr.on('data', function(data) {
|
||||
if (outputToConsole) {
|
||||
console.log(data);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function cd(folder) {
|
||||
|
@ -160,13 +180,17 @@ function downloadFile(url, dest, cb) {
|
|||
const o_fs = require('fs-extra');
|
||||
var file = o_fs.createWriteStream(dest);
|
||||
(url.substring(0, 5) === 'https' ? https : http).get(url, function (response) {
|
||||
if (response.statusCode !== 200) {
|
||||
cb(`Download failed, response code ${response.statusCode}`);
|
||||
return;
|
||||
}
|
||||
response.pipe(file);
|
||||
file.on('finish', function () {
|
||||
file.close(cb);
|
||||
});
|
||||
}).on('error', function (err) {
|
||||
o_fs.unlink(dest);
|
||||
if (cb) cb(err.message);
|
||||
cb(err.message);
|
||||
});
|
||||
}
|
||||
|
||||
|
|
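Note: the runCmd rewrite above switches shelljs.exec to its async form with a completion callback and streamed output. A minimal sketch of how a caller uses the new signature, mirroring the plugin install command earlier in this diff (the require path and command are illustrative):

const utils = require('./lib/utils/utils.js'); // path assumed from the repo layout above

utils.runCmd('npm install --save some-plugin', {silent: false, exitOnError: false}, (err, stdout) => {
  if (err) {
    return console.error(`install failed: ${err}`);
  }
  console.log('install finished', stdout);
});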
File diff suppressed because it is too large
@ -81,6 +81,7 @@
|
|||
"solc": "0.4.24",
|
||||
"string-replace-async": "^1.2.1",
|
||||
"style-loader": "^0.19.0",
|
||||
"swarm-api": "^0.1.2",
|
||||
"tar": "^3.1.5",
|
||||
"toposort": "^1.0.0",
|
||||
"underscore": "^1.9.0",
|
||||
|
|
|
@ -26,7 +26,12 @@ module.exports = {
|
|||
proxy: true, // Proxy is used to present meaningful information about transactions
|
||||
targetGasLimit: 8000000, // Target gas limit sets the artificial target gas floor for the blocks to mine
|
||||
simulatorMnemonic: "example exile argue silk regular smile grass bomb merge arm assist farm", // Mnemonic used by the simulator to generate a wallet
|
||||
simulatorBlocktime: 0 // Specify blockTime in seconds for automatic mining. Default is 0 and no auto-mining.
|
||||
simulatorBlocktime: 0, // Specify blockTime in seconds for automatic mining. Default is 0 and no auto-mining.
|
||||
account: {
|
||||
// numAccounts: 3, // When specified, creates accounts for use in the dapp. This option only works in the development environment, and can be used as a quick start option that bypasses the need for MetaMask in development. These accounts are unlocked and funded with the below settings.
|
||||
// password: "config/development/password", // Password for the created accounts (as specified in the `numAccounts` setting)
|
||||
// balance: "5 ether" // Balance to be given to the created accounts (as specified in the `numAccounts` setting)
|
||||
}
|
||||
},
|
||||
|
||||
// merges with the settings in default
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
module.exports = {
|
||||
// default applies to all environments
|
||||
default: {
|
||||
enabled: true,
|
||||
available_providers: ["ens"],
|
||||
provider: "ens"
|
||||
},
|
||||
|
|
|
@ -21,16 +21,18 @@ class App extends React.Component {
|
|||
error: null,
|
||||
activeKey: 1,
|
||||
whisperEnabled: false,
|
||||
storageEnabled: false
|
||||
storageEnabled: false,
|
||||
blockchainEnabled: false
|
||||
};
|
||||
}
|
||||
|
||||
componentDidMount() {
|
||||
EmbarkJS.onReady((err) => {
|
||||
this.setState({blockchainEnabled: true});
|
||||
if (err) {
|
||||
// If err is not null then it means something went wrong connecting to ethereum
|
||||
// you can use this to ask the user to enable metamask for e.g
|
||||
return this.setState({error: err});
|
||||
return this.setState({error: err.message || err});
|
||||
}
|
||||
if (EmbarkJS.isNewWeb3()) {
|
||||
EmbarkJS.Messages.Providers.whisper.getWhisperVersion((err, _version) => {
|
||||
|
@ -81,7 +83,7 @@ class App extends React.Component {
|
|||
return (<div>
|
||||
<h3>Embark - Usage Example</h3>
|
||||
<Tabs onSelect={this.handleSelect} activeKey={this.state.activeKey} id="uncontrolled-tab-example">
|
||||
<Tab eventKey={1} title="Blockchain">
|
||||
<Tab eventKey={1} title={this._renderStatus('Blockchain', this.state.blockchainEnabled)}>
|
||||
<Blockchain/>
|
||||
</Tab>
|
||||
<Tab eventKey={2} title={this._renderStatus('Decentralized Storage', this.state.storageEnabled)}>
|
||||
|
|
|
@ -26,7 +26,12 @@ module.exports = {
|
|||
proxy: true, // Proxy is used to present meaningful information about transactions
|
||||
targetGasLimit: 8000000, // Target gas limit sets the artificial target gas floor for the blocks to mine
|
||||
simulatorMnemonic: "example exile argue silk regular smile grass bomb merge arm assist farm", // Mnemonic used by the simulator to generate a wallet
|
||||
simulatorBlocktime: 0 // Specify blockTime in seconds for automatic mining. Default is 0 and no auto-mining.
|
||||
simulatorBlocktime: 0, // Specify blockTime in seconds for automatic mining. Default is 0 and no auto-mining.
|
||||
account: {
|
||||
// numAccounts: 3, // When specified, creates accounts for use in the dapp. This option only works in the development environment, and can be used as a quick start option that bypasses the need for MetaMask in development. These accounts are unlocked and funded with the below settings.
|
||||
// password: "config/development/password", // Password for the created accounts (as specified in the `numAccounts` setting)
|
||||
// balance: "5 ether" // Balance to be given to the created accounts (as specified in the `numAccounts` setting)
|
||||
}
|
||||
},
|
||||
|
||||
// merges with the settings in default
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
module.exports = {
|
||||
// default applies to all environments
|
||||
default: {
|
||||
enabled: true,
|
||||
available_providers: ["ens"],
|
||||
provider: "ens"
|
||||
},
|
||||
|
|
|
@ -1,26 +0,0 @@
|
|||
pragma solidity ^0.4.18;
|
||||
|
||||
interface ENS {
|
||||
|
||||
// Logged when the owner of a node assigns a new owner to a subnode.
|
||||
event NewOwner(bytes32 indexed node, bytes32 indexed label, address owner);
|
||||
|
||||
// Logged when the owner of a node transfers ownership to a new account.
|
||||
event Transfer(bytes32 indexed node, address owner);
|
||||
|
||||
// Logged when the resolver for a node changes.
|
||||
event NewResolver(bytes32 indexed node, address resolver);
|
||||
|
||||
// Logged when the TTL of a node changes
|
||||
event NewTTL(bytes32 indexed node, uint64 ttl);
|
||||
|
||||
|
||||
function setSubnodeOwner(bytes32 node, bytes32 label, address owner) external;
|
||||
function setResolver(bytes32 node, address resolver) external;
|
||||
function setOwner(bytes32 node, address owner) external;
|
||||
function setTTL(bytes32 node, uint64 ttl) external;
|
||||
function owner(bytes32 node) external view returns (address);
|
||||
function resolver(bytes32 node) external view returns (address);
|
||||
function ttl(bytes32 node) external view returns (uint64);
|
||||
|
||||
}
|
|
@ -1,99 +0,0 @@
|
|||
pragma solidity ^0.4.18;
|
||||
|
||||
import './ENS.sol';
|
||||
|
||||
/**
|
||||
* The ENS registry contract.
|
||||
*/
|
||||
contract ENSRegistry is ENS {
|
||||
struct Record {
|
||||
address owner;
|
||||
address resolver;
|
||||
uint64 ttl;
|
||||
}
|
||||
|
||||
mapping (bytes32 => Record) records;
|
||||
|
||||
// Permits modifications only by the owner of the specified node.
|
||||
modifier only_owner(bytes32 node, address owner) {
|
||||
require(records[node].owner == 0 || records[node].owner == msg.sender || records[node].owner == owner);
|
||||
_;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Constructs a new ENS registrar.
|
||||
*/
|
||||
constructor() public {
|
||||
records[0x0].owner = msg.sender;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Transfers ownership of a node to a new address. May only be called by the current owner of the node.
|
||||
* @param node The node to transfer ownership of.
|
||||
* @param owner The address of the new owner.
|
||||
*/
|
||||
function setOwner(bytes32 node, address owner) public only_owner(node, owner) {
|
||||
emit Transfer(node, owner);
|
||||
records[node].owner = owner;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Transfers ownership of a subnode sha3(node, label) to a new address. May only be called by the owner of the parent node.
|
||||
* @param node The parent node.
|
||||
* @param label The hash of the label specifying the subnode.
|
||||
* @param owner The address of the new owner.
|
||||
*/
|
||||
function setSubnodeOwner(bytes32 node, bytes32 label, address owner) public only_owner(node, owner) {
|
||||
bytes32 subnode = keccak256(abi.encodePacked(node, label));
|
||||
emit NewOwner(node, label, owner);
|
||||
records[subnode].owner = owner;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Sets the resolver address for the specified node.
|
||||
* @param node The node to update.
|
||||
* @param resolver The address of the resolver.
|
||||
*/
|
||||
function setResolver(bytes32 node, address resolver) public only_owner(node, 0x0) {
|
||||
emit NewResolver(node, resolver);
|
||||
records[node].resolver = resolver;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Sets the TTL for the specified node.
|
||||
* @param node The node to update.
|
||||
* @param ttl The TTL in seconds.
|
||||
*/
|
||||
function setTTL(bytes32 node, uint64 ttl) public only_owner(node, 0x0) {
|
||||
emit NewTTL(node, ttl);
|
||||
records[node].ttl = ttl;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the address that owns the specified node.
|
||||
* @param node The specified node.
|
||||
* @return address of the owner.
|
||||
*/
|
||||
function owner(bytes32 node) public view returns (address) {
|
||||
return records[node].owner;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the address of the resolver for the specified node.
|
||||
* @param node The specified node.
|
||||
* @return address of the resolver.
|
||||
*/
|
||||
function resolver(bytes32 node) public view returns (address) {
|
||||
return records[node].resolver;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the TTL of a node, and any records associated with it.
|
||||
* @param node The specified node.
|
||||
* @return ttl of the node.
|
||||
*/
|
||||
function ttl(bytes32 node) public view returns (uint64) {
|
||||
return records[node].ttl;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,38 +0,0 @@
|
|||
pragma solidity ^0.4.18;
|
||||
|
||||
import './ENS.sol';
|
||||
import './Resolver.sol';
|
||||
|
||||
/**
|
||||
* A registrar that allocates subdomains to the first person to claim them.
|
||||
*/
|
||||
contract FIFSRegistrar {
|
||||
ENS ens;
|
||||
bytes32 rootNode;
|
||||
|
||||
modifier only_owner(bytes32 subnode) {
|
||||
bytes32 node = keccak256(abi.encodePacked(rootNode, subnode));
|
||||
address currentOwner = ens.owner(node);
|
||||
require(currentOwner == 0 || currentOwner == msg.sender);
|
||||
_;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
* @param ensAddr The address of the ENS registry.
|
||||
* @param node The node that this registrar administers.
|
||||
*/
|
||||
constructor(ENS ensAddr, bytes32 node) public {
|
||||
ens = ensAddr;
|
||||
rootNode = node;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a name, or change the owner of an existing registration.
|
||||
* @param subnode The hash of the label to register.
|
||||
* @param owner The address of the new owner.
|
||||
*/
|
||||
function register(bytes32 subnode, address owner) public only_owner(subnode) {
|
||||
ens.setSubnodeOwner(rootNode, subnode, owner);
|
||||
}
|
||||
}
|
|
@ -1,191 +0,0 @@
|
|||
pragma solidity ^0.4.23;
|
||||
|
||||
import "./ENS.sol";
|
||||
|
||||
/**
|
||||
* A simple resolver anyone can use; only allows the owner of a node to set its
|
||||
* address.
|
||||
*/
|
||||
contract Resolver {
|
||||
event AddrChanged(bytes32 indexed node, address a);
|
||||
event ContentChanged(bytes32 indexed node, bytes32 hash);
|
||||
event NameChanged(bytes32 indexed node, string name);
|
||||
event ABIChanged(bytes32 indexed node, uint256 indexed contentType);
|
||||
event PubkeyChanged(bytes32 indexed node, bytes32 x, bytes32 y);
|
||||
event TextChanged(bytes32 indexed node, string indexedKey, string key);
|
||||
|
||||
struct PublicKey {
|
||||
bytes32 x;
|
||||
bytes32 y;
|
||||
}
|
||||
|
||||
struct Record {
|
||||
address addr;
|
||||
bytes32 content;
|
||||
string name;
|
||||
PublicKey pubkey;
|
||||
mapping(string=>string) text;
|
||||
mapping(uint256=>bytes) abis;
|
||||
}
|
||||
|
||||
ENS ens;
|
||||
|
||||
mapping (bytes32 => Record) records;
|
||||
|
||||
modifier only_owner(bytes32 node) {
|
||||
address currentOwner = ens.owner(node);
|
||||
require(currentOwner == 0 || currentOwner == msg.sender);
|
||||
_;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
* @param ensAddr The ENS registrar contract.
|
||||
*/
|
||||
constructor(ENS ensAddr) public {
|
||||
ens = ensAddr;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the address associated with an ENS node.
|
||||
* May only be called by the owner of that node in the ENS registry.
|
||||
* @param node The node to update.
|
||||
* @param addr The address to set.
|
||||
*/
|
||||
function setAddr(bytes32 node, address addr) public only_owner(node) {
|
||||
records[node].addr = addr;
|
||||
emit AddrChanged(node, addr);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the content hash associated with an ENS node.
|
||||
* May only be called by the owner of that node in the ENS registry.
|
||||
* Note that this resource type is not standardized, and will likely change
|
||||
* in future to a resource type based on multihash.
|
||||
* @param node The node to update.
|
||||
* @param hash The content hash to set
|
||||
*/
|
||||
function setContent(bytes32 node, bytes32 hash) public only_owner(node) {
|
||||
records[node].content = hash;
|
||||
emit ContentChanged(node, hash);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the name associated with an ENS node, for reverse records.
|
||||
* May only be called by the owner of that node in the ENS registry.
|
||||
* @param node The node to update.
|
||||
* @param name The name to set.
|
||||
*/
|
||||
function setName(bytes32 node, string name) public only_owner(node) {
|
||||
records[node].name = name;
|
||||
emit NameChanged(node, name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the ABI associated with an ENS node.
|
||||
* Nodes may have one ABI of each content type. To remove an ABI, set it to
|
||||
* the empty string.
|
||||
* @param node The node to update.
|
||||
* @param contentType The content type of the ABI
|
||||
* @param data The ABI data.
|
||||
*/
|
||||
function setABI(bytes32 node, uint256 contentType, bytes data) public only_owner(node) {
|
||||
// Content types must be powers of 2
|
||||
require(((contentType - 1) & contentType) == 0);
|
||||
|
||||
records[node].abis[contentType] = data;
|
||||
emit ABIChanged(node, contentType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the SECP256k1 public key associated with an ENS node.
|
||||
* @param node The ENS node to query
|
||||
* @param x the X coordinate of the curve point for the public key.
|
||||
* @param y the Y coordinate of the curve point for the public key.
|
||||
*/
|
||||
function setPubkey(bytes32 node, bytes32 x, bytes32 y) public only_owner(node) {
|
||||
records[node].pubkey = PublicKey(x, y);
|
||||
emit PubkeyChanged(node, x, y);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the text data associated with an ENS node and key.
|
||||
* May only be called by the owner of that node in the ENS registry.
|
||||
* @param node The node to update.
|
||||
* @param key The key to set.
|
||||
* @param value The text data value to set.
|
||||
*/
|
||||
function setText(bytes32 node, string key, string value) public only_owner(node) {
|
||||
records[node].text[key] = value;
|
||||
emit TextChanged(node, key, key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the text data associated with an ENS node and key.
|
||||
* @param node The ENS node to query.
|
||||
* @param key The text data key to query.
|
||||
* @return The associated text data.
|
||||
*/
|
||||
function text(bytes32 node, string key) public view returns (string) {
|
||||
return records[node].text[key];
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the SECP256k1 public key associated with an ENS node.
|
||||
* Defined in EIP 619.
|
||||
* @param node The ENS node to query
|
||||
* @return x, y the X and Y coordinates of the curve point for the public key.
|
||||
*/
|
||||
function pubkey(bytes32 node) public view returns (bytes32 x, bytes32 y) {
|
||||
return (records[node].pubkey.x, records[node].pubkey.y);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the ABI associated with an ENS node.
|
||||
* Defined in EIP205.
|
||||
* @param node The ENS node to query
|
||||
* @param contentTypes A bitwise OR of the ABI formats accepted by the caller.
|
||||
* @return contentType The content type of the return value
|
||||
* @return data The ABI data
|
||||
*/
|
||||
function ABI(bytes32 node, uint256 contentTypes) public view returns (uint256 contentType, bytes data) {
|
||||
Record storage record = records[node];
|
||||
for (contentType = 1; contentType <= contentTypes; contentType <<= 1) {
|
||||
if ((contentType & contentTypes) != 0 && record.abis[contentType].length > 0) {
|
||||
data = record.abis[contentType];
|
||||
return;
|
||||
}
|
||||
}
|
||||
contentType = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name associated with an ENS node, for reverse records.
|
||||
* Defined in EIP181.
|
||||
* @param node The ENS node to query.
|
||||
* @return The associated name.
|
||||
*/
|
||||
function name(bytes32 node) public view returns (string) {
|
||||
return records[node].name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the content hash associated with an ENS node.
|
||||
* Note that this resource type is not standardized, and will likely change
|
||||
* in future to a resource type based on multihash.
|
||||
* @param node The ENS node to query.
|
||||
* @return The associated content hash.
|
||||
*/
|
||||
function content(bytes32 node) public view returns (bytes32) {
|
||||
return records[node].content;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the address associated with an ENS node.
|
||||
* @param node The ENS node to query.
|
||||
* @return The associated address.
|
||||
*/
|
||||
function addr(bytes32 node) public view returns (address) {
|
||||
return records[node].addr;
|
||||
}
|
||||
}
|
|
@ -1,6 +1,7 @@
|
|||
pragma solidity ^0.4.17;
|
||||
|
||||
import "https://github.com/embark-framework/embark/blob/develop/test_apps/contracts_app/contracts/ownable.sol";
|
||||
import "https://github.com/embark-framework/embark/blob/develop/test_apps/contracts_app/contracts/contract_args.sol";
|
||||
|
||||
|
||||
contract SimpleStorageWithHttpImport is Ownable {
|
||||
|
|
|
@ -72,15 +72,6 @@ module.exports = {
|
|||
SimpleStorageWithHttpImport: {
|
||||
fromIndex: 0,
|
||||
args: [100]
|
||||
},
|
||||
ENSRegistry: {
|
||||
"deploy": false
|
||||
},
|
||||
Resolver: {
|
||||
"deploy": false
|
||||
},
|
||||
FIFSRegistrar: {
|
||||
"deploy": false
|
||||
}
|
||||
},
|
||||
afterDeploy: [
|
||||
|
|
|
@ -1,43 +0,0 @@
|
|||
/*global contract, config, it, assert, before*/
|
||||
const Resolver = require('Embark/contracts/Resolver');
|
||||
|
||||
const namehash = require('eth-ens-namehash');
|
||||
const address = '0x38ac14a9B6a7c8F9C46e4804074186c9F201D0A0';
|
||||
const rootNode = namehash.hash('embark.eth');
|
||||
|
||||
config({
|
||||
contracts: {
|
||||
"ENSRegistry": {
|
||||
"args": []
|
||||
},
|
||||
"Resolver": {
|
||||
"args": ["$ENSRegistry"]
|
||||
},
|
||||
"FIFSRegistrar": {
|
||||
"args": ["$ENSRegistry", rootNode],
|
||||
"onDeploy": [
|
||||
`ENSRegistry.methods.setOwner('${rootNode}', web3.eth.defaultAccount).send().then(() => {
|
||||
ENSRegistry.methods.setResolver('${rootNode}', "$Resolver").send();
|
||||
Resolver.methods.setAddr('${rootNode}', '${address}').send();
|
||||
});`
|
||||
]
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
contract("ENS", function () {
|
||||
it("should have registered embark.eth", function () {
|
||||
let maxRetry = 20;
|
||||
let domainAddress;
|
||||
|
||||
const wait = setInterval(async () => {
|
||||
domainAddress = await Resolver.methods.addr(rootNode).call();
|
||||
if (domainAddress || maxRetry === 0) {
|
||||
clearInterval(wait);
|
||||
assert.strictEqual(domainAddress, address);
|
||||
return;
|
||||
}
|
||||
maxRetry--;
|
||||
}, 50);
|
||||
});
|
||||
});
|
|
@ -1,22 +1,34 @@
|
|||
/*global contract, it, embark, assert, before*/
|
||||
/*global contract, it, embark, assert, before, web3*/
|
||||
const SimpleStorage = embark.require('Embark/contracts/SimpleStorage');
|
||||
const Utils = require('embarkjs').Utils;
|
||||
|
||||
contract("SimpleStorage Deploy", function () {
|
||||
let SimpleStorageInstance;
|
||||
|
||||
before(async function() {
|
||||
SimpleStorageInstance = await SimpleStorage.deploy({arguments: [150]}).send();
|
||||
let simpleStorageInstance;
|
||||
before(function(done) {
|
||||
Utils.secureSend(web3, SimpleStorage.deploy({arguments: [150]}), {}, true, function(err, receipt) {
|
||||
if(err) {
|
||||
return done(err);
|
||||
}
|
||||
simpleStorageInstance = SimpleStorage;
|
||||
simpleStorageInstance.options.address = receipt.contractAddress;
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it("should set constructor value", async function () {
|
||||
let result = await SimpleStorageInstance.methods.storedData().call();
|
||||
let result = await simpleStorageInstance.methods.storedData().call();
|
||||
assert.strictEqual(parseInt(result, 10), 150);
|
||||
});
|
||||
|
||||
it("set storage value", async function () {
|
||||
await SimpleStorageInstance.methods.set(150).send();
|
||||
let result = await SimpleStorageInstance.methods.get().call();
|
||||
assert.strictEqual(parseInt(result, 10), 150);
|
||||
it("set storage value", function (done) {
|
||||
Utils.secureSend(web3, simpleStorageInstance.methods.set(200), {}, false, async function(err) {
|
||||
if (err) {
|
||||
return done(err);
|
||||
}
|
||||
let result = await simpleStorageInstance.methods.get().call();
|
||||
assert.strictEqual(parseInt(result, 10), 200);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
|