* Removed duplicated code for building and deploying contracts and replaced it with a call to the build function (the resulting call flow is sketched below).

* Updated the upload functions to use async `exec` and removed the explicit returns from the callbacks (the async exec pattern is sketched below, before the plugin diffs).
Eric Mastro 2018-04-15 18:41:50 +10:00 committed by emizzle
parent c1c61de2d5
commit 8fc978eb50
3 changed files with 71 additions and 85 deletions
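A minimal sketch of the resulting call flow, using the names from the Embark class diff below. Engine and the individual service-startup calls come from the surrounding Embark codebase and are only assumed here; error handling is elided, so treat this as an illustration rather than the exact implementation.

// Sketch only: build() now accepts an existing engine plus a flag that keeps
// the process alive, so upload() can reuse it instead of duplicating the
// contract deployment and webpack steps.
class Embark {
  build(options, engine, continueProcessing) {
    if (!engine) {
      engine = new Engine({
        env: options.env,
        version: this.version,
        embarkConfig: 'embark.json',
        interceptLogs: false
      });
      engine.init();
    }
    // ... start services, deploy contracts, run the dapp webpack ...
    if (!continueProcessing) {
      process.exit();   // only exit when build() owns the process
    }
  }

  upload(platform, options) {
    let engine = new Engine({ /* same options as in build() */ });
    engine.init();
    engine.events.on('outputDone', function () {
      // webpack finished: hand the build over to the storage plugin for `platform`
    });
    this.build(options, engine, true);   // true = upload() decides when to exit
  }
}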


@@ -147,15 +147,16 @@ class Embark {
});
}
build(options) {
let engine = new Engine({
env: options.env,
version: this.version,
embarkConfig: 'embark.json',
interceptLogs: false
});
engine.init();
build(options, engine, continueProcessing) {
if(!engine){
engine = new Engine({
env: options.env,
version: this.version,
embarkConfig: 'embark.json',
interceptLogs: false
});
engine.init();
}
async.waterfall([
function startServices(callback) {
@@ -185,7 +186,9 @@
engine.logger.info("finished building".underline);
}
// needed due to child processes
process.exit();
if(!continueProcessing){
process.exit();
}
});
}
@@ -252,6 +255,7 @@
options.buildDir = 'dist/';
options.storageConfig = this.config.storageConfig;
// initialise embark engine
let engine = new Engine({
env: options.env,
version: this.version,
@@ -260,73 +264,54 @@
});
engine.init();
// load plugins
this.plugins.loadInternalPlugin('ipfs', options);
this.plugins.loadInternalPlugin('swarm', options);
let cmdPlugins = this.plugins.getPluginsFor('uploadCmds');
let plugins = this.plugins;
let cmdPlugin;
if (cmdPlugins.length > 0) {
cmdPlugin = cmdPlugins.find((pluginCmd) => {
return pluginCmd.name == platform;
});
}
if (cmdPlugin) {
async.waterfall([
function (callback){
engine.logger.debug('building dapp...');
engine.startMonitor();
engine.startService("libraryManager");
engine.startService("web3");
engine.startService("pipeline");
engine.startService("codeGenerator");
engine.startService("deployment");
engine.startService("ipfs");
// 3. upload to storage (outputDone event triggered after webpack finished)
engine.events.on('outputDone', function () {
engine.logger.debug('deploying to ' + platform + '...');
cmdPlugin.uploadCmds[0].cb()
.then((success) => {
callback(null, success);
})
.catch((err) => {
callback(err);
});
let self = this;
async.waterfall([
function setupStoragePlugin(callback){
// check whether the user has specified an existing storage plugin
let cmdPlugins = plugins.getPluginsFor('uploadCmds');
if (cmdPlugins.length > 0) {
cmdPlugin = cmdPlugins.find((pluginCmd) => {
return pluginCmd.name == platform;
});
// 1. build the contracts
engine.deployManager.deployContracts(function (err) {
if(err){
callback(err);
}
// 2. trigger code generation and dapp webpack
engine.events.emit('asset-changed', engine.contractsManager);
});
}
], function (err, _result) {
if (err) {
engine.logger.error(err.message);
engine.logger.debug(err.stack);
if (!cmdPlugin) {
engine.logger.info('try "embark upload ipfs" or "embark upload swarm"'.green);
callback({message: 'unknown platform: ' + platform});
} else {
engine.logger.info("finished building dapp and deploying to " + platform.underline);
callback();
}
},
function buildAndDeployContracts(callback){
// 2. upload to storage (outputDone event triggered after webpack finished)
engine.events.on('outputDone', function () {
engine.logger.info('deploying to ' + platform + '...');
cmdPlugin.uploadCmds[0].cb()
.then((success) => {
callback(null, success);
})
.catch(callback);
});
// 1. build the contracts and dapp webpack
self.build(options, engine, true);
}
], function (err, _result) {
if (err) {
engine.logger.error(err.message);
engine.logger.debug(err.stack);
} else {
engine.logger.info("finished building dapp and deploying to " + platform.underline);
}
// needed due to child processes
process.exit();
});
} else {
engine.logger.error(("unknown platform: " + platform));
engine.logger.info('try "embark upload ipfs" or "embark upload swarm"'.green);
}
// needed due to child processes
process.exit();
});
}
runTests(file) {

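Both storage plugins below replace the blocking call to shelljs.exec with its callback form, where shelljs invokes the callback with (code, stdout, stderr) once the command exits. A minimal sketch of the pattern; the exit-code check is added here for illustration and is not part of the plugins themselves.

const shelljs = require('shelljs');

function runCommand(cmd, callback) {
  // async exec: nothing is returned; the callback fires when the command exits
  shelljs.exec(cmd, function (code, stdout, stderr) {
    if (code !== 0) {
      return callback(stderr || ('exit code ' + code));   // surface failures as waterfall errors
    }
    callback(null, stdout);
  });
}

// usage inside an async.waterfall step, e.g.:
// runCommand('ipfs add -r dist/', callback);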

@@ -24,28 +24,28 @@ class IPFS {
ipfs_bin = "~/go/bin/ipfs";
}
return callback(null, ipfs_bin);
callback(null, ipfs_bin);
},
function runCommand(ipfs_bin, callback) {
let cmd = `"${ipfs_bin}" add -r ${self.buildDir}`;
console.log(("=== adding " + self.buildDir + " to ipfs").green);
console.log(cmd.green);
let result = shelljs.exec(cmd);
return callback(null, result);
shelljs.exec(cmd, function(code, stdout, stderr){
callback(stderr, stdout);
});
},
function getHashFromOutput(result, callback) {
let rows = result.output.split("\n");
let rows = result.split("\n");
let dir_row = rows[rows.length - 2];
let dir_hash = dir_row.split(" ")[1];
return callback(null, dir_hash);
callback(null, dir_hash);
},
function printUrls(dir_hash, callback) {
console.log(("=== DApp available at http://localhost:8080/ipfs/" + dir_hash + "/").green);
console.log(("=== DApp available at http://gateway.ipfs.io/ipfs/" + dir_hash + "/").green);
return callback();
callback();
}
], function (err, _result) {
if (err) {

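The getHashFromOutput step above relies on the shape of the `ipfs add -r` output, in which each line reads `added <hash> <path>` and the last non-empty line refers to the directory itself. A minimal sketch with hypothetical output:

// Hypothetical stdout from `ipfs add -r dist/` (note the trailing newline):
let stdout = 'added QmFileHashAaaa dist/index.html\n' +
             'added QmDirHashBbbb dist\n';

let rows = stdout.split("\n");          // last element is '' because of the trailing newline
let dir_row = rows[rows.length - 2];    // last non-empty line: the directory entry
let dir_hash = dir_row.split(" ")[1];   // second token is the directory hash
console.log(dir_hash);                  // -> QmDirHashBbbb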

@@ -20,30 +20,31 @@ class Swarm {
swarm_bin = "~/go/bin/swarm";
}
return callback(null, swarm_bin);
callback(null, swarm_bin);
},
function runCommand(swarm_bin, callback) {
let cmd = `"${swarm_bin}" --defaultpath ${self.buildDir} index.html --recursive up ${self.buildDir}`;
console.log(("=== adding " + self.buildDir + " to swarm").green);
console.log(cmd.green);
let result = shelljs.exec(cmd);
return callback(null, result);
shelljs.exec(cmd, function(code, stdout, stderr){
callback(stderr, {code: code, output: stdout});
});
},
function getHashFromOutput(result, callback) {
if (result.code !== 0) {
return callback("couldn't upload, is the swarm daemon running?");
callback("couldn't upload, is the swarm daemon running?");
}
else{
let rows = result.output.split("\n");
let dir_hash = rows.reverse()[1];
let rows = result.output.split("\n");
let dir_hash = rows.reverse()[1];
return callback(null, dir_hash);
callback(null, dir_hash);
}
},
function printUrls(dir_hash, callback) {
console.log(("=== DApp available at http://localhost:8500/bzz:/" + dir_hash + "/").green);
return callback();
callback();
}
], function (err, _result) {
if (err) {
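Each plugin wires these steps through async.waterfall: any truthy first argument passed to a step's callback (a non-empty stderr, for instance) short-circuits to the final handler shown above, and because async ignores a step's return value, the explicit `return` before callback could be dropped wherever the callback is the last statement in its branch. A minimal sketch with hypothetical step contents:

const async = require('async');

async.waterfall([
  function findBinary(callback) {
    callback(null, '~/go/bin/swarm');                          // hypothetical binary path
  },
  function runCommand(swarm_bin, callback) {
    // passing a truthy error here would skip getHashFromOutput entirely
    callback(null, {code: 0, output: 'uploaded\nabc123\n'});   // hypothetical exec result
  },
  function getHashFromOutput(result, callback) {
    callback(null, result.output.split("\n").reverse()[1]);    // -> 'abc123'
  }
], function (err, dir_hash) {
  if (err) {
    console.log('upload failed: ' + err);
  } else {
    console.log('hash: ' + dir_hash);
  }
});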