mirror of https://github.com/embarklabs/embark.git
* Removed duplicated code that built and deployed contracts and replaced it with a call to the build function.
* Updated upload functions to use async `exec` and removed returns from the callbacks.
This commit is contained in:
parent c1c61de2d5
commit 8fc978eb50
121 lib/index.js
@@ -147,15 +147,16 @@ class Embark {
     });
   }
 
-  build(options) {
-    let engine = new Engine({
-      env: options.env,
-      version: this.version,
-      embarkConfig: 'embark.json',
-      interceptLogs: false
-    });
-    engine.init();
+  build(options, engine, continueProcessing) {
+    if(!engine){
+      engine = new Engine({
+        env: options.env,
+        version: this.version,
+        embarkConfig: 'embark.json',
+        interceptLogs: false
+      });
+      engine.init();
+    }
 
     async.waterfall([
       function startServices(callback) {
@@ -185,7 +186,9 @@ class Embark {
         engine.logger.info("finished building".underline);
       }
       // needed due to child processes
-      process.exit();
+      if(!continueProcessing){
+        process.exit();
+      }
     });
   }
 
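The effect of the new parameters can be sketched as follows. This is illustrative only: `embarkInstance` and `existingEngine` are assumed names for an Embark instance and an already-initialised Engine, and the option values are not part of the commit.

    // Sketch only (assumed names): how the reworked build() can be driven.
    // CLI-style call: the engine is created internally and process.exit() still runs.
    embarkInstance.build({env: 'development'});

    // Caller-managed call (what upload() now does): reuse an existing engine
    // and pass continueProcessing=true to keep the process alive after the build.
    embarkInstance.build({env: 'development'}, existingEngine, true);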
@@ -252,6 +255,7 @@ class Embark {
     options.buildDir = 'dist/';
     options.storageConfig = this.config.storageConfig;
 
+    // initialise embark engine
     let engine = new Engine({
       env: options.env,
       version: this.version,
@@ -260,73 +264,54 @@ class Embark {
     });
     engine.init();
 
+    // load plugins
     this.plugins.loadInternalPlugin('ipfs', options);
     this.plugins.loadInternalPlugin('swarm', options);
 
-    let cmdPlugins = this.plugins.getPluginsFor('uploadCmds');
-    let cmdPlugin;
-    if (cmdPlugins.length > 0) {
-      cmdPlugin = cmdPlugins.find((pluginCmd) => {
-        return pluginCmd.name == platform;
-      });
-    }
-
-    if (cmdPlugin) {
-      async.waterfall([
-        function (callback){
-          engine.logger.debug('building dapp...');
-          engine.startMonitor();
-          engine.startService("libraryManager");
-          engine.startService("web3");
-          engine.startService("pipeline");
-          engine.startService("codeGenerator");
-          engine.startService("deployment");
-          engine.startService("ipfs");
-
-          // 3. upload to storage (outputDone event triggered after webpack finished)
-          engine.events.on('outputDone', function () {
-            engine.logger.debug('deploying to ' + platform + '...');
-            cmdPlugin.uploadCmds[0].cb()
-            .then((success) => {
-              callback(null, success);
-            })
-            .catch((err) => {
-              callback(err);
-            });
-          });
-
-          // 1. build the contracts
-          engine.deployManager.deployContracts(function (err) {
-            if(err){
-              callback(err);
-            }
-
-            // 2. trigger code generation and dapp webpack
-            engine.events.emit('asset-changed', engine.contractsManager);
-          });
-
-        }
-      ], function (err, _result) {
-        if (err) {
-          engine.logger.error(err.message);
-          engine.logger.debug(err.stack);
-        } else {
-          engine.logger.info("finished building dapp and deploying to " + platform.underline);
-        }
-        // needed due to child processes
-        process.exit();
-      });
-    } else {
-      engine.logger.error(("unknown platform: " + platform));
-      engine.logger.info('try "embark upload ipfs" or "embark upload swarm"'.green);
-    }
+    let plugins = this.plugins;
+    let cmdPlugin;
+    let self = this;
+    async.waterfall([
+      function setupStoragePlugin(callback){
+        // check use has input existing storage plugin
+        let cmdPlugins = plugins.getPluginsFor('uploadCmds');
+
+        if (cmdPlugins.length > 0) {
+          cmdPlugin = cmdPlugins.find((pluginCmd) => {
+            return pluginCmd.name == platform;
+          });
+        }
+        if (!cmdPlugin) {
+          engine.logger.info('try "embark upload ipfs" or "embark upload swarm"'.green);
+          callback({message: 'unknown platform: ' + platform});
+        } else {
+          callback();
+        }
+      },
+      function buildAndDeployContracts(callback){
+        // 2. upload to storage (outputDone event triggered after webpack finished)
+        engine.events.on('outputDone', function () {
+          engine.logger.info('deploying to ' + platform + '...');
+          cmdPlugin.uploadCmds[0].cb()
+          .then((success) => {
+            callback(null, success);
+          })
+          .catch(callback);
+        });
+        // 1. build the contracts and dapp webpack
+        self.build(options, engine, true);
+      }
+    ], function (err, _result) {
+      if (err) {
+        engine.logger.error(err.message);
+        engine.logger.debug(err.stack);
+      } else {
+        engine.logger.info("finished building dapp and deploying to " + platform.underline);
+      }
+      // needed due to child processes
+      process.exit();
+    });
   }
 
   runTests(file) {
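Condensed, the reworked upload flow above amounts to the following. This is a simplified sketch of the hunk above, with logging and option wiring omitted.

    // Simplified sketch of the new upload() control flow shown above.
    async.waterfall([
      function setupStoragePlugin(callback) {
        // resolve the upload plugin for the requested platform, or fail fast
        cmdPlugin = plugins.getPluginsFor('uploadCmds').find((p) => p.name == platform);
        cmdPlugin ? callback() : callback({message: 'unknown platform: ' + platform});
      },
      function buildAndDeployContracts(callback) {
        // the upload itself runs once webpack signals 'outputDone'
        engine.events.on('outputDone', function () {
          cmdPlugin.uploadCmds[0].cb().then((ok) => callback(null, ok)).catch(callback);
        });
        // reuse build() instead of duplicating the deploy pipeline; `true` keeps the process alive
        self.build(options, engine, true);
      }
    ], function (err) {
      // report success or failure, then exit (needed due to child processes)
      process.exit();
    });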
@@ -24,28 +24,28 @@ class IPFS {
           ipfs_bin = "~/go/bin/ipfs";
         }
 
-        return callback(null, ipfs_bin);
+        callback(null, ipfs_bin);
       },
       function runCommand(ipfs_bin, callback) {
         let cmd = `"${ipfs_bin}" add -r ${self.buildDir}`;
         console.log(("=== adding " + self.buildDir + " to ipfs").green);
         console.log(cmd.green);
-        let result = shelljs.exec(cmd);
-
-        return callback(null, result);
+        shelljs.exec(cmd, function(code, stdout, stderr){
+          callback(stderr, stdout);
+        });
       },
       function getHashFromOutput(result, callback) {
-        let rows = result.output.split("\n");
+        let rows = result.split("\n");
         let dir_row = rows[rows.length - 2];
         let dir_hash = dir_row.split(" ")[1];
 
-        return callback(null, dir_hash);
+        callback(null, dir_hash);
       },
       function printUrls(dir_hash, callback) {
         console.log(("=== DApp available at http://localhost:8080/ipfs/" + dir_hash + "/").green);
         console.log(("=== DApp available at http://gateway.ipfs.io/ipfs/" + dir_hash + "/").green);
 
-        return callback();
+        callback();
       }
     ], function (err, _result) {
       if (err) {
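The synchronous `shelljs.exec(cmd)` call is replaced by the asynchronous form, whose callback receives `(code, stdout, stderr)`; passing `stderr` as the first callback argument means any stderr output becomes the waterfall error. A standalone sketch of that callback shape follows; the command string and handler are illustrative, not from the commit.

    // Illustrative use of async shelljs.exec with an error-first continuation.
    const shelljs = require('shelljs');

    shelljs.exec('ipfs add -r dist/', function (code, stdout, stderr) {
      if (stderr) {
        // in the plugin code above, this value is what aborts the waterfall
        return console.error('upload failed:', stderr);
      }
      console.log('ipfs output:', stdout);
    });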
@@ -20,30 +20,31 @@ class Swarm {
           swarm_bin = "~/go/bin/swarm";
         }
 
-        return callback(null, swarm_bin);
+        callback(null, swarm_bin);
       },
       function runCommand(swarm_bin, callback) {
         let cmd = `"${swarm_bin}" --defaultpath ${self.buildDir} index.html --recursive up ${self.buildDir}`;
         console.log(("=== adding " + self.buildDir + " to swarm").green);
         console.log(cmd.green);
-        let result = shelljs.exec(cmd);
-
-        return callback(null, result);
+        shelljs.exec(cmd, function(code, stdout, stderr){
+          callback(stderr, {code: code, output: stdout});
+        });
       },
       function getHashFromOutput(result, callback) {
         if (result.code !== 0) {
-          return callback("couldn't upload, is the swarm daemon running?");
+          callback("couldn't upload, is the swarm daemon running?");
         }
-
-        let rows = result.output.split("\n");
-        let dir_hash = rows.reverse()[1];
-
-        return callback(null, dir_hash);
+        else{
+          let rows = result.output.split("\n");
+          let dir_hash = rows.reverse()[1];
+
+          callback(null, dir_hash);
+        }
       },
       function printUrls(dir_hash, callback) {
         console.log(("=== DApp available at http://localhost:8500/bzz:/" + dir_hash + "/").green);
 
-        return callback();
+        callback();
       }
     ], function (err, _result) {
       if (err) {