Merge pull request #364 from embark-framework/bug_fix/build-before-upload-fixed

Bug fix/build before upload fixed
Iuri Matias 2018-04-16 11:13:09 -04:00 committed by GitHub
commit 39c7e3c625
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 166 additions and 105 deletions

View File

@@ -170,13 +170,15 @@ class Cmd {
   upload() {
     program
       .command('upload [platform] [environment]')
+      .option('--logfile [logfile]', 'filename to output logs (default: none)')
       .description('upload your dapp to a decentralized storage (e.g embark upload ipfs)')
       .action(function (platform, env, _options) {
-        // TODO: get env in cmd line as well
-        embark.initConfig(env || 'development', {
+        let environment = env || 'development';
+        embark.initConfig(environment, {
           embarkConfig: 'embark.json', interceptLogs: false
         });
-        embark.upload(platform);
+        _options.env = environment;
+        embark.upload(platform, _options);
       });
   }

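With this change the CLI no longer drops the parsed command-line options: the action handler sets _options.env and forwards the whole commander options object to embark.upload. As a rough illustration (the values below are hypothetical, not from the commit), a run such as "embark upload ipfs testnet --logfile embark.log" would hand upload() an object shaped roughly like this:

// Approximate shape of the _options object forwarded to embark.upload(platform, _options)
// for: embark upload ipfs testnet --logfile embark.log
// (illustrative values only; commander also attaches its own internal fields)
{
  env: 'testnet',          // set explicitly by the action handler, defaults to 'development'
  logfile: 'embark.log'    // populated by the new --logfile option
}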
View File

@@ -147,15 +147,16 @@ class Embark {
     });
   }

-  build(options) {
-    let engine = new Engine({
+  build(options, engine, continueProcessing) {
+    if(!engine){
+      engine = new Engine({
       env: options.env,
       version: this.version,
       embarkConfig: 'embark.json',
       interceptLogs: false
     });
     engine.init();
+    }

     async.waterfall([
       function startServices(callback) {
@@ -185,7 +186,9 @@ class Embark {
         engine.logger.info("finished building".underline);
       }
       // needed due to child processes
-      process.exit();
+      if(err || !continueProcessing){
+        process.exit();
+      }
     });
   }
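build() now accepts an optional pre-initialised engine and a continueProcessing flag, so the same method serves both the standalone build command and the new upload flow. A minimal sketch of the two call patterns this diff enables (the calls themselves are illustrative, not lines from the commit):

// 1. Standalone CLI build: no engine is passed, so build() constructs and
//    initialises its own Engine and exits the process when the waterfall ends.
embark.build({ env: 'development' });

// 2. Called from upload(): the already-initialised engine is reused and
//    continueProcessing = true skips the early process.exit(), keeping the
//    process alive for the storage upload that follows.
embark.build(options, engine, true);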
@@ -247,29 +250,68 @@
   }

   // TODO: should deploy if it hasn't already
-  upload(platform) {
-    let options = {
-      buildDir: 'dist/',
-      storageConfig: this.config.storageConfig
-    };
+  upload(platform, options) {
+    options.buildDir = 'dist/';
+    options.storageConfig = this.config.storageConfig;
+    // initialise embark engine
+    let engine = new Engine({
+      env: options.env,
+      version: this.version,
+      embarkConfig: options.embarkConfig || 'embark.json',
+      logfile: options.logfile
+    });
+    engine.init();
+    // load plugins
     this.plugins.loadInternalPlugin('ipfs', options);
     this.plugins.loadInternalPlugin('swarm', options);
-    let cmdPlugins = this.plugins.getPluginsFor('uploadCmds');
+    let plugins = this.plugins;
     let cmdPlugin;
+    let self = this;
+    async.waterfall([
+      function setupStoragePlugin(callback){
+        // check use has input existing storage plugin
+        let cmdPlugins = plugins.getPluginsFor('uploadCmds');
     if (cmdPlugins.length > 0) {
       cmdPlugin = cmdPlugins.find((pluginCmd) => {
         return pluginCmd.name == platform;
       });
     }
-    if (cmdPlugin) {
-      cmdPlugin.uploadCmds[0].cb();
+        if (!cmdPlugin) {
+          engine.logger.info('try "embark upload ipfs" or "embark upload swarm"'.green);
+          callback({message: 'unknown platform: ' + platform});
     } else {
-      console.log(("unknown platform: " + platform).red);
-      console.log('try "embark upload ipfs" or "embark upload swarm"'.green);
+          callback();
     }
+      },
+      function buildAndDeployContracts(callback){
+        // 2. upload to storage (outputDone event triggered after webpack finished)
+        engine.events.on('outputDone', function () {
+          engine.logger.info('deploying to ' + platform + '...');
+          cmdPlugin.uploadCmds[0].cb()
+            .then((success) => {
+              callback(null, success);
+            })
+            .catch(callback);
+        });
+        // 1. build the contracts and dapp webpack
+        self.build(options, engine, true);
+      }
+    ], function (err, _result) {
+      if (err) {
+        engine.logger.error(err.message);
+        engine.logger.debug(err.stack);
+      } else {
+        engine.logger.info("finished building dapp and deploying to " + platform.underline);
+      }
+      // needed due to child processes
+      process.exit();
+    });
   }

   runTests(file) {

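The waterfall above enforces the ordering the PR title describes: the dapp is built before it is uploaded. buildAndDeployContracts registers the 'outputDone' listener first and only then starts the build, so the upload callback runs once webpack output is ready rather than racing it. A stripped-down sketch of that listen-then-trigger pattern (a plain Node EventEmitter is used here; engine.events in Embark is assumed to provide equivalent on/emit semantics):

const EventEmitter = require('events');
const events = new EventEmitter();

// Register the listener before kicking off the work that emits the event;
// otherwise 'outputDone' could fire with nobody listening and the upload
// step would never run.
events.on('outputDone', () => {
  console.log('build output ready, starting upload...');
});

// ...later, when the build/webpack pipeline finishes:
events.emit('outputDone');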
View File

@@ -12,6 +12,7 @@ class IPFS {
   }

   deploy() {
+    return new Promise((resolve, reject) => {
     console.log("deploying!");
     let self = this;
     async.waterfall([
@@ -23,37 +24,39 @@ class IPFS {
           ipfs_bin = "~/go/bin/ipfs";
         }
-        return callback(null, ipfs_bin);
+        callback(null, ipfs_bin);
       },
       function runCommand(ipfs_bin, callback) {
         let cmd = `"${ipfs_bin}" add -r ${self.buildDir}`;
         console.log(("=== adding " + self.buildDir + " to ipfs").green);
         console.log(cmd.green);
-        let result = shelljs.exec(cmd);
-        return callback(null, result);
+        shelljs.exec(cmd, function(code, stdout, stderr){
+          callback(stderr, stdout);
+        });
       },
       function getHashFromOutput(result, callback) {
-        let rows = result.output.split("\n");
+        let rows = result.split("\n");
         let dir_row = rows[rows.length - 2];
         let dir_hash = dir_row.split(" ")[1];
-        return callback(null, dir_hash);
+        callback(null, dir_hash);
       },
       function printUrls(dir_hash, callback) {
         console.log(("=== DApp available at http://localhost:8080/ipfs/" + dir_hash + "/").green);
         console.log(("=== DApp available at http://gateway.ipfs.io/ipfs/" + dir_hash + "/").green);
-        return callback();
+        callback();
       }
     ], function (err, _result) {
       if (err) {
         console.log("error uploading to ipfs".red);
         console.log(err);
+        reject(err);
       }
+      else resolve('successfully uploaded to ipfs');
+    });
     });
   }
 }

 module.exports = IPFS;

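Wrapping deploy() in a Promise is what lets Embark.upload chain cmdPlugin.uploadCmds[0].cb().then(...).catch(...) instead of firing and forgetting. A minimal consumer sketch (the require path and constructor options are assumptions for illustration, not taken from the commit):

const IPFS = require('./ipfs');

new IPFS({ buildDir: 'dist/', storageConfig: {} })
  .deploy()
  .then((msg) => console.log(msg))        // resolves with 'successfully uploaded to ipfs'
  .catch((err) => console.error(err));    // rejects with the waterfall error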
View File

@@ -9,6 +9,7 @@ class Swarm {
   }

   deploy() {
+    return new Promise((resolve, reject) => {
     let self = this;
     async.waterfall([
       function findBinary(callback) {
@@ -19,36 +20,40 @@ class Swarm {
           swarm_bin = "~/go/bin/swarm";
         }
-        return callback(null, swarm_bin);
+        callback(null, swarm_bin);
       },
       function runCommand(swarm_bin, callback) {
         let cmd = `"${swarm_bin}" --defaultpath ${self.buildDir} index.html --recursive up ${self.buildDir}`;
         console.log(("=== adding " + self.buildDir + " to swarm").green);
         console.log(cmd.green);
-        let result = shelljs.exec(cmd);
-        return callback(null, result);
+        shelljs.exec(cmd, function(code, stdout, stderr){
+          callback(stderr, {code: code, output: stdout});
+        });
       },
       function getHashFromOutput(result, callback) {
         if (result.code !== 0) {
-          return callback("couldn't upload, is the swarm daemon running?");
+          callback("couldn't upload, is the swarm daemon running?");
         }
+        else{
         let rows = result.output.split("\n");
         let dir_hash = rows.reverse()[1];
-        return callback(null, dir_hash);
+        callback(null, dir_hash);
+        }
       },
       function printUrls(dir_hash, callback) {
         console.log(("=== DApp available at http://localhost:8500/bzz:/" + dir_hash + "/").green);
-        return callback();
+        callback();
       }
     ], function (err, _result) {
       if (err) {
         console.log("error uploading to swarm".red);
         console.log(err);
+        reject(err);
       }
+      else resolve('successfully uploaded to swarm');
+    });
     });
   }
 }

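The swarm step now uses shelljs.exec in its asynchronous form, whose callback receives (code, stdout, stderr); passing stderr as the waterfall error and bundling {code, output} forward lets getHashFromOutput detect a failed upload from the exit code. A small standalone sketch of that callback contract (the command itself is illustrative):

const shelljs = require('shelljs');

// Async shelljs.exec: the callback gets the exit code, stdout and stderr.
shelljs.exec('swarm version', function (code, stdout, stderr) {
  if (code !== 0) {
    return console.error("couldn't run swarm, is the binary installed?", stderr);
  }
  console.log('swarm output:', stdout.trim());
});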
View File

@@ -1,3 +1,12 @@
+import $ from 'jquery';
+import AlreadyDeployedToken from 'Embark/contracts/AlreadyDeployedToken';
+import AnotherStorage from 'Embark/contracts/AnotherStorage';
+import async from 'async';
+import MyToken from 'Embark/contracts/MyToken';
+import MyToken2 from 'Embark/contracts/MyToken2';
+import SimpleStorage from 'Embark/contracts/SimpleStorage';
+import Token from 'Embark/contracts/Token';
 $(document).ready(function() {