Merge pull request #364 from embark-framework/bug_fix/build-before-upload-fixed
Bug fix/build before upload fixed

`embark upload` now builds the dapp (contracts and webpack output) before uploading it to decentralized storage, and the IPFS and Swarm uploaders return Promises so the upload step can be sequenced after the build completes.

Commit 39c7e3c625
```diff
@@ -170,13 +170,15 @@ class Cmd {
   upload() {
     program
       .command('upload [platform] [environment]')
       .option('--logfile [logfile]', 'filename to output logs (default: none)')
       .description('upload your dapp to a decentralized storage (e.g embark upload ipfs)')
       .action(function (platform, env, _options) {
         // TODO: get env in cmd line as well
-        embark.initConfig(env || 'development', {
+        let environment = env || 'development';
+        embark.initConfig(environment, {
           embarkConfig: 'embark.json', interceptLogs: false
         });
-        embark.upload(platform);
+        _options.env = environment;
+        embark.upload(platform, _options);
       });
   }
```
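The CLI change threads the selected environment and the `--logfile` flag into the options object that commander already passes to the action handler, so `Embark.upload` receives everything it needs instead of hard-coding its own options. Below is a minimal sketch of that pattern using commander's v2-era API; `deploySomewhere` is a hypothetical stand-in for `embark.upload`.

```js
// Sketch only: how commander passes the parsed options object to the action
// handler, and how an extra field (env) can be attached before forwarding it.
const program = require('commander'); // commander v2-style API, as Embark used at the time

function deploySomewhere(platform, options) {
  // hypothetical stand-in for embark.upload(platform, _options)
  console.log('uploading to ' + platform +
    ' (env=' + options.env + ', logfile=' + (options.logfile || 'none') + ')');
}

program
  .command('upload [platform] [environment]')
  .option('--logfile [logfile]', 'filename to output logs (default: none)')
  .action(function (platform, env, options) {
    let environment = env || 'development';
    options.env = environment; // same move as the PR: stash env on the options object
    deploySomewhere(platform || 'ipfs', options);
  });

program.parse(process.argv);
```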
lib/index.js (96 lines changed)
```diff
@@ -147,15 +147,16 @@ class Embark {
     });
   }

-  build(options) {
-
-    let engine = new Engine({
-      env: options.env,
-      version: this.version,
-      embarkConfig: 'embark.json',
-      interceptLogs: false
-    });
-    engine.init();
+  build(options, engine, continueProcessing) {
+    if(!engine){
+      engine = new Engine({
+        env: options.env,
+        version: this.version,
+        embarkConfig: 'embark.json',
+        interceptLogs: false
+      });
+      engine.init();
+    }

     async.waterfall([
       function startServices(callback) {
```
```diff
@@ -185,7 +186,9 @@ class Embark {
         engine.logger.info("finished building".underline);
       }
       // needed due to child processes
-      process.exit();
+      if(err || !continueProcessing){
+        process.exit();
+      }
     });
   }

```
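`build` now takes an optional pre-initialised engine and a `continueProcessing` flag: `embark upload` can inject the engine it already created and keep the process alive for the upload that follows, while a plain `embark build` still constructs its own engine and exits when done. A small sketch of the same pattern, with `FakeEngine` as a hypothetical stand-in for Embark's `Engine`:

```js
// Sketch of the pattern build() now follows: reuse an injected engine when one
// is provided, otherwise construct and initialise a fresh one, and only exit
// the process when the caller does not need it any further.
class FakeEngine { // hypothetical stand-in for Embark's Engine
  constructor(options) { this.env = options.env; }
  init() { console.log('engine initialised for ' + this.env); }
}

function build(options, engine, continueProcessing) {
  if (!engine) {
    engine = new FakeEngine({ env: options.env });
    engine.init();
  }

  // ... start services, deploy contracts, run webpack with `engine` ...
  console.log('finished building');

  // needed due to child processes; the PR's version also exits when the
  // build errored: if (err || !continueProcessing) { process.exit(); }
  if (!continueProcessing) {
    process.exit();
  }
}

// From upload(): inject the shared engine and keep the process running.
const shared = new FakeEngine({ env: 'development' });
shared.init();
build({ env: 'development' }, shared, true);

// Standalone `embark build`: no engine injected, exits when done.
build({ env: 'development' });
```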
```diff
@@ -247,29 +250,68 @@ class Embark {
   }

   // TODO: should deploy if it hasn't already
-  upload(platform) {
-    let options = {
-      buildDir: 'dist/',
-      storageConfig: this.config.storageConfig
-    };
+  upload(platform, options) {
+
+    options.buildDir = 'dist/';
+    options.storageConfig = this.config.storageConfig;
+
+    // initialise embark engine
+    let engine = new Engine({
+      env: options.env,
+      version: this.version,
+      embarkConfig: options.embarkConfig || 'embark.json',
+      logfile: options.logfile
+    });
+    engine.init();

+    // load plugins
     this.plugins.loadInternalPlugin('ipfs', options);
     this.plugins.loadInternalPlugin('swarm', options);

-    let cmdPlugins = this.plugins.getPluginsFor('uploadCmds');
+    let plugins = this.plugins;
     let cmdPlugin;
-    if (cmdPlugins.length > 0) {
-      cmdPlugin = cmdPlugins.find((pluginCmd) => {
-        return pluginCmd.name == platform;
-      });
-    }
-
-    if (cmdPlugin) {
-      cmdPlugin.uploadCmds[0].cb();
-    } else {
-      console.log(("unknown platform: " + platform).red);
-      console.log('try "embark upload ipfs" or "embark upload swarm"'.green);
-    }
-    // needed due to child processes
-    process.exit();
+    let self = this;
+    async.waterfall([
+      function setupStoragePlugin(callback){
+        // check use has input existing storage plugin
+        let cmdPlugins = plugins.getPluginsFor('uploadCmds');
+
+        if (cmdPlugins.length > 0) {
+          cmdPlugin = cmdPlugins.find((pluginCmd) => {
+            return pluginCmd.name == platform;
+          });
+        }
+        if (!cmdPlugin) {
+          engine.logger.info('try "embark upload ipfs" or "embark upload swarm"'.green);
+          callback({message: 'unknown platform: ' + platform});
+        } else {
+          callback();
+        }
+      },
+      function buildAndDeployContracts(callback){
+        // 2. upload to storage (outputDone event triggered after webpack finished)
+        engine.events.on('outputDone', function () {
+          engine.logger.info('deploying to ' + platform + '...');
+          cmdPlugin.uploadCmds[0].cb()
+            .then((success) => {
+              callback(null, success);
+            })
+            .catch(callback);
+        });
+        // 1. build the contracts and dapp webpack
+        self.build(options, engine, true);
+      }
+    ], function (err, _result) {
+      if (err) {
+        engine.logger.error(err.message);
+        engine.logger.debug(err.stack);
+      } else {
+        engine.logger.info("finished building dapp and deploying to " + platform.underline);
+      }
+
+      // needed due to child processes
+      process.exit();
+    });
   }

   runTests(file) {
```
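The reworked `upload` sequences the two phases with `async.waterfall`: it first resolves the storage plugin for the requested platform, then registers a listener for the engine's `outputDone` event (fired after webpack finishes) and kicks off the build; the upload itself only runs once that event fires, and the plugin's `cb()` now returns a Promise so success and failure propagate back into the waterfall. A runnable sketch of that build-then-upload sequencing, where the fake engine, `fakeBuild`, and `fakeUploadPlugin` are hypothetical stand-ins for Embark's engine, build pipeline, and upload plugin:

```js
// Sketch only: build first, upload only after the build output is ready.
const async = require('async');
const EventEmitter = require('events');

const engine = { events: new EventEmitter() }; // stand-in for Embark's engine

function fakeBuild(eng) {
  // pretend the contracts + webpack pipeline takes a moment, then signal completion
  setTimeout(() => eng.events.emit('outputDone'), 100);
}

function fakeUploadPlugin() {
  // upload plugins now return a Promise, so the caller can chain .then/.catch
  return Promise.resolve('successfully uploaded');
}

async.waterfall([
  function buildAndDeploy(callback) {
    // 2. upload once the build output is ready
    engine.events.on('outputDone', function () {
      fakeUploadPlugin()
        .then((success) => callback(null, success))
        .catch(callback);
    });
    // 1. kick off the build
    fakeBuild(engine);
  }
], function (err, result) {
  if (err) {
    console.error(err);
  } else {
    console.log(result); // "successfully uploaded"
  }
});
```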
```diff
@@ -12,48 +12,51 @@ class IPFS {
   }

   deploy() {
-    console.log("deploying!");
-    let self = this;
-    async.waterfall([
-      function findBinary(callback) {
-        let ipfs_bin = shelljs.which(self.configIpfsBin);
-
-        if (ipfs_bin === 'ipfs not found' || !ipfs_bin) {
-          console.log(('=== WARNING: ' + self.configIpfsBin + ' not found or not in the path. Guessing ~/go/bin/ipfs for path').yellow);
-          ipfs_bin = "~/go/bin/ipfs";
-        }
-
-        return callback(null, ipfs_bin);
-      },
-      function runCommand(ipfs_bin, callback) {
-        let cmd = `"${ipfs_bin}" add -r ${self.buildDir}`;
-        console.log(("=== adding " + self.buildDir + " to ipfs").green);
-        console.log(cmd.green);
-        let result = shelljs.exec(cmd);
-
-        return callback(null, result);
-      },
-      function getHashFromOutput(result, callback) {
-        let rows = result.output.split("\n");
-        let dir_row = rows[rows.length - 2];
-        let dir_hash = dir_row.split(" ")[1];
-
-        return callback(null, dir_hash);
-      },
-      function printUrls(dir_hash, callback) {
-        console.log(("=== DApp available at http://localhost:8080/ipfs/" + dir_hash + "/").green);
-        console.log(("=== DApp available at http://gateway.ipfs.io/ipfs/" + dir_hash + "/").green);
-
-        return callback();
-      }
-    ], function (err, _result) {
-      if (err) {
-        console.log("error uploading to ipfs".red);
-        console.log(err);
-      }
-    });
+    return new Promise((resolve, reject) => {
+      console.log("deploying!");
+      let self = this;
+      async.waterfall([
+        function findBinary(callback) {
+          let ipfs_bin = shelljs.which(self.configIpfsBin);
+
+          if (ipfs_bin === 'ipfs not found' || !ipfs_bin) {
+            console.log(('=== WARNING: ' + self.configIpfsBin + ' not found or not in the path. Guessing ~/go/bin/ipfs for path').yellow);
+            ipfs_bin = "~/go/bin/ipfs";
+          }
+
+          callback(null, ipfs_bin);
+        },
+        function runCommand(ipfs_bin, callback) {
+          let cmd = `"${ipfs_bin}" add -r ${self.buildDir}`;
+          console.log(("=== adding " + self.buildDir + " to ipfs").green);
+          console.log(cmd.green);
+          shelljs.exec(cmd, function(code, stdout, stderr){
+            callback(stderr, stdout);
+          });
+        },
+        function getHashFromOutput(result, callback) {
+          let rows = result.split("\n");
+          let dir_row = rows[rows.length - 2];
+          let dir_hash = dir_row.split(" ")[1];
+
+          callback(null, dir_hash);
+        },
+        function printUrls(dir_hash, callback) {
+          console.log(("=== DApp available at http://localhost:8080/ipfs/" + dir_hash + "/").green);
+          console.log(("=== DApp available at http://gateway.ipfs.io/ipfs/" + dir_hash + "/").green);
+
+          callback();
+        }
+      ], function (err, _result) {
+        if (err) {
+          console.log("error uploading to ipfs".red);
+          console.log(err);
+          reject(err);
+        }
+        else resolve('successfully uploaded to ipfs');
+      });
+    });
   }

 }

 module.exports = IPFS;
```
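The IPFS uploader keeps its hash-extraction logic (the directory hash is the second column of the second-to-last line printed by `ipfs add -r`), but `deploy()` now returns a Promise and `shelljs.exec` is called with a callback instead of blocking. A small sketch of just the output parsing, using illustrative sample output rather than real hashes:

```js
// Sketch only: how the directory hash is pulled out of `ipfs add -r` output.
// The sample output below is illustrative; real hashes and file names differ.
const sampleOutput = [
  'added QmSampleFileHash111111111111111111111111 dist/index.html',
  'added QmSampleFileHash222222222222222222222222 dist/js/app.js',
  'added QmSampleDirHash3333333333333333333333333 dist',
  ''                                     // `ipfs add` output ends with a newline
].join('\n');

function getDirHash(output) {
  let rows = output.split('\n');
  let dir_row = rows[rows.length - 2];   // last non-empty line: the dist/ entry
  return dir_row.split(' ')[1];          // second column is the hash
}

const dirHash = getDirHash(sampleOutput);
console.log('=== DApp available at http://localhost:8080/ipfs/' + dirHash + '/');
console.log('=== DApp available at http://gateway.ipfs.io/ipfs/' + dirHash + '/');
```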
```diff
@@ -9,46 +9,51 @@ class Swarm {
   }

   deploy() {
-    let self = this;
-    async.waterfall([
-      function findBinary(callback) {
-        let swarm_bin = shelljs.which('swarm');
-
-        if (swarm_bin === 'swarm not found' || !swarm_bin) {
-          console.log('=== WARNING: Swarm not in an executable path. Guessing ~/go/bin/swarm for path'.yellow);
-          swarm_bin = "~/go/bin/swarm";
-        }
-
-        return callback(null, swarm_bin);
-      },
-      function runCommand(swarm_bin, callback) {
-        let cmd = `"${swarm_bin}" --defaultpath ${self.buildDir} index.html --recursive up ${self.buildDir}`;
-        console.log(("=== adding " + self.buildDir + " to swarm").green);
-        console.log(cmd.green);
-        let result = shelljs.exec(cmd);
-
-        return callback(null, result);
-      },
-      function getHashFromOutput(result, callback) {
-        if (result.code !== 0) {
-          return callback("couldn't upload, is the swarm daemon running?");
-        }
-
-        let rows = result.output.split("\n");
-        let dir_hash = rows.reverse()[1];
-
-        return callback(null, dir_hash);
-      },
-      function printUrls(dir_hash, callback) {
-        console.log(("=== DApp available at http://localhost:8500/bzz:/" + dir_hash + "/").green);
-
-        return callback();
-      }
-    ], function (err, _result) {
-      if (err) {
-        console.log("error uploading to swarm".red);
-        console.log(err);
-      }
-    });
+    return new Promise((resolve, reject) => {
+      let self = this;
+      async.waterfall([
+        function findBinary(callback) {
+          let swarm_bin = shelljs.which('swarm');
+
+          if (swarm_bin === 'swarm not found' || !swarm_bin) {
+            console.log('=== WARNING: Swarm not in an executable path. Guessing ~/go/bin/swarm for path'.yellow);
+            swarm_bin = "~/go/bin/swarm";
+          }
+
+          callback(null, swarm_bin);
+        },
+        function runCommand(swarm_bin, callback) {
+          let cmd = `"${swarm_bin}" --defaultpath ${self.buildDir} index.html --recursive up ${self.buildDir}`;
+          console.log(("=== adding " + self.buildDir + " to swarm").green);
+          console.log(cmd.green);
+          shelljs.exec(cmd, function(code, stdout, stderr){
+            callback(stderr, {code: code, output: stdout});
+          });
+        },
+        function getHashFromOutput(result, callback) {
+          if (result.code !== 0) {
+            callback("couldn't upload, is the swarm daemon running?");
+          }
+          else{
+            let rows = result.output.split("\n");
+            let dir_hash = rows.reverse()[1];
+
+            callback(null, dir_hash);
+          }
+        },
+        function printUrls(dir_hash, callback) {
+          console.log(("=== DApp available at http://localhost:8500/bzz:/" + dir_hash + "/").green);
+
+          callback();
+        }
+      ], function (err, _result) {
+        if (err) {
+          console.log("error uploading to swarm".red);
+          console.log(err);
+          reject(err);
+        }
+        else resolve('successfully uploaded to swarm');
+      });
+    });
   }
 }
```
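The Swarm uploader gets the same treatment: `deploy()` wraps the waterfall in a Promise, and the async `shelljs.exec` callback repackages the exit code and stdout as `{code, output}` so the existing exit-code check and hash parsing keep working. A sketch of that shape, where `fakeExec` is a hypothetical stand-in for `shelljs.exec(cmd, callback)`:

```js
// Sketch only: wrapping an async.waterfall in a Promise, with an exit-code
// check like the swarm uploader's. Swap fakeExec for the real shelljs.exec.
const async = require('async');

function fakeExec(cmd, cb) {
  // pretend the swarm CLI printed a hash and exited cleanly
  setTimeout(() => cb(0, 'uploading...\nabc123fakehash\n', ''), 50);
}

function deploy(cmd) {
  return new Promise((resolve, reject) => {
    async.waterfall([
      function runCommand(callback) {
        fakeExec(cmd, function (code, stdout, stderr) {
          // keep the old { code, output } shape the parsing step expects
          callback(stderr || null, { code: code, output: stdout });
        });
      },
      function getHashFromOutput(result, callback) {
        if (result.code !== 0) {
          return callback("couldn't upload, is the swarm daemon running?");
        }
        let rows = result.output.split('\n');
        callback(null, rows.reverse()[1]); // hash is on the last non-empty line
      }
    ], function (err, dirHash) {
      if (err) {
        return reject(err);
      }
      resolve(dirHash);
    });
  });
}

deploy('swarm --recursive up dist/').then(console.log).catch(console.error);
```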
Test dapp entry script (file path not shown in this view); contract imports added for the upload test:

```diff
@@ -1,3 +1,12 @@
 import $ from 'jquery';
+import AlreadyDeployedToken from 'Embark/contracts/AlreadyDeployedToken';
+import AnotherStorage from 'Embark/contracts/AnotherStorage';
+import async from 'async';
+import MyToken from 'Embark/contracts/MyToken';
+import MyToken2 from 'Embark/contracts/MyToken2';
+import SimpleStorage from 'Embark/contracts/SimpleStorage';
+import Token from 'Embark/contracts/Token';
+

 $(document).ready(function() {
```