mirror of https://github.com/embarklabs/embark.git
Merge pull request #609 from embark-framework/refactor_storage_component
Refactor storage component
Commit 54f8953430
js/embark.js (34 changed lines)

@@ -1,3 +1,5 @@
+import {detectSeries} from 'async';
+
 var EmbarkJS = {
   onReady: function (cb) {
     if (typeof (__embarkContext) === 'undefined') {
@@ -300,6 +302,38 @@ EmbarkJS.Storage.isAvailable = function () {
   return this.currentStorage.isAvailable();
 };
 
+EmbarkJS.Storage.setProviders = async function (dappConnOptions) {
+  try {
+    await detectSeries(dappConnOptions, async (dappConn, callback) => {
+      if(dappConn === '$BZZ' || dappConn.provider === 'swarm'){
+        let options = dappConn;
+        if(dappConn === '$BZZ') options = {"useOnlyGivenProvider": true};
+        try{
+          await EmbarkJS.Storage.setProvider('swarm', options);
+          let isAvailable = await EmbarkJS.Storage.isAvailable();
+          callback(null, isAvailable);
+        }catch(err){
+          callback(null, false); // catch errors for when bzz object not initialised but config has requested it to be used
+        }
+      }
+      else if(dappConn.provider === 'ipfs') {
+        // set the provider then check the connection, if true, use that provider, else, check next provider
+        try{
+          await EmbarkJS.Storage.setProvider('ipfs', dappConn);
+          let isAvailable = await EmbarkJS.Storage.isAvailable();
+          callback(null, isAvailable);
+        } catch(err) {
+          callback(null, false); // catch but keep looping by not passing err to callback
+        }
+      }
+    }, function(err, result){
+      if(!result) throw new Error('Could not connect to a storage provider using any of the dappConnections in the storage config');
+    });
+  } catch (err) {
+    throw new Error('Failed to connect to a storage provider: ' + err.message);
+  }
+};
+
 EmbarkJS.Messages = {};
 
 EmbarkJS.Messages.Providers = {};
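A note on the new EmbarkJS.Storage.setProviders: it walks the configured dappConnections in series and keeps the first provider whose availability check passes. A sketch of the kind of argument it expects, for a DApp where EmbarkJS is in scope — the hosts and ports are illustrative, not taken from this commit:

    // Illustrative dappConnection list: '$BZZ' means "use the swarm provider already
    // injected by the node", the other entries name a provider plus connection details.
    const dappConnOptions = [
      '$BZZ',
      {provider: 'swarm', host: 'localhost', port: 8500},
      {provider: 'ipfs', host: 'localhost', port: 5001}
    ];

    // Tries each entry in order; throws if none of them reports itself available.
    EmbarkJS.Storage.setProviders(dappConnOptions).catch(err => console.error(err.message));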
@@ -253,17 +253,9 @@ class Engine {
   }
 
   storageService(_options) {
-    this.registerModule('storage', {
-      storageConfig: this.config.storageConfig,
-      webServerConfig: this.config.webServerConfig,
-      blockchainConfig: this.config.blockchainConfig,
-      host: _options.host,
-      port: _options.port,
-      servicesMonitor: this.servicesMonitor,
-      events: this.events,
-      logger: this.logger,
-      context: this.context
-    });
+    this.registerModule('storage', {plugins: this.plugins});
+    this.registerModule('ipfs');
+    this.registerModule('swarm');
   }
 
   web3Service(options) {
@@ -24,7 +24,7 @@ Logger.prototype.error = function () {
     return;
   }
   this.events.emit("log", "error", ...arguments);
-  this.logFunction(...Array.from(arguments).map(t => t.red));
+  this.logFunction(...Array.from(arguments).map(t => { return t ? t.red : t; }));
   this.writeToFile("[error]: ", ...arguments);
 };
 
@@ -33,7 +33,7 @@ Logger.prototype.warn = function () {
     return;
   }
   this.events.emit("log", "warning", ...arguments);
-  this.logFunction(...Array.from(arguments).map(t => t.yellow));
+  this.logFunction(...Array.from(arguments).map(t => { return t ? t.yellow : t; }));
   this.writeToFile("[warning]: ", ...arguments);
 };
 
@@ -42,7 +42,7 @@ Logger.prototype.info = function () {
     return;
  }
   this.events.emit("log", "info", ...arguments);
-  this.logFunction(...Array.from(arguments).map(t => t.green));
+  this.logFunction(...Array.from(arguments).map(t => { return t ? t.green : t; }));
   this.writeToFile("[info]: ", ...arguments);
 };
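All three logger hunks make the same fix: with a colors-style String prototype extension (which is what the .red/.green/.yellow usage elsewhere in this diff suggests), undefined has no .red getter and the old map callback would throw. A standalone sketch of the guarded mapping, assuming the colors package:

    require('colors'); // assumption: colors provides the .red/.yellow/.green string getters

    function colorize(args, color) {
      // Mirrors the patched Logger: colorize only values that exist,
      // pass null/undefined through instead of throwing a TypeError.
      return Array.from(args).map(t => { return t ? t[color] : t; });
    }

    console.log(...colorize(['first', undefined, 'third'], 'red'));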
lib/index.js (23 changed lines)

@@ -309,7 +309,6 @@ class Embark {
 
     let platform = engine.config.storageConfig.upload.provider;
 
-    let cmdPlugin;
     async.waterfall([
 
       function startServices(callback) {
@@ -324,34 +323,16 @@
         engine.startService("codeGenerator");
         callback();
       },
-      function setupStoragePlugin(callback) {
+      function listLoadedPlugin(callback) {
         let pluginList = engine.plugins.listPlugins();
         if (pluginList.length > 0) {
           engine.logger.info(__("loaded plugins") + ": " + pluginList.join(", "));
         }
-
-        // check use has input existing storage plugin
-        let cmdPlugins = engine.plugins.getPluginsFor('uploadCmds');
-
-        if (cmdPlugins.length > 0) {
-          cmdPlugin = cmdPlugins.find((pluginCmd) => {
-            return pluginCmd.uploadCmds.some(uploadCmd => {
-              return uploadCmd.cmd === platform;
-            });
-          });
-        }
-        if (!cmdPlugin) {
-          return callback({message: __('platform "{{platform}}" is specified as the upload provider, however no plugins have registered an upload command for "{{platform}}".', {platform: platform})});
-        }
         callback();
       },
       function deploy(callback) {
        engine.events.on('outputDone', function () {
-          cmdPlugin.uploadCmds[0].cb()
-            .then((success) => {
-              callback(null, success);
-            })
-            .catch(callback);
+          engine.events.request("storage:upload", callback);
         });
         engine.events.on('check:backOnline:Ethereum', function () {
           engine.logger.info(__('Ethereum node detected') + '..');
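The upload path is now event-driven: lib/index.js no longer holds a reference to the matching plugin, it just fires a 'storage:upload' request and whichever module registered the handler does the work. A simplified, runnable stand-in for that request/command-handler contract (not Embark's implementation, just the shape of it):

    // Minimal stand-in for the command-handler pattern used above.
    const handlers = {};
    const events = {
      setCommandHandler: (name, fn) => { handlers[name] = fn; },
      request: (name, cb) => handlers[name](cb)
    };

    // Provider side (the storage module): owns the decision of how to upload.
    events.setCommandHandler('storage:upload', (cb) => cb(null, 'successfully uploaded to ipfs'));

    // Consumer side (lib/index.js): fires the request once the build output is done.
    events.request('storage:upload', (err, result) => console.log(err || result));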
@@ -2,43 +2,42 @@ const UploadIPFS = require('./upload.js');
 const utils = require('../../utils/utils.js');
 const fs = require('../../core/fs.js');
 const IpfsApi = require('ipfs-api');
-const _ = require('underscore');
+const StorageProcessesLauncher = require('../../processes/storageProcesses/storageProcessesLauncher');
 
 class IPFS {
 
   constructor(embark, options) {
+    const self = this;
     this.logger = embark.logger;
     this.events = embark.events;
     this.buildDir = options.buildDir;
     this.storageConfig = embark.config.storageConfig;
-    this.host = options.host || this.storageConfig.upload.host;
-    this.port = options.port || this.storageConfig.upload.port;
-    this.protocol = options.protocol || this.storageConfig.upload.protocol;
     this.embark = embark;
-  }
-
-  commandlineDeploy() {
-    let upload_ipfs = new UploadIPFS({
-      buildDir: this.buildDir || 'dist/',
-      storageConfig: this.storageConfig.upload,
-      configIpfsBin: this.storageConfig.ipfs_bin || "ipfs"
+    this.webServerConfig = embark.config.webServerConfig;
+    this.blockchainConfig = embark.config.blockchainConfig;
+
+    if (!this.isIpfsEnabledInTheConfig()) {
+      return;
+    }
+
+    this.setServiceCheck();
+    this.addProviderToEmbarkJS();
+    this.addObjectToConsole();
+    this.registerUploadCommand();
+
+    this._checkService((err) => {
+      if (!err) {
+        return;
+      }
+      self.logger.info("IPFS node not found, attempting to start own node");
+      self.startProcess(() => {});
     });
 
-    this.embark.registerUploadCommand('ipfs', upload_ipfs.deploy.bind(upload_ipfs));
   }
 
   setServiceCheck() {
     let self = this;
 
-    let storageConfig = this.storageConfig;
-
-    if (!storageConfig.enabled) {
-      return;
-    }
-    if (_.findWhere(this.storageConfig.dappConnection, {'provider': 'ipfs'}) === undefined && (storageConfig.upload.provider !== 'ipfs' || storageConfig.available_providers.indexOf("ipfs") < 0)) {
-      return;
-    }
-
     self.events.on('check:backOnline:IPFS', function () {
       self.logger.info(__('IPFS node detected') + '..');
     });
@@ -48,14 +47,7 @@ class IPFS {
     });
 
     self.events.request("services:register", 'IPFS', function (cb) {
-      let url = (self.protocol || 'http') + '://' + self.host + ':' + self.port + '/api/v0/version';
-      self.logger.trace(`Checking IPFS version on ${url}...`);
-      if(self.protocol !== 'https'){
-        utils.httpGetJson(url, versionCb);
-      } else {
-        utils.httpsGetJson(url, versionCb);
-      }
-      function versionCb(err, body) {
+      self._checkService((err, body) => {
         if (err) {
           self.logger.trace("IPFS unavailable");
           return cb({name: "IPFS ", status: 'off'});
@@ -66,20 +58,29 @@ class IPFS {
         }
         self.logger.trace("IPFS available");
         return cb({name: "IPFS ", status: 'on'});
-      }
+      });
     });
   }
 
+  _getNodeUrl() {
+    if (this.storageConfig.upload.provider === 'ipfs') {
+      return utils.buildUrlFromConfig(this.storageConfig.upload) + '/api/v0/version';
+    }
+
+    for (let connection of this.storageConfig.dappConnection) {
+      if (connection.provider === 'ipfs') {
+        return utils.buildUrlFromConfig(connection) + '/api/v0/version';
+      }
+    }
+  }
+
+  _checkService(cb) {
+    let url = this._getNodeUrl();
+    utils.getJson(url, cb);
+  }
+
   addProviderToEmbarkJS() {
     const self = this;
-    // TODO: make this a shouldAdd condition
-    if (this.storageConfig === {}) {
-      return;
-    }
-
-    if (this.storageConfig.available_providers.indexOf('ipfs') < 0 || _.findWhere(this.storageConfig.dappConnection, {'provider': 'ipfs'}) === undefined || this.storageConfig.enabled !== true) {
-      return;
-    }
 
     self.events.request("version:get:ipfs-api", function(ipfsApiVersion) {
       let currentIpfsApiVersion = require('../../../package.json').dependencies["ipfs-api"];
@@ -102,6 +103,37 @@ class IPFS {
       this.events.emit("runcode:register", "ipfs", ipfs);
     }
 
+  startProcess(callback) {
+    let self = this;
+    const storageProcessesLauncher = new StorageProcessesLauncher({
+      logger: self.logger,
+      events: self.events,
+      storageConfig: self.storageConfig,
+      webServerConfig: self.webServerConfig,
+      blockchainConfig: self.blockchainConfig
+    });
+    self.logger.trace(`Storage module: Launching ipfs process...`);
+    return storageProcessesLauncher.launchProcess('ipfs', callback);
+  }
+
+  registerUploadCommand() {
+    const self = this;
+    this.embark.registerUploadCommand('ipfs', (cb) => {
+      let upload_ipfs = new UploadIPFS({
+        buildDir: self.buildDir || 'dist/',
+        storageConfig: self.storageConfig,
+        configIpfsBin: self.storageConfig.ipfs_bin || "ipfs"
+      });
+
+      upload_ipfs.deploy(cb);
+    });
+  }
+
+  isIpfsEnabledInTheConfig() {
+    let {enabled, available_providers, dappConnection} = this.storageConfig;
+    return enabled && (available_providers.indexOf('ipfs') > 0 || dappConnection.find(c => c.provider === 'ipfs'));
+  }
+
 }
 
 module.exports = IPFS;
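For reference, a standalone sketch of how the new isIpfsEnabledInTheConfig() reads a storage config; the sample values are invented. Note that available_providers.indexOf('ipfs') > 0 is false when 'ipfs' is the first entry, so in that case the dappConnection lookup is what enables the module:

    // Invented storage config, for illustration only.
    const storageConfig = {
      enabled: true,
      available_providers: ['ipfs', 'swarm'],
      dappConnection: [{provider: 'ipfs', host: 'localhost', port: 5001}]
    };

    // Same expression the module uses to decide whether to wire itself up.
    const {enabled, available_providers, dappConnection} = storageConfig;
    const ipfsEnabled = enabled &&
      (available_providers.indexOf('ipfs') > 0 || dappConnection.find(c => c.provider === 'ipfs'));

    console.log(Boolean(ipfsEnabled)); // true, via the dappConnection entry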
@@ -11,51 +11,49 @@ class IPFS {
     this.configIpfsBin = this.storageConfig.ipfs_bin || "ipfs";
   }
 
-  deploy() {
-    return new Promise((resolve, reject) => {
-      console.log("deploying to ipfs!");
-      let self = this;
-      async.waterfall([
-        function findBinary(callback) {
-          let ipfs_bin = shelljs.which(self.configIpfsBin);
-
+  deploy(cb) {
+    console.log("deploying to ipfs!");
+    let self = this;
+    async.waterfall([
+      function findBinary(callback) {
+        let ipfs_bin = shelljs.which(self.configIpfsBin);
+
         if (ipfs_bin === 'ipfs not found' || !ipfs_bin) {
           console.log(('=== WARNING: ' + self.configIpfsBin + ' ' + __('not found or not in the path. Guessing %s for path', '~/go/bin/ipfs')).yellow);
           ipfs_bin = "~/go/bin/ipfs";
-          }
-
-          callback(null, ipfs_bin);
-        },
-        function runCommand(ipfs_bin, callback) {
-          let cmd = `"${ipfs_bin}" add -r ${self.buildDir}`;
-          console.log(("=== " + __("adding %s to ipfs", self.buildDir)).green);
-          console.debug(cmd);
-          shelljs.exec(cmd, {silent:true}, function(code, stdout, stderr){ // {silent:true}: don't echo cmd output so it can be controlled via logLevel
-            console.log(stdout.green);
-            callback(stderr, stdout);
-          });
-        },
-        function getHashFromOutput(result, callback) {
-          let rows = result.split("\n");
-          let dir_row = rows[rows.length - 2];
-          let dir_hash = dir_row.split(" ")[1];
-
-          callback(null, dir_hash);
-        },
-        function printUrls(dir_hash, callback) {
-          console.log(("=== " + __("DApp available at") + " http://localhost:8080/ipfs/" + dir_hash + "/").green);
-          console.log(("=== " + __("DApp available at") + " http://ipfs.infura.io/ipfs/" + dir_hash + "/").green);
-
-          callback();
         }
-      ], function (err, _result) {
-        if (err) {
-          console.log(__("error uploading to ipfs").red);
-          console.log(err);
-          reject(err);
-        }
-        else resolve(__('successfully uploaded to ipfs'));
-      });
+
+        callback(null, ipfs_bin);
+      },
+      function runCommand(ipfs_bin, callback) {
+        let cmd = `"${ipfs_bin}" add -r ${self.buildDir}`;
+        console.log(("=== " + __("adding %s to ipfs", self.buildDir)).green);
+        console.debug(cmd);
+        shelljs.exec(cmd, {silent:true}, function(code, stdout, stderr){ // {silent:true}: don't echo cmd output so it can be controlled via logLevel
+          console.log(stdout.green);
+          callback(stderr, stdout);
+        });
+      },
+      function getHashFromOutput(result, callback) {
+        let rows = result.split("\n");
+        let dir_row = rows[rows.length - 2];
+        let dir_hash = dir_row.split(" ")[1];
+
+        callback(null, dir_hash);
+      },
+      function printUrls(dir_hash, callback) {
+        console.log(("=== " + __("DApp available at") + " http://localhost:8080/ipfs/" + dir_hash + "/").green);
+        console.log(("=== " + __("DApp available at") + " http://ipfs.infura.io/ipfs/" + dir_hash + "/").green);
+
+        callback();
+      }
+    ], function (err, _result) {
+      if (err) {
+        console.log(__("error uploading to ipfs").red);
+        console.log(err);
+        cb(err);
+      }
+      else cb(null, __('successfully uploaded to ipfs'));
     });
   }
 }
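The getHashFromOutput step depends on the shape of `ipfs add -r` output: the last non-empty line describes the directory, and its second whitespace-separated field is the hash. A self-contained sketch with made-up output:

    // Made-up stdout of `ipfs add -r dist/`; real hashes differ.
    const stdout = [
      'added QmExampleFileHash1111111111111111111111 dist/index.html',
      'added QmExampleDirHash222222222222222222222222 dist',
      ''
    ].join("\n");

    // Same extraction the upload waterfall performs.
    const rows = stdout.split("\n");
    const dir_row = rows[rows.length - 2];
    const dir_hash = dir_row.split(" ")[1];
    console.log(dir_hash); // QmExampleDirHash222222222222222222222222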
@@ -1,37 +0,0 @@
-/* global EmbarkJS */
-
-import {detectSeries} from 'async';
-
-let __embarkStorage = {};
-
-__embarkStorage.setProviders = async function (dappConnOptions) {
-  try {
-    await detectSeries(dappConnOptions, async (dappConn, callback) => {
-      if(dappConn === '$BZZ' || dappConn.provider === 'swarm'){
-        let options = dappConn;
-        if(dappConn === '$BZZ') options = {"useOnlyGivenProvider": true};
-        try{
-          await EmbarkJS.Storage.setProvider('swarm', options);
-          let isAvailable = await EmbarkJS.Storage.isAvailable();
-          callback(null, isAvailable);
-        }catch(err){
-          callback(null, false); // catch errors for when bzz object not initialised but config has requested it to be used
-        }
-      }
-      else if(dappConn.provider === 'ipfs') {
-        // set the provider then check the connection, if true, use that provider, else, check next provider
-        try{
-          await EmbarkJS.Storage.setProvider('ipfs', dappConn);
-          let isAvailable = await EmbarkJS.Storage.isAvailable();
-          callback(null, isAvailable);
-        } catch(err) {
-          callback(null, false); // catch but keep looping by not passing err to callback
-        }
-      }
-    }, function(err, result){
-      if(!result) throw new Error('Could not connect to a storage provider using any of the dappConnections in the storage config');
-    });
-  } catch (err) {
-    throw new Error('Failed to connect to a storage provider: ' + err.message);
-  }
-};
@@ -1,226 +1,42 @@
-const utils = require('../../utils/utils.js');
-const fs = require('../../core/fs.js');
-const _ = require('underscore');
-const async = require('async');
-const StorageProcessesLauncher = require('../../processes/storageProcesses/storageProcessesLauncher');
-const constants = require('../../constants');
-
 class Storage {
   constructor(embark, options){
-    this._embark = embark;
-    this._options = options;
-    this._storageConfig = options.storageConfig;
-    this._webServerConfig = options.webServerConfig;
-    this._blockchainConfig = options.blockchainConfig;
-    this._servicesMonitor = options.servicesMonitor;
-    this._events = options.events;
-    this._logger = options.logger;
-
-    if(!this._storageConfig.enabled) return;
-
-    // filter list of dapp connections based on available_providers set in config
-    let hasSwarm = _.contains(this._storageConfig.available_providers, 'swarm'); // don't need to eval this in every loop iteration
-    // contains valid dapp storage providers
-    this._validDappProviders = _.filter(this._storageConfig.dappConnection, (conn) => {
-      return _.contains(this._storageConfig.available_providers, conn.provider) || (conn === '$BZZ' && hasSwarm);
-    });
-
-    this.initStorageForEmbark();
-    this.initStorageForDapp();
-
-    // don't start storage processes on build command, only on upload or run
-    if(_.contains(options.context, constants.contexts.upload) || _.contains(options.context, constants.contexts.run)){
-      this.startStorageProcesses();
-    }
-  }
-
-  _checkStorageEndpoint(platform, callback) {
-    let checkFn;
-    let self = this;
-    self._logger.trace(`Storage module: Checking ${platform} availability...`);
-    _.find(self._servicesMonitor.checkList, (value, key) => {
-      if(key.toLowerCase() === platform.toLowerCase()){
-        checkFn = value;
-        return true;
-      }
-    });
-    if (!checkFn || typeof checkFn.fn !== 'function') {
-      self._logger.trace(`Storage module: Check for ${platform} node does not exist.`);
-      return callback();
-    }
-
-    checkFn.fn(function (serviceCheckResult) {
-      if (!serviceCheckResult.status || serviceCheckResult.status === 'off') {
-        self._logger.trace(`Storage module: ${platform} node not available.`);
-        return callback('No node');
-      }
-      callback();
-    });
-  }
-
-  _startStorageNode(platform, callback) {
-    let self = this;
-    const storageProcessesLauncher = new StorageProcessesLauncher({
-      logger: self._logger,
-      events: self._events,
-      storageConfig: self._storageConfig,
-      webServerConfig: self._webServerConfig,
-      blockchainConfig: self._blockchainConfig
-    });
-    self._logger.trace(`Storage module: Launching ${platform} process...`);
-    return storageProcessesLauncher.launchProcess(platform.toLowerCase(), (err) => {
-      if (err) {
-        return callback(err);
-      }
-      callback();
-    });
-  }
-
-  /// Initializes a storage provider for Embark upload
-  initStorageForEmbark(){
-    let storageProviderCls = require(`../${this._storageConfig.upload.provider}/index.js`);
-    let uploadProvider = new storageProviderCls(this._embark, this._options); /*eslint no-new: "off"*/
-
-    if(typeof uploadProvider.commandlineDeploy == 'function') uploadProvider.commandlineDeploy();
-    if(typeof uploadProvider.setServiceCheck == 'function') uploadProvider.setServiceCheck();
-    if(typeof uploadProvider.addObjectToConsole == 'function') uploadProvider.addObjectToConsole();
-  }
-
-  /**
-   * Initializes a storage provider for EmbarkJS
-   *
-   * @return {void}
-   */
-  initStorageForDapp(){
-    // now we need to add instantiate any dappConnection/available_providers storage providers to add
-    // their provider code to embarkjs
-    this._validDappProviders.forEach(dappConn => {
-      if(!dappConn.provider) return;
-      let storageProviderCls = require(`../${dappConn.provider}/index.js`);
-
-      // override options with dappConnection settings
-      let storageOptions = this._options;
-      storageOptions.protocol = dappConn.protocol;
-      storageOptions.host = dappConn.host;
-      storageOptions.port = dappConn.port;
-
-      // then instantiate the storage provdier class
-      let storageProvider = new storageProviderCls(this._embark, storageOptions); /*eslint no-new: "off"*/
-
-      // register the service check so we can use it to check if the process is running before spawning it
-      // check that it hasn't already been done above
-      if(dappConn.provider !== this._storageConfig.upload.provider){
-        if(typeof storageProvider.setServiceCheck == 'function') storageProvider.setServiceCheck();
-      }
-
-      // add __embarkSwarm and __embarkIPFS objects to EmbarkJS
-      if(typeof storageProvider.addProviderToEmbarkJS == 'function') storageProvider.addProviderToEmbarkJS();
-    });
-
-    // add the storage provider code (__embarkStorage) to embarkjs
-    this.addProviderToEmbarkJS();
-
-    // add the code to call setProviders in embarkjs after embark is ready
+    this.embark = embark;
+    this.storageConfig = embark.config.storageConfig;
+    this.plugins = options.plugins;
+
+    if (!this.storageConfig.enabled) return;
+
+    this.handleUploadCommand();
     this.addSetProviders();
   }
 
-  /**
-   * Adds the storage provider code (__embarkStorage) to embarkjs
-   *
-   * @returns {void}
-   */
-  addProviderToEmbarkJS(){
-    // TODO: make this a shouldAdd condition
-    if (this._storageConfig === {} || !this._storageConfig.dappConnection || !this._storageConfig.dappConnection.length) {
-      return;
-    }
-
-    let code = "\n" + fs.readFileSync(utils.joinPath(__dirname, 'embarkjs.js')).toString();
-
-    this._embark.addCodeToEmbarkJS(code);
+  handleUploadCommand() {
+    const self = this;
+    this.embark.events.setCommandHandler('storage:upload', (cb) => {
+      let platform = self.storageConfig.upload.provider;
+
+      let uploadCmds = self.plugins.getPluginsProperty('uploadCmds', 'uploadCmds');
+      for (let uploadCmd of uploadCmds) {
+        if (uploadCmd.cmd === platform) {
+          return uploadCmd.cb.call(uploadCmd.cb, cb);
+        }
+      }
+
+      cb({message: __('platform "{{platform}}" is specified as the upload provider, however no plugins have registered an upload command for "{{platform}}".', {platform: platform})});
+    });
   }
 
-  /**
-   * Adds the code to call setProviders in embarkjs after embark is ready
-   *
-   * @returns {void}
-   */
   addSetProviders() {
+    let code = `\nEmbarkJS.Storage.setProviders(${JSON.stringify(this.storageConfig.dappConnection || [])});`;
 
-    let code = `\n__embarkStorage.setProviders(${JSON.stringify(this._validDappProviders)});`;
     let shouldInit = (storageConfig) => {
-      return (this._validDappProviders !== undefined && this._validDappProviders.length > 0 && storageConfig.enabled === true);
+      return storageConfig.enabled;
     };
 
-    this._embark.addProviderInit('storage', code, shouldInit);
+    this.embark.addProviderInit('storage', code, shouldInit);
   }
 
-  checkStorageService(platform, url, callback) {
-    const self = this;
-
-    // start the upload storage node
-    self._checkStorageEndpoint(platform, function (err) {
-      if (!err) {
-        return callback(null);
-      }
-      self._startStorageNode(platform, (err) => {
-        if (err) {
-          return callback(err);
-        }
-        // Check endpoint again to see if really did start
-        self._checkStorageEndpoint(platform, (err) => {
-          if (err) {
-            return callback(err);
-          }
-          callback(null);
-        });
-      });
-    });
-  }
-
-  startStorageProcesses(){
-    let platform = this._storageConfig.upload.provider;
-    let self = this;
-    let withErrors = false;
-
-    async.waterfall([
-      function _checkStorageService(callback){
-        self.checkStorageService(platform, utils.buildUrlFromConfig(self._storageConfig.upload), (err) => {
-          // log error and continue
-          if(err){
-            self._logger.error(err);
-            withErrors = true;
-          }
-          callback(null);
-        });
-      },
-      function checkDappConnectionStorageService(callback){
-        // start any dappConnection storage nodes
-        async.each(self._validDappProviders, function(dappConn, cb) {
-          if(!dappConn.provider || dappConn.provider === platform) {
-            return cb(null);
-          } // don't start the process we've just started above
-
-          self.checkStorageService(dappConn.provider, utils.buildUrlFromConfig(dappConn), (err) => {
-            // log error and continue
-            if(err){
-              self._logger.error(err);
-              withErrors = true;
-            }
-            cb(null);
-          });
-        }, callback);
-      }
-    ], function (){
-      let strComplete = __('Finished starting all storage providers');
-      if(withErrors){
-        strComplete += ', ' + __('with errors.');
-        return self._logger.warn(strComplete);
-      }
-      self._logger.info(strComplete + '.');
-    });
-  }
 }
 
 module.exports = Storage;
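How the pieces fit after the refactor: each provider module calls embark.registerUploadCommand(name, fn), those registrations surface through plugins.getPluginsProperty('uploadCmds', 'uploadCmds'), and the 'storage:upload' handler above picks the entry matching storageConfig.upload.provider. A toy, self-contained sketch with invented registrations:

    // Invented stand-in for what getPluginsProperty('uploadCmds', 'uploadCmds') could return
    // once the ipfs and swarm modules have registered their upload commands.
    const uploadCmds = [
      {cmd: 'ipfs', cb: (done) => done(null, 'successfully uploaded to ipfs')},
      {cmd: 'swarm', cb: (done) => done(null, 'successfully uploaded to swarm')}
    ];

    function upload(platform, cb) {
      for (let uploadCmd of uploadCmds) {
        if (uploadCmd.cmd === platform) {
          return uploadCmd.cb.call(uploadCmd.cb, cb);
        }
      }
      cb({message: 'platform "' + platform + '" has no registered upload command'});
    }

    upload('ipfs', (err, result) => console.log(err ? err.message : result));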
@@ -2,49 +2,49 @@ const UploadSwarm = require('./upload.js');
 const utils = require('../../utils/utils.js');
 const fs = require('../../core/fs.js');
 const Web3Bzz = require('web3-bzz');
-const _ = require('underscore');
+const StorageProcessesLauncher = require('../../processes/storageProcesses/storageProcessesLauncher');
 
 class Swarm {
 
-  constructor(embark, options) {
+  constructor(embark, _options) {
     this.logger = embark.logger;
     this.events = embark.events;
-    this.buildDir = options.buildDir;
+    this.buildDir = embark.config.buildDir;
     this.storageConfig = embark.config.storageConfig;
-    this.host = options.host || this.storageConfig.host;
-    this.port = options.port || this.storageConfig.port;
+    this.host = this.storageConfig.host;
+    this.port = this.storageConfig.port;
     this.embark = embark;
+    this.webServerConfig = embark.config.webServerConfig;
+    this.blockchainConfig = embark.config.blockchainConfig;
 
-    this.providerUrl = utils.buildUrl(options.protocol || options.storageConfig.upload.protocol, options.host || options.storageConfig.upload.host, options.port || options.storageConfig.upload.port);
+    this.providerUrl = utils.buildUrl(this.storageConfig.upload.protocol, this.storageConfig.upload.host, this.storageConfig.upload.port);
 
-    this.getUrl = options.storageConfig.upload.getUrl || this.providerUrl + '/bzz:/';
+    this.getUrl = this.storageConfig.upload.getUrl || this.providerUrl + '/bzz:/';
+
+    if (!this.isSwarmEnabledInTheConfig()) {
+      return;
+    }
 
     this.bzz = new Web3Bzz(this.providerUrl);
-  }
 
-  commandlineDeploy() {
-    this.upload_swarm = new UploadSwarm({
-      buildDir: this.buildDir || 'dist/',
-      storageConfig: this.storageConfig,
-      getUrl: this.getUrl,
-      bzz: this.bzz
+    this.setServiceCheck();
+    this.addProviderToEmbarkJS();
+    this.startProcess(() => {});
+    this.registerUploadCommand();
+
+    this._checkService((err) => {
+      if (!err) {
+        return;
+      }
+      self.logger.info("Swarm node not found, attempting to start own node");
+      self.startProcess(() => {});
     });
-
-    this.embark.registerUploadCommand('swarm', this.upload_swarm.deploy.bind(this.upload_swarm));
   }
 
   setServiceCheck() {
     let self = this;
 
-    let storageConfig = this.storageConfig;
-
-    if (!storageConfig.enabled) {
-      return;
-    }
-    if (_.findWhere(this.storageConfig.dappConnection, {'provider': 'swarm'}) === undefined && (storageConfig.upload.provider !== 'swarm' || storageConfig.available_providers.indexOf("swarm") < 0)) {
-      return;
-    }
-
     this.events.on('check:backOnline:Swarm', function () {
       self.logger.info(__('Swarm node detected...'));
     });
@@ -55,33 +55,63 @@ class Swarm {
 
     self.events.request("services:register", 'Swarm', function(cb){
       self.logger.trace(`Checking Swarm availability on ${self.bzz.currentProvider}...`);
-      self.bzz.isAvailable().then(result => {
-        self.logger.trace("Swarm " + (result ? '':'un') + "available");
+      self._checkService((err, result) => {
+        if (err) {
+          self.logger.trace("Check Swarm availability error: " + err);
+          return cb({name: "Swarm ", status: 'off'});
+        }
+        self.logger.trace("Swarm " + (result ? '':'on') + "available");
         return cb({name: "Swarm ", status: result ? 'on':'off'});
-      }).catch(err => {
-        self.logger.trace("Check Swarm availability error: " + err);
-        return cb({name: "Swarm ", status: 'off'});
       });
     });
   }
 
+  _checkService(cb) {
+    this.bzz.isAvailable().then(result => {
+      cb(null, result);
+    }).catch(cb);
+  }
+
   addProviderToEmbarkJS() {
-    // TODO: make this a shouldAdd condition
-    if (this.storageConfig === {}) {
-      return;
-    }
-
-    if (this.storageConfig.available_providers.indexOf('swarm') < 0 || _.findWhere(this.storageConfig.dappConnection, {'provider': 'swarm'}) === undefined || this.storageConfig.enabled !== true) {
-      return;
-    }
-
     let code = "";
     code += "\n" + fs.readFileSync(utils.joinPath(__dirname, 'embarkjs.js')).toString();
     code += "\nEmbarkJS.Storage.registerProvider('swarm', __embarkSwarm);";
 
     this.embark.addCodeToEmbarkJS(code);
   }
 
+  startProcess(callback) {
+    let self = this;
+    const storageProcessesLauncher = new StorageProcessesLauncher({
+      logger: self.logger,
+      events: self.events,
+      storageConfig: self.storageConfig,
+      webServerConfig: self.webServerConfig,
+      blockchainConfig: self.blockchainConfig
+    });
+    self.logger.trace(`Storage module: Launching swarm process...`);
+    return storageProcessesLauncher.launchProcess('swarm', callback);
+  }
+
+  registerUploadCommand(cb) {
+    const self = this;
+    this.embark.registerUploadCommand('swarm', () => {
+      let upload_swarm = new UploadSwarm({
+        buildDir: self.buildDir || 'dist/',
+        storageConfig: self.storageConfig,
+        getUrl: self.getUrl,
+        bzz: self.bzz
+      });
+
+      upload_swarm.deploy(cb);
+    });
+  }
+
+  isSwarmEnabledInTheConfig() {
+    let {enabled, available_providers, dappConnection} = this.storageConfig;
+    return enabled && (available_providers.indexOf('swarm') > 0 || dappConnection.find(c => c.provider === 'swarm'));
+  }
+
 }
 
 module.exports = Swarm;
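The Swarm availability probe now goes through a single _checkService(cb) helper shared by the constructor and the services monitor; it bridges web3-bzz's promise-based isAvailable() to a node-style callback. A minimal sketch, assuming a local gateway URL:

    const Web3Bzz = require('web3-bzz');
    const bzz = new Web3Bzz('http://localhost:8500'); // illustrative endpoint

    // Same shape as the module's _checkService: resolve -> (null, result), reject -> (err).
    function checkService(cb) {
      bzz.isAvailable().then(result => {
        cb(null, result);
      }).catch(cb);
    }

    checkService((err, available) => {
      console.log(err ? 'Swarm check failed: ' + err : 'Swarm available: ' + available);
    });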
@@ -6,45 +6,42 @@ class Swarm {
     this.options = options;
     this.buildDir = options.buildDir || 'dist/';
     this.bzz = options.bzz;
-    this.storageConfig = options.storageConfig;
     this.getUrl = options.getUrl;
   }
 
-  deploy() {
-    return new Promise((resolve, reject) => {
-      console.log(__("deploying to swarm!"));
-      let self = this;
-      let bzz = this.bzz;
-      async.waterfall([
-        function runCommand(callback) {
-          console.log(("=== " + __("adding %s to swarm", self.buildDir)).green);
-          bzz.upload({
-            path: self.buildDir, // path to data / file / directory
-            kind: "directory", // could also be "file" or "data"
-            defaultFile: "index.html" // optional, and only for kind === "directory"
-          })
+  deploy(cb) {
+    console.log(__("deploying to swarm!"));
+    let self = this;
+    let bzz = this.bzz;
+    async.waterfall([
+      function runCommand(callback) {
+        console.log(("=== " + __("adding %s to swarm", self.buildDir)).green);
+        bzz.upload({
+          path: self.buildDir, // path to data / file / directory
+          kind: "directory", // could also be "file" or "data"
+          defaultFile: "index.html" // optional, and only for kind === "directory"
+        })
           .then((success) => {
            callback(null, success);
           })
           .catch(callback);
       },
       function printUrls(dir_hash, callback) {
         if (!dir_hash) {
           return callback('No directory hash was returned');
         }
         console.log(("=== " + __("DApp available at") + ` ${self.getUrl}${dir_hash}/`).green);
         console.log(("=== " + __("DApp available at") + ` http://swarm-gateways.net/bzz:/${dir_hash}`).green);
 
         callback();
       }
     ], function (err, _result) {
       if (err) {
         console.log(__("error uploading to swarm").red);
         console.log(err);
-        return reject(err);
+        return cb(err);
       }
-      resolve(__('successfully uploaded to swarm'));
-    });
+      cb(null, __('successfully uploaded to swarm'));
     });
   }
 }
@@ -78,6 +78,13 @@ function httpsGetJson(url, callback) {
   });
 }
 
+function getJson(url, cb) {
+  if (url.indexOf('https') === 0) {
+    return httpsGetJson(url, cb);
+  }
+  httpGetJson(url, cb);
+}
+
 function pingEndpoint(host, port, type, protocol, origin, callback) {
   const options = {
     protocolVersion: 13,
@@ -321,6 +328,7 @@ module.exports = {
   httpsGet,
   httpGetJson,
   httpsGetJson,
+  getJson,
   hexToNumber,
   pingEndpoint,
   decodeParams,
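The new getJson helper just dispatches on the URL scheme to the existing httpGetJson/httpsGetJson. A usage sketch — the require path and endpoint are illustrative:

    const utils = require('./utils.js'); // illustrative path

    utils.getJson('http://localhost:5001/api/v0/version', (err, body) => {
      if (err) {
        return console.error('node unreachable:', err.message || err);
      }
      console.log('IPFS node version:', body.Version);
    });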