do a process for each file

Jonathan Rainville 2018-05-10 10:43:10 -04:00
parent ed1d0ce67c
commit 31e82e31d0
3 changed files with 1949 additions and 1434 deletions


@@ -3,7 +3,6 @@ const async = require('async');
const child_process = require('child_process');
const utils = require('../utils/utils.js');
const constants = require('../constants');
const File = require('../core/file');
require("babel-preset-react");
require("babel-preset-es2015");
@@ -53,12 +52,9 @@ class Pipeline {
        }, next);
      },
      function assetFileWrite(next) {
        // limit:1 due to issues when downloading required files such as web3.js
        async.eachOfLimit(self.assetFiles, 1, function (files, targetFile, cb) {
          // limit:1 due to issues when downloading required files such as web3.js
          async.mapLimit(files, 1,
        async.eachOf(self.assetFiles, function (files, targetFile, cb) {
          async.map(files,
            function (file, fileCb) {
              file = new File(file); // Re-instantiate a File as through the process, we lose its prototype
              self.logger.trace("reading " + file.filename);
              if (file.filename.indexOf('.js') < 0) {
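The "limit:1" comment in this hunk refers to the async library's concurrency-limited iterators. As a minimal, self-contained sketch (the asset map below is made up for illustration, not Embark's real self.assetFiles), this is how eachOfLimit/mapLimit with a limit of 1 walk the asset files strictly one at a time, so a step that downloads a shared resource such as web3.js never runs twice concurrently:

const async = require('async');

// Hypothetical asset map shaped like self.assetFiles: target bundle -> list of source files.
const assetFiles = {
  'js/app.js': [{ filename: 'app/index.js' }, { filename: 'app/service.js' }],
  'css/app.css': [{ filename: 'app/app.css' }]
};

// With a limit of 1, only one target (and one file within it) is processed at a time.
async.eachOfLimit(assetFiles, 1, function (files, targetFile, cb) {
  async.mapLimit(files, 1, function (file, fileCb) {
    // Stand-in for the real read/build step.
    setImmediate(function () {
      fileCb(null, { filename: file.filename, content: '/* built */' });
    });
  }, function (err, contents) {
    if (err) return cb(err);
    console.log(targetFile + ': ' + contents.length + ' file(s) processed');
    cb();
  });
}, function (err) {
  if (err) console.error(err);
});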
@@ -92,16 +88,6 @@ class Pipeline {
              });
            },
            function checkFile(next) {
              fs.access('./.embark/' + file.filename, (err) => {
                if (err) {
                  self.logger.error("couldn't find file: " + file.filename);
                  return next("couldn't find file: " + file.filename);
                }
                next();
              });
            },
            function readFile(next) {
              fs.readFile('./.embark/' + file.filename, (err, data) => {
                if (err) {

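The commit message ("do a process for each file") and the `file = new File(file)` line go together: when a file object crosses a child-process boundary it is serialized to plain data, so its prototype methods are gone on the other side and it has to be wrapped in a File again. A rough, self-contained sketch of that effect, using a simplified stand-in for Embark's ../core/file class and a JSON round trip to simulate the IPC serialization:

// Simplified stand-in for Embark's File class (an assumption, not the real ../core/file module).
class File {
  constructor(options) {
    this.filename = options.filename;
  }
  isJs() {
    return this.filename.indexOf('.js') >= 0;
  }
}

const original = new File({ filename: 'app/index.js' });

// Objects sent over child_process IPC are serialized much like JSON:
// only own data properties survive, prototype methods do not.
const afterProcessBoundary = JSON.parse(JSON.stringify(original));

console.log(afterProcessBoundary instanceof File); // false
console.log(typeof afterProcessBoundary.isJs);     // 'undefined'

// Re-instantiating restores the prototype, which is what
// `file = new File(file)` does in assetFileWrite.
const restored = new File(afterProcessBoundary);
console.log(restored.isJs()); // true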
package-lock.json (generated, 3376 changed lines): file diff suppressed because it is too large.

package.json

@@ -48,7 +48,6 @@
"ipfs-api": "17.2.4",
"live-plugin-manager": "https://github.com/iurimatias/live-plugin-manager.git",
"merge": "^1.2.0",
"mocha": "^2.2.5",
"orbit-db": "^0.17.3",
"parse-json": "^4.0.0",
"promptly": "^2.1.0",