Reputation: 12538
I'm a fan of the files object format
files: {
  'dest/a.js': ['src/aa.js', 'src/aaa.js'], // key: value
  'dest/a1.js': ['src/aa1.js', 'src/aaa1.js'],
}
I have a gulp task that concats source files, like this:
gulp.task('cat', function() {
  gulp.src( <value-goes-here> )
    .
    <many pipeline steps>
    .
    .pipe(concat(<key-goes-here>))
    .pipe(gulp.dest('target/'))
    .
    <more pipeline steps to be run on 'dest/a.js' and 'dest/a1.js'>
    .
});
Is there a streaming way to extend this task so that I get one bundle file for each key-value pair in files?
I would like to NOT create one task per key-value pair, as I want to keep piping more steps even after the last .pipe(gulp.dest('target/')).
If I'm approaching this problem in the wrong way, is there a better way?
Thank you in advance!
Rob Rich's answer works. Here's a working version:
var Q = require('q');
var gulp = require('gulp');
var concat = require('gulp-concat');

var files = {
  'a.js': ['src/aa.js', 'src/aaa.js'],
  'a1.js': ['src/aa1.js', 'src/aaa1.js'],
};

gulp.task('cat', function() {
  // Build one stream per key and wrap each in a promise that resolves
  // when that stream has finished writing its bundle.
  var promises = Object.keys(files).map(function (key) {
    var deferred = Q.defer();
    var val = files[key];
    console.log(val);
    gulp.src(val)
      .pipe(concat(key))
      .pipe(gulp.dest('dest/'))
      .on('end', function () {
        deferred.resolve();
      });
    return deferred.promise;
  });
  // The task completes once every bundle has been written.
  return Q.all(promises);
});
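For the "more pipeline steps" part, each per-key stream can simply keep piping after gulp.dest, since gulp.dest re-emits the files it writes. A minimal sketch, where gulp-uglify and gulp-rename are hypothetical stand-ins for whatever extra steps you actually need:

// Hypothetical continuation of each per-key stream; uglify/rename are
// only illustrative placeholders for the real "<more pipeline steps>".
var uglify = require('gulp-uglify');
var rename = require('gulp-rename');

gulp.src(val)
  .pipe(concat(key))
  .pipe(gulp.dest('dest/'))              // writes dest/a.js, dest/a1.js
  .pipe(uglify())                        // keep piping the same files
  .pipe(rename({ extname: '.min.js' }))
  .pipe(gulp.dest('dest/'))              // writes dest/a.min.js, dest/a1.min.js
  .on('end', function () {
    deferred.resolve();
  });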
Upvotes: 0
Views: 758
Reputation: 13205
Try this:
var Q = require('q');

gulp.task('cat', function() {
  var promises = Object.keys(files).map(function (key) {
    var deferred = Q.defer();
    var val = files[key];
    gulp.src(val)
      .
      <many pipeline steps>
      .
      .pipe(concat(key))
      .pipe(gulp.dest('target/'))
      .
      <more pipeline steps to be run on 'dest/a.js' and 'dest/a1.js'>
      .
      .on('end', function () {
        deferred.resolve();
      });
    return deferred.promise;
  });
  return Q.all(promises);
});
You can also accomplish a similar result with streams instead of promises, using the combined-stream
or stream-combiner
packages.
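A minimal sketch of the streams-based idea, using the merge-stream package (an assumption on my part; it's a different package from the two named above) so the task returns one merged stream that gulp can wait on:

var gulp = require('gulp');
var concat = require('gulp-concat');
var merge = require('merge-stream'); // assumed to be installed

var files = {
  'a.js': ['src/aa.js', 'src/aaa.js'],
  'a1.js': ['src/aa1.js', 'src/aaa1.js'],
};

gulp.task('cat', function () {
  // One stream per key, each writing its own bundle.
  var streams = Object.keys(files).map(function (key) {
    return gulp.src(files[key])
      .pipe(concat(key))
      .pipe(gulp.dest('dest/'));
  });
  // Returning the merged stream tells gulp when all bundles are done.
  return merge.apply(null, streams);
});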
Upvotes: 2