Reputation: 41
I'd like to know how I can resolve and move on to the next array value only after the first stream has ended. My current code just starts the streams for all URLs at once instead of downloading them one by one.
const downloadUrl = (url) => {
  console.log(url);
  console.log("Started :" + url.name);
  const writer = fs.createWriteStream(url.name);
  return new Promise(resolve => {
    return axios({
      url: url.url,
      responseType: 'stream',
    })
      .then(response => {
        const totalLength = response.headers['content-length'];
        const stream = response.data;
        let chunksDone = 0;
        stream.on('data', (chunk) => {
          const chunkLength = chunk.length;
          chunksDone += chunkLength;
          const percent = ((chunksDone / totalLength) * 100).toFixed(2);
          printProgress(url.name, percent)
          writer.write(new Buffer(chunk));
        })
        return streamToPromise(stream);
      })
      .then(() => {
        debugger;
        writer.end();
      })
      .catch(log);
  })
}

const streamToPromise = (stream) => {
  return new Promise(function (resolve, reject) {
    stream.on("end", resolve);
    stream.on("error", reject);
  })
}

const urls = ['some.mp3','someother.mp3'];

const promise = urls.reduce(function (acc, item) {
  return downloadUrl(item)
}, Promise.resolve);
Upvotes: 1
Views: 392
Reputation: 225174
return new Promise(resolve => {
  return axios({
    url: url.url,
    responseType: 'stream',
  })
  …
}
This returns a promise that never resolves. Drop the new Promise wrapper and return axios(…) directly.
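For example, a sketch of downloadUrl with the wrapper removed (keeping the rest of your .then chain as it is):

const downloadUrl = (url) => {
  console.log("Started :" + url.name);
  const writer = fs.createWriteStream(url.name);
  // Return the axios promise chain itself; it already settles when
  // streamToPromise resolves, so no manual resolve is needed.
  return axios({
    url: url.url,
    responseType: 'stream',
  })
    .then(response => {
      // …same progress/stream handling as in your code…
      return streamToPromise(response.data);
    })
    .then(() => {
      writer.end();
    })
    .catch(log);
}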
const promise = urls.reduce(function (acc, item) {
  return downloadUrl(item)
}, Promise.resolve);
This doesn’t wait for any promises. It looks like you were going for:
const promise = urls.reduce(function (acc, item) {
  return acc.then(() => downloadUrl(item));
}, Promise.resolve());
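With acc.then(…), each call to downloadUrl only starts after the previous download's promise has settled, so the files are fetched one by one, and promise resolves once the whole chain is done.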
Also, there shouldn't be a need to copy chunk with new Buffer before writing it.
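For instance, the data handler could write the chunk directly (a sketch; chunks from an axios stream response are already Buffers, and new Buffer is deprecated in recent Node.js versions anyway):

stream.on('data', (chunk) => {
  chunksDone += chunk.length;
  const percent = ((chunksDone / totalLength) * 100).toFixed(2);
  printProgress(url.name, percent);
  writer.write(chunk); // chunk is already a Buffer; no copy needed
});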
Upvotes: 1