Psi
Psi

Reputation: 474

Promise.all() with limit?

Is there a way/pattern to implement let res = Promise.all([...p], limit)?

Especially the last point is what gives me headaches.

My current solution is to split the promises array into chunks of `limit` size and chain them. The disadvantage here is that the second batch doesn't start until all Promises from batch 1 have been resolved.

Upvotes: 10

Views: 7093

Answers (3)

youurayy
youurayy

Reputation: 1665

I ended up coding the following:

/**
 * Runs an array of promise-returning functions with at most `limit`
 * of them in flight at any time.
 *
 * Unlike Promise.all, this waits for every task to settle; it then
 * resolves with the array of results (in input order) if nothing
 * failed, or rejects with the array of collected errors otherwise.
 *
 * Fixes over the original:
 *  - an empty `asyncFuncs` array now resolves immediately (the original
 *    never settled, because no task ever reached the `finally` that
 *    performed the resolve);
 *  - results are stored by input index instead of pushed in completion
 *    order, matching Promise.all's ordering semantics.
 *
 * @param {Array<() => Promise<*>>} asyncFuncs - task factories to run
 * @param {number} limit - maximum number of concurrently running tasks
 * @returns {Promise<Array<*>>} resolves with ordered results, rejects
 *   with an array of errors
 */
const parallelLimit = (asyncFuncs, limit) => {
  return new Promise((res, rej) => {
    const total = asyncFuncs.length
    const results = []
    const errors = []
    let active = 0   // tasks currently in flight
    let next = 0     // index of the next factory to start
    let settled = 0  // tasks that have finished (fulfilled or rejected)

    // Edge case: nothing to do — settle right away.
    if (total === 0) {
      res(results)
      return
    }

    const launch = () => {
      while (next < total && active < limit) {
        const idx = next++
        active++
        asyncFuncs[idx]()
          .then(value => { results[idx] = value })   // keep input order
          .catch(err => { errors.push(err) })
          .finally(() => {
            active--
            settled++
            if (settled === total) {
              // Everything has settled; report errors if any occurred.
              errors.length ? rej(errors) : res(results)
            } else {
              launch()  // refill the freed slot
            }
          })
      }
    }
    launch()
  })
}

// test:

// Demo: ten 1-second tasks, run at most two at a time.
const startedAt = Date.now()

const tasks = Array.from({ length: 10 }, (_, idx) => () =>
  new Promise((res, rej) => {
    setTimeout(() => {
      console.log(`idx: ${idx} time: ${Date.now() - startedAt}`)
      res(idx)
    }, 1000)
  })
)

parallelLimit(tasks, 2)
  .then(values => console.log(`then ${values.join(',')}`))
  .catch(errors => console.log(`catch ${errors.join(',')}`))

Upvotes: 0

JLRishe
JLRishe

Reputation: 101690

The sequenceWithParallelism function in the bluebird-as extension appears to be designed for precisely the functionality you want. As written, it uses bluebird for its implementation, but I don't see anything bluebird-specific in the actual contents of the sequenceWithParallelism function, so you could just excerpt that function and use it in your own code, like:

sequenceWithParallelism(limit, f => f())([...p])
    .then(function () {

    });

Oddly, the function doesn't seem to be designed to resolve to the results of all of the promises, so if you need that, you'd probably need to make a few adjustments.

Upvotes: 1

Psi
Psi

Reputation: 474

I came up with the idea of creating n = limit chains which run in parallel, each of which keeps appending the next promise as long as any remain:

// Build 11 promise factories. Each one logs when it is constructed,
// then resolves with its index after a random delay; construction is
// deferred until the factory is invoked.
let promises = Array.from({ length: 11 }, (_, i) => () => {
  console.log('Construct:', i);
  return new Promise(resolve => {
    setTimeout(() => {
      console.log('Resolve:', i);
      resolve(i);
    }, Math.round(Math.random() * (2000 - 500) + 2000));
  });
});


/**
 * Runs promise factories with at most `limit` chains executing in
 * parallel. Each chain pulls the next factory off a shared queue as
 * soon as its current promise resolves, so a freed slot is reused
 * immediately (no chunk barrier).
 *
 * Fix over the original: the input array is no longer consumed via
 * shift() — we drain an internal copy, so the caller's array is left
 * intact after the call.
 *
 * Rejections propagate fail-fast through Promise.all, as before.
 *
 * @param {Array<() => Promise<*>>} promiseFactories - task factories
 * @param {number} limit - maximum number of parallel chains
 * @returns {Promise<Array<*>>} results in the original input order
 */
function parallelLimit(promiseFactories, limit) {
  // Drain a copy so the caller's array is not emptied as a side effect.
  const queue = [...promiseFactories];
  const result = [];
  let cnt = 0;

  function chain(q) {
    if (!q.length) return;
    const i = cnt++; // reserve a result slot now to preserve input order
    return q.shift()().then((res) => {
      result[i] = res; // save result at its original index
      return chain(q); // append the next pending promise to this chain
    });
  }

  const arrChains = [];
  let workers = limit;
  while (workers-- > 0 && queue.length > 0) {
    // create `limit` chains which run in parallel
    arrChains.push(chain(queue));
  }

  // Resolve with the ordered results once every chain has finished.
  return Promise.all(arrChains).then(() => result);
}


// Run all 11 factories with at most 4 in flight; log the ordered results.
parallelLimit(promises, 4).then(console.log);

Excited to read your comments and suggestions :)

Upvotes: 8

Related Questions