Guerric P

Reputation: 31833

What are the benefits of using yield as asynchronous control flow?

Angular's service worker uses yield for asynchronous control flow. Every generator function is then passed to this helper:

var __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};

This is pretty similar to Babel's _asyncToGenerator:

function _asyncToGenerator(fn) {
    return function () {
        var gen = fn.apply(this, arguments);
        return new Promise(function (resolve, reject) {
            function step(key, arg) {
                try {
                    var info = gen[key](arg);
                    var value = info.value;
                } catch (error) {
                    reject(error);
                    return;
                }
                if (info.done) {
                    resolve(value);
                } else {
                    return Promise.resolve(value).then(function (value) {
                        return step("next", value);
                    }, function (err) {
                        return step("throw", err);
                    });
                }
            }

            return step("next");
        });
    };
}
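
For context, this is roughly how a transpiler uses such a helper: the body of an async function is rewritten as a generator function and handed to _asyncToGenerator, with every await turned into a yield. The sketch below is hand-written and illustrative, not actual Babel output:

// Source (sketch):
//   async function getAnswer() {
//       const value = await Promise.resolve(42);
//       return value + 1;
//   }
// becomes (roughly):
var getAnswer = _asyncToGenerator(function* () {
    const value = yield Promise.resolve(42); // await becomes yield
    return value + 1;                        // the return value resolves the outer Promise
});

getAnswer().then(result => console.log(result)); // 43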

What are the benefits of using yield for asynchronous control flow? I saw absolutely no difference between yield and await in terms of browser support on caniuse.com.

In other words:

What's the point of using this:

var __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};

__awaiter(this, void 0, void 0, function* () {
    const foo = yield 3;
    const bar = yield new Promise(resolve => resolve(7));
    const baz = bar * foo;
    console.log(baz);
});

Instead of this:

(async function () {
    const foo = await 3;
    const bar = await new Promise(resolve => resolve(7));
    const baz = bar * foo;
    console.log(baz);
})();

Given this: [caniuse.com browser-support comparison showing no difference between generators and async functions]

Upvotes: 1

Views: 309

Answers (1)

Randy Casburn

Reputation: 14185

The difference comes down to the fact that Promises are meant to perform one-off tasks, whereas generators are designed to repeat a task until the list of tasks has been exhausted. If the list never exhausts, the generator simply keeps going, like a non-time-based iterator (think setInterval(), but without the timer) that can pause between operations.

This can be seen in the MDN example of a generator:

function* idMaker() {
  var index = 0;
  while (true)
    yield index++;
}

var gen = idMaker(); // "Generator { }"

console.log(gen.next().value); // 0
console.log(gen.next().value); // 1
console.log(gen.next().value); // 2

If the while loop produced a Promise object, it would be similar to the source samples you provided. The generator would then provide a control flow for async operations by producing Promise objects repeatedly, as they are needed.
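
To make that concrete, here is a small sketch of the same idMaker idea where each step yields a Promise instead of a plain number (the names are illustrative):

function* promiseIdMaker() {
  var index = 0;
  while (true)
    yield Promise.resolve(index++); // each step now hands back a Promise
}

var pgen = promiseIdMaker();
pgen.next().value.then(value => console.log(value)); // 0
pgen.next().value.then(value => console.log(value)); // 1
pgen.next().value.then(value => console.log(value)); // 2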

The logic in the samples you provided simply pushes this scenario a bit further: it drives the generator automatically and fulfills the Promise contract when required.
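
Put differently, helpers like __awaiter and _asyncToGenerator are just automated drivers for such a generator. A minimal sketch of that driver with comments (illustrative only, not the actual Angular or Babel code):

function run(genFn) {
    var gen = genFn();
    return new Promise(function (resolve, reject) {
        function step(method, arg) {
            var result;
            try {
                result = gen[method](arg); // resume the generator with the settled value, or throw into it
            } catch (err) {
                return reject(err);        // an uncaught throw inside the generator rejects the outer Promise
            }
            if (result.done) {
                return resolve(result.value); // generator finished: fulfill the outer Promise
            }
            Promise.resolve(result.value).then(
                function (value) { return step("next", value); },
                function (err) { return step("throw", err); }
            );
        }
        step("next");
    });
}

run(function* () {
    const foo = yield 3;
    const bar = yield new Promise(resolve => resolve(7));
    console.log(foo * bar); // 21, same result as the async/await version
});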

Upvotes: 1
