*Async Generators()
ES2018
But first, generators
function* sequence(start = 0) {
  while (true) {
    yield start; // yield first so the starting value is included
    start++;
  }
}

const s = sequence(20);
for (const x of s) {
  console.log(x);
  if (x > 50) break;
}
//20
//21
//22
//23
//24
//...
//51
Use cases
- Simplifying code for recursive algorithms (much cleaner)
- Streaming huge amounts of data efficiently
- Easily constructing infinite sequences
Recursive code
const fs = require('fs');
const path = require('path');

// Recursively yields every file path under root
// (and, if folders is truthy, each folder path as well)
function* walk(root = '.', folders) {
  try {
    // root is a directory: recurse into each entry
    for (const file of fs.readdirSync(root)) {
      yield* walk(path.join(root, file), folders);
    }
    if (folders) yield root;
  } catch (error) {
    // readdirSync throws ENOTDIR when root is a plain file, so yield the file itself
    if (error.code !== 'ENOTDIR') throw error;
    yield root;
  }
}

module.exports = walk;
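A quick usage sketch (the './src' path is just for illustration; any directory works):

// Print every file under ./src; pass true as the second argument
// to also yield each folder path
for (const file of walk('./src')) {
  console.log(file);
}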
Infinite Sequences
function* sequence(start = 0) {
  while (true) {
    yield start; // yield first so the starting value is included
    start++;
  }
}

const s = sequence(20);
for (const x of s) {
  console.log(x);
  if (x > 50) break;
}
//20
//21
//22
//23
//24
//...
//51
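Infinite generators pair well with a small cap on how much you pull. A sketch of a hypothetical take() helper (not a built-in):

function* take(iterable, n) {
  let count = 0;
  for (const value of iterable) {
    if (count++ >= n) return; // stop after n values
    yield value;
  }
}

console.log([...take(sequence(20), 5)]); // [20, 21, 22, 23, 24]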
Streaming Data
// Splits an async stream of text chunks into complete lines
async function* splitLines(chunksAsync) {
  let previous = '';
  for await (const chunk of chunksAsync) {
    previous += chunk;
    let eolIndex;
    // yield every complete line buffered so far
    while ((eolIndex = previous.indexOf('\n')) >= 0) {
      const line = previous.slice(0, eolIndex);
      yield line;
      previous = previous.slice(eolIndex + 1);
    }
  }
  // flush a final line that has no trailing newline
  if (previous.length > 0) {
    yield previous;
  }
}
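To try it, feed splitLines any async source of chunks; here's a minimal sketch with a made-up chunks() generator:

async function* chunks() {
  yield 'first\nsec';
  yield 'ond\nthird';
}

(async () => {
  for await (const line of splitLines(chunks())) {
    console.log(line);
  }
})();
// first
// second
// third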
Async with Promises
const p = fetch('https://cnn.com'); // fetch already returns a Promise, no wrapper needed

p.then((contentOfCNN) => {
  return ignoreContent(contentOfCNN);
})
.then((resultOfIgnoring) => {
  return doSomethingElse(resultOfIgnoring);
})
.then((resultOfSomethingElse) => {
  return doAnotherThingButNotTheSame(resultOfSomethingElse);
})
.then((resultOfAnotherThing) => {
  return OkOneLastThingIPromise(resultOfAnotherThing);
})
.then((resultOfOneLastThing) => {
  return resultOfOneLastThing;
});
Oh god, make it stawp
async/await saves the day, kinda
function resolveAfter2Seconds(x) {
  return new Promise(resolve => {
    setTimeout(() => {
      resolve(x);
    }, 2000);
  });
}

// async function expression assigned to a variable
const add = async function(x) {
  const a = await resolveAfter2Seconds(20);
  const b = await resolveAfter2Seconds(30);
  return x + a + b;
};
//https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/async_function
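Calling it works like any other Promise-returning function; a minimal sketch:

add(10).then(result => {
  console.log(result); // 60, after roughly four seconds (the two awaits run sequentially)
});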
But what about iteration???
Oh no, more problems
// AngularJS-era code: $http is Angular's HTTP service, $q its Promise library
var promises = [];
for (var i = 0; i < 5; i++) {
  var promise = $http.get('/data' + i);
  promises.push(promise);
}
$q.all(promises).then(doSomethingAfterAllRequests);
//https://daveceddia.com/waiting-for-promises-in-a-loop/
Not terrible, but far from pretty, and the true meaning of the code is hidden behind the semantics of promises.
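The framework-free version has the same shape (a sketch; doSomethingAfterAllRequests is assumed to exist):

const requests = [];
for (let i = 0; i < 5; i++) {
  requests.push(fetch('/data' + i));
}
Promise.all(requests).then(doSomethingAfterAllRequests);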
Enter Async Iteration
//http://2ality.com/2016/10/asynchronous-iteration.html
//The interface:
interface AsyncIterable {
  [Symbol.asyncIterator]() : AsyncIterator;
}
interface AsyncIterator {
  next() : Promise<IteratorResult>;
}
interface IteratorResult {
  value: any;
  done: boolean;
}
//Usage:
async function f() {
  for await (const x of createAsyncIterable(['a', 'b'])) {
    console.log(x);
  }
}
f();
// Output:
// a
// b
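The snippet assumes a createAsyncIterable() helper; one possible definition is an async generator that wraps any sync iterable:

async function* createAsyncIterable(syncIterable) {
  for (const elem of syncIterable) {
    yield elem;
  }
}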
Finally!
But what about rejections?
function createRejectingIterable() {
  return {
    [Symbol.asyncIterator]() {
      return this;
    },
    next() {
      return Promise.reject(new Error('Problem!'));
    },
  };
}

(async function () { // (A)
  try {
    for await (const x of createRejectingIterable()) {
      console.log(x);
    }
  } catch (e) {
    console.error(e);
    // Error: Problem!
  }
})(); // (B)
//http://2ality.com/2016/10/asynchronous-iteration.html
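Async generators give you the same behavior for free: throwing inside one rejects the promise returned by next(), and the identical try/catch around for await picks it up. A minimal sketch:

// Throwing inside an async generator rejects the promise from next()
async function* createRejectingGenerator() {
  throw new Error('Problem!');
}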
Real life use case - reading huge files without overflowing memory
Recently at work we needed to import very large chunks of client data into our production database.
Being a poor startup, we can't afford beefy servers with huge amounts of RAM that would let us simply load the files into memory and process away.
We were forced to use the small, free-tier servers offered by AWS, which meant fitting the processing into no more than a couple of gigs of RAM at best.
What to do???
Stream it, of course! (with generators)
const fs = require('fs');

// Despite the name, this yields numbered chunks (up to 1024 chars each), not lines
async function* get_lines(path) {
  const stream = fs.createReadStream(path, {
    encoding: 'utf8',   // null -> buffers, 'utf8' -> strings with that encoding
    highWaterMark: 1024 // maximum size of each chunk (buffer or string)
  });
  let counter = 0; // declared properly (was an accidental global)
  for await (const chunk of stream) {
    counter++;
    // console.log(`Read: ${chunk}`);
    yield `${counter}: ${chunk}\n`;
  }
}

async function main() {
  const lines = get_lines('./lorem.txt');
  for await (const line of lines) {
    console.log(line);
  }
}

main();
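Since get_lines yields fixed-size chunks rather than true lines, it could also compose with the splitLines generator from the Streaming Data slide for genuine line-by-line reads. A sketch, assuming splitLines is in scope:

async function* fileChunks(path) {
  const stream = fs.createReadStream(path, { encoding: 'utf8', highWaterMark: 1024 });
  for await (const chunk of stream) {
    yield chunk;
  }
}

async function printLines(path) {
  for await (const line of splitLines(fileChunks(path))) {
    console.log(line);
  }
}

printLines('./lorem.txt');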