JavaScript Generators Tutorial - Section 3: Advanced Patterns
// Array approach - stores all values in memory
function createArray(n) {
  const arr = [];
  for (let i = 0; i < n; i++) {
    arr.push(i * i);
  }
  return arr;
}
// Generator approach - computes on demand
function* createGenerator(n) {
  for (let i = 0; i < n; i++) {
    yield i * i;
  }
}
// Memory comparison
const arr = createArray(1000000); // ~8MB memory
const gen = createGenerator(1000000); // ~100 bytes
// Process only what you need
for (const value of gen) {
  if (value > 100) break; // Can stop early
}
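If you're running under Node.js, you can get a rough sense of the difference with process.memoryUsage(); this is only a sketch, and the exact numbers depend on the engine, version, and garbage-collector timing.
// Rough measurement sketch (Node.js only; numbers vary with GC timing)
const heapBefore = process.memoryUsage().heapUsed;
const bigArray = createArray(1000000);
const heapAfterArray = process.memoryUsage().heapUsed;
console.log(`array: ~${((heapAfterArray - heapBefore) / 1024 / 1024).toFixed(1)} MB`);
const heapBeforeGen = process.memoryUsage().heapUsed;
const lazyGen = createGenerator(1000000);
const heapAfterGen = process.memoryUsage().heapUsed;
console.log(`generator: ~${heapAfterGen - heapBeforeGen} bytes`);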
// Eager evaluation - processes everything
function eagerPipeline(data) {
  return data
    .map(x => x * 2)      // Creates new array
    .filter(x => x > 10)  // Creates new array
    .map(x => x + 1)      // Creates new array
    .slice(0, 5);         // Creates new array
}
// Lazy evaluation - processes only what's needed
function* lazyPipeline(data) {
  let count = 0;
  for (const x of data) {
    const doubled = x * 2;
    if (doubled > 10) {
      const result = doubled + 1;
      yield result;
      if (++count >= 5) break;
    }
  }
}
const data = Array.from({ length: 1000000 }, (_, i) => i);
console.time('eager');
eagerPipeline(data);
console.timeEnd('eager'); // ~200ms
console.time('lazy');
[...lazyPipeline(data)];
console.timeEnd('lazy'); // ~5ms
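To see where the gap comes from, you can wrap the source in a small counting generator (a hypothetical helper, not part of the pipeline itself): the lazy version reads only the handful of source items it needs before breaking out, while the eager version walks all 1,000,000 elements four times.
// Sketch: count how many source items the lazy pipeline actually reads
function* countPulls(iterable, counter) {
  for (const item of iterable) {
    counter.pulls++;
    yield item;
  }
}
const pullCounter = { pulls: 0 };
console.log([...lazyPipeline(countPulls(data, pullCounter))]); // [13, 15, 17, 19, 21]
console.log(pullCounter.pulls); // 11 -- only the first 11 source items were ever read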
// Bad: Processes all items even if not needed
function* inefficient(items) {
  const processed = items.map(expensiveOperation);
  for (const item of processed) {
    yield item;
  }
}
// Good: Processes items on demand
function* efficient(items) {
  for (const item of items) {
    yield expensiveOperation(item);
  }
}
function expensiveOperation(x) {
  // Simulate expensive computation
  return x * x;
}
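The payoff shows up as soon as the consumer stops early; this is just an illustrative snippet using the functions above.
// Pull only three values: expensiveOperation runs exactly three times,
// whereas inefficient() would have mapped the whole input up front.
const lazyValues = efficient([1, 2, 3, 4, 5]);
console.log(lazyValues.next().value); // 1
console.log(lazyValues.next().value); // 4
console.log(lazyValues.next().value); // 9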
function* search(items, predicate) {
  for (const item of items) {
    if (predicate(item)) {
      yield item;
      return; // Stop after first match
    }
  }
}
const numbers = Array.from({ length: 1000000 }, (_, i) => i);
const result = search(numbers, n => n > 500000);
console.log(result.next().value); // 500001
// Generator stops right after the first match; the remaining 499,998 items are never examined
function memoizeGenerator(generatorFn) {
  const cache = new Map();
  return function* (...args) {
    const key = JSON.stringify(args);
    if (cache.has(key)) {
      yield* cache.get(key);
      return;
    }
    const results = [];
    for (const value of generatorFn(...args)) {
      results.push(value);
      yield value;
    }
    cache.set(key, results);
  };
}
const fibonacci = memoizeGenerator(function* (n) {
  let [a, b] = [0, 1];
  for (let i = 0; i < n; i++) {
    yield a;
    [a, b] = [b, a + b];
  }
});
// First call: computes and caches
console.log([...fibonacci(10)]);
// Second call: returns from cache
console.log([...fibonacci(10)]);
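You can confirm the cache is doing the work by counting computations. Note that this particular memoizer only fills its cache when the first iteration runs to completion; breaking out early skips the cache.set call. The counter below is an illustrative sketch, not part of the original example.
let computations = 0;
const squares = memoizeGenerator(function* (n) {
  for (let i = 0; i < n; i++) {
    computations++;
    yield i * i;
  }
});
console.log([...squares(5)]); // [0, 1, 4, 9, 16]
console.log([...squares(5)]); // [0, 1, 4, 9, 16]
console.log(computations);    // 5 -- the second call came entirely from the cache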
// Process items in batches for better performance
async function* batchProcess(items, batchSize = 100) {
  for (let i = 0; i < items.length; i += batchSize) {
    const batch = items.slice(i, i + batchSize);
    // Process batch in parallel
    const results = await Promise.all(
      batch.map(item => processItem(item))
    );
    yield* results;
  }
}
async function processItem(item) {
  // Simulate async processing
  await new Promise(resolve => setTimeout(resolve, 10));
  return item * 2;
}
// Usage: batches of 50 run in parallel, so 1,000 items at ~10ms each finish in
// roughly (1000 / 50) * 10ms ≈ 200ms instead of ~10s sequentially
(async () => {
  const items = Array.from({ length: 1000 }, (_, i) => i);
  for await (const result of batchProcess(items, 50)) {
    console.log(result);
  }
})();
// When NOT to use generators
function simpleLoop(n) {
  const results = [];
  for (let i = 0; i < n; i++) {
    results.push(i);
  }
  return results;
}
// Generator has overhead for simple cases
function* generatorLoop(n) {
  for (let i = 0; i < n; i++) {
    yield i;
  }
}
// Benchmark
console.time('simple');
simpleLoop(1000000);
console.timeEnd('simple'); // ~10ms
console.time('generator');
[...generatorLoop(1000000)];
console.timeEnd('generator'); // ~50ms
// Use generators when:
// - Working with large datasets
// - Need lazy evaluation
// - Want to stop early
// - Processing infinite sequences (see the sketch below)
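That last case has no array-based equivalent at all, since an infinite sequence can never be materialized up front; here is a minimal sketch.
// An endless sequence of natural numbers -- only possible lazily
function* naturals() {
  let n = 0;
  while (true) {
    yield n++;
  }
}
// Consume just what you need, then stop
const firstFiveEvens = [];
for (const n of naturals()) {
  if (n % 2 === 0) firstFiveEvens.push(n);
  if (firstFiveEvens.length === 5) break;
}
console.log(firstFiveEvens); // [0, 2, 4, 6, 8]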
function* monitoredGenerator(iterable) {
  let count = 0;
  const start = performance.now();
  for (const item of iterable) {
    count++;
    yield item;
    if (count % 1000 === 0) {
      const elapsed = performance.now() - start;
      console.log(`Processed ${count} items in ${elapsed.toFixed(2)}ms`);
    }
  }
  const total = performance.now() - start;
  console.log(`Total: ${count} items in ${total.toFixed(2)}ms`);
}
const data = Array.from({ length: 10000 }, (_, i) => i);
for (const item of monitoredGenerator(data)) {
  // Process items
}
class DataPipeline {
  constructor(source) {
    this.source = source;
  }
  // Each step wraps a lazy generator in a new DataPipeline so calls can be chained
  filter(predicate) {
    const source = this.source;
    return new DataPipeline((function* () {
      for (const item of source) {
        if (predicate(item)) {
          yield item;
        }
      }
    })());
  }
  map(transform) {
    const source = this.source;
    return new DataPipeline((function* () {
      for (const item of source) {
        yield transform(item);
      }
    })());
  }
  take(n) {
    const source = this.source;
    return new DataPipeline((function* () {
      let count = 0;
      for (const item of source) {
        if (count++ >= n) break;
        yield item;
      }
    })());
  }
  batch(size) {
    const source = this.source;
    return new DataPipeline((function* () {
      let batch = [];
      for (const item of source) {
        batch.push(item);
        if (batch.length === size) {
          yield batch;
          batch = [];
        }
      }
      if (batch.length > 0) {
        yield batch;
      }
    })());
  }
  toArray() {
    return [...this.source];
  }
  forEach(fn) {
    for (const item of this.source) {
      fn(item);
    }
  }
}
// Usage - memory efficient, lazy evaluation
const data = Array.from({ length: 1000000 }, (_, i) => i);
const pipeline = new DataPipeline(data);
const result = pipeline
  .filter(n => n % 2 === 0)
  .map(n => n * n)
  .take(10)
  .toArray();
console.log(result); // [0, 4, 16, 36, 64, 100, 144, 196, 256, 324]
// Processed only 20 items instead of 1,000,000!
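The batch() and forEach() steps defined above compose the same way; this is a small illustrative usage, not part of the original example.
new DataPipeline(data)
  .filter(n => n % 2 === 0)
  .take(6)
  .batch(3)
  .forEach(group => console.log(group)); // [0, 2, 4] then [6, 8, 10]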
You've completed the JavaScript Generators tutorial! You now have the tools to build efficient, memory-friendly, scalable applications with generators. Keep practicing and happy coding! 🎉