JavaScript Closures Tutorial - Section 3: Common Use Cases
Memoization is an optimization technique that stores the results of expensive function calls and returns the cached result when the same inputs occur again. Closures make this elegant and efficient.
// Without memoization
// Naive recursive Fibonacci: every call re-solves the same subproblems,
// so the running time grows exponentially with n. Shown as the "before"
// case that memoization fixes below.
function fibonacci(n) {
  if (n <= 1) {
    return n;
  }
  return fibonacci(n - 1) + fibonacci(n - 2);
}
// Demo: time the naive version — the exponential blow-up makes fib(40) slow.
console.time('fib');
console.log(fibonacci(40)); // Very slow!
console.timeEnd('fib');
// With memoization
/**
 * Wrap `fn` so results are cached by the JSON serialization of the
 * argument list; repeat calls with the same arguments return the cached
 * value without re-running `fn`.
 *
 * Uses a Map rather than a plain object — consistent with every other
 * cache in this file, and lookups never consult Object.prototype the way
 * the previous `key in cache` check did.
 *
 * @param {Function} fn - The function to memoize.
 * @returns {Function} A memoized wrapper with the same call signature.
 */
function memoize(fn) {
  const cache = new Map();
  return function(...args) {
    // NOTE: arguments that serialize identically share one cache entry,
    // and JSON.stringify cannot represent functions or cyclic values.
    const key = JSON.stringify(args);
    if (cache.has(key)) {
      return cache.get(key);
    }
    const result = fn.apply(this, args);
    cache.set(key, result);
    return result;
  };
}
// Memoized Fibonacci: the recursive calls go through `memoizedFib`
// itself, so each subproblem is computed at most once.
const memoizedFib = memoize((n) =>
  n <= 1 ? n : memoizedFib(n - 1) + memoizedFib(n - 2)
);
// Demo: the memoized version computes each fib(k) exactly once — near-instant.
console.time('memoFib');
console.log(memoizedFib(40)); // Much faster!
console.timeEnd('memoFib');
// Memoizing wrapper that logs whether each call was served from the
// cache or freshly computed. Results are keyed by the JSON-serialized
// argument list.
function createMemoizedFunction(fn) {
  const store = new Map();
  return function(...args) {
    const key = JSON.stringify(args);
    if (store.has(key)) {
      console.log('Cache hit for:', args);
      return store.get(key);
    }
    console.log('Computing for:', args);
    const value = fn(...args);
    store.set(key, value);
    return value;
  };
}
// Expensive calculation
// Deliberately slow stand-in for real work: adds a * b one million
// times, so it always returns a * b * 1,000,000.
function expensiveCalculation(a, b) {
  let total = 0;
  for (let iteration = 0; iteration < 1000000; iteration += 1) {
    total += a * b;
  }
  return total;
}
// Demo: repeated calls with identical argument pairs are served from the cache.
const memoized = createMemoizedFunction(expensiveCalculation);
memoized(5, 10); // Computing...
memoized(5, 10); // Cache hit!
memoized(3, 7); // Computing...
memoized(5, 10); // Cache hit!
// Memoization with a FIFO size cap: once the cache holds `maxSize`
// entries, inserting a new one evicts the oldest *insertion* (not the
// least recently used — see createLRUCache below for that policy).
function memoizeWithLimit(fn, maxSize = 100) {
  const cache = new Map();
  const insertionOrder = [];
  return function(...args) {
    const key = JSON.stringify(args);
    if (cache.has(key)) {
      return cache.get(key);
    }
    const result = fn.apply(this, args);
    cache.set(key, result);
    insertionOrder.push(key);
    if (insertionOrder.length > maxSize) {
      // Drop whichever entry has been cached the longest.
      cache.delete(insertionOrder.shift());
    }
    return result;
  };
}
// Demo: with a cap of 3 entries, the fourth insertion evicts the oldest.
const limitedMemo = memoizeWithLimit(expensiveCalculation, 3);
limitedMemo(1, 1);
limitedMemo(2, 2);
limitedMemo(3, 3);
limitedMemo(4, 4); // This will evict (1, 1)
// Memoization where each entry expires `ttl` milliseconds after it was
// computed; an expired entry is dropped and recomputed on the next call.
function memoizeWithTTL(fn, ttl = 5000) {
  const cache = new Map();
  return function(...args) {
    const key = JSON.stringify(args);
    const now = Date.now();
    const entry = cache.get(key);
    // Entries are always objects, so `undefined` means "not cached".
    if (entry !== undefined) {
      if (now - entry.timestamp < ttl) {
        console.log('Cache hit (fresh)');
        return entry.value;
      }
      console.log('Cache expired');
      cache.delete(key);
    }
    console.log('Computing...');
    const result = fn.apply(this, args);
    cache.set(key, { value: result, timestamp: now });
    return result;
  };
}
// Demo: entries live for 2 seconds; the delayed call finds its entry expired.
const ttlMemo = memoizeWithTTL(expensiveCalculation, 2000);
ttlMemo(5, 10); // Computing...
ttlMemo(5, 10); // Cache hit (fresh)
setTimeout(() => {
ttlMemo(5, 10); // Cache expired, Computing...
}, 2500);
// Closure-based cache for fetch() responses, keyed by URL + options and
// expiring after `ttl` milliseconds. Returns an object whose methods all
// close over the same private Map.
function createAPICache(ttl = 60000) {
  const cache = new Map();

  return {
    // Fetch `url`, serving a cached JSON body while the entry is fresh.
    get: async function(url, options = {}) {
      const key = JSON.stringify({ url, options });
      const now = Date.now();
      const entry = cache.get(key);
      if (entry) {
        if (now - entry.timestamp < ttl) {
          console.log('Returning cached response');
          return entry.data;
        }
        cache.delete(key); // stale — fall through and refetch
      }
      console.log('Fetching from API');
      const response = await fetch(url, options);
      const data = await response.json();
      cache.set(key, { data, timestamp: now });
      return data;
    },
    // Empty the whole cache.
    clear: function() {
      cache.clear();
    },
    // Remove every cached entry whose key mentions `url`.
    delete: function(url) {
      const matches = [...cache.keys()].filter((key) => key.includes(url));
      matches.forEach((key) => cache.delete(key));
    }
  };
}
const apiCache = createAPICache(30000); // 30 second TTL
// Usage
// NOTE(review): both calls start before the first response arrives, and the
// cache entry is only written after the first fetch resolves — so the second
// call may still hit the network. memoizeAsync (later in this file) shows
// how to deduplicate in-flight requests.
apiCache.get('/api/users/1').then(user => console.log(user));
apiCache.get('/api/users/1').then(user => console.log(user)); // Cached!
/**
 * Cache for derived values keyed by name, recomputed only when the
 * supplied dependency list changes (entries compared position-wise
 * with ===).
 */
function createComputedCache() {
  const cache = new Map();
  const dependencies = new Map();
  return {
    /**
     * Return the cached value for `key`, or run `computeFn` when the
     * value has never been computed or `deps` differ from last time.
     *
     * Fix: also compare the dependency-list LENGTH. The original only
     * ran `deps.some(...)` over the NEW list, so shrinking the deps
     * array (e.g. [a, b] -> [a]) compared index 0 only and silently
     * returned a stale cached value.
     */
    compute: function(key, computeFn, deps = []) {
      if (cache.has(key)) {
        const oldDeps = dependencies.get(key);
        const depsChanged =
          !oldDeps ||
          deps.length !== oldDeps.length ||
          deps.some((dep, i) => dep !== oldDeps[i]);
        if (!depsChanged) {
          console.log(`Using cached: ${key}`);
          return cache.get(key);
        }
      }
      // Compute, then remember both the value and the deps it came from.
      console.log(`Computing: ${key}`);
      const value = computeFn();
      cache.set(key, value);
      dependencies.set(key, deps);
      return value;
    },
    // Drop a single cached value and its recorded deps.
    invalidate: function(key) {
      cache.delete(key);
      dependencies.delete(key);
    },
    // Drop everything.
    clear: function() {
      cache.clear();
      dependencies.clear();
    }
  };
}
// Demo: the value recomputes only when a listed dependency changes.
const computed = createComputedCache();
let firstName = 'John';
let lastName = 'Doe';
const fullName = () => computed.compute(
'fullName',
() => `${firstName} ${lastName}`,
[firstName, lastName]
);
console.log(fullName()); // Computing: fullName
console.log(fullName()); // Using cached: fullName
firstName = 'Jane';
console.log(fullName()); // Computing: fullName (deps changed)
// Least-recently-used cache built on Map's insertion ordering: every
// access re-inserts the key at the end, so the Map's first key is always
// the least recently used one.
function createLRUCache(maxSize) {
  const cache = new Map();

  // Re-insert `key` so it becomes the most recently used entry.
  // (delete on a missing key is a harmless no-op.)
  const touch = (key, value) => {
    cache.delete(key);
    cache.set(key, value);
  };

  return {
    get: function(key) {
      if (!cache.has(key)) {
        return undefined;
      }
      const value = cache.get(key);
      touch(key, value);
      return value;
    },
    set: function(key, value) {
      touch(key, value);
      if (cache.size > maxSize) {
        // Map iterates in insertion order; the first key is the LRU.
        const lruKey = cache.keys().next().value;
        cache.delete(lruKey);
      }
    },
    has: function(key) {
      return cache.has(key);
    },
    clear: function() {
      cache.clear();
    },
    size: function() {
      return cache.size;
    }
  };
}
// Demo: with capacity 3, inserting a fourth key evicts the least recently used.
const lru = createLRUCache(3);
lru.set('a', 1);
lru.set('b', 2);
lru.set('c', 3);
console.log(lru.size()); // 3
lru.set('d', 4); // Evicts 'a'
console.log(lru.has('a')); // false
console.log(lru.has('d')); // true
// Async memoization that also deduplicates in-flight calls: concurrent
// invocations with the same arguments share one promise, and only
// successful results are cached (a rejected call can be retried).
function memoizeAsync(fn) {
  const cache = new Map();
  const pending = new Map();
  return async function(...args) {
    const key = JSON.stringify(args);
    if (cache.has(key)) {
      console.log('Cache hit');
      return cache.get(key);
    }
    if (pending.has(key)) {
      console.log('Waiting for pending computation');
      return pending.get(key);
    }
    console.log('Computing...');
    const inFlight = fn.apply(this, args);
    pending.set(key, inFlight);
    try {
      const value = await inFlight;
      cache.set(key, value);
      return value;
    } finally {
      // Resolved or rejected, the computation is no longer in flight.
      pending.delete(key);
    }
  };
}
// Memoized user lookup: repeated or concurrent fetches for the same id
// resolve from a single request.
const fetchUser = memoizeAsync(async function(id) {
  const res = await fetch(`/api/users/${id}`);
  return res.json();
});
// Multiple simultaneous calls only trigger one fetch
// (all three callers receive the same in-flight promise from `pending`,
// so `results` holds three references to one resolved value)
Promise.all([
fetchUser(1),
fetchUser(1),
fetchUser(1)
]).then(results => {
console.log('All results:', results);
// Only one "Computing..." log
});
// Conditional memoization: the `shouldCache(...args)` predicate decides
// per call whether the result is worth caching; other calls go straight
// through to `fn`.
function memoizeIf(fn, shouldCache) {
  const results = new Map();
  return function(...args) {
    if (!shouldCache(...args)) {
      return fn.apply(this, args); // predicate says: never cache these
    }
    const cacheKey = JSON.stringify(args);
    if (!results.has(cacheKey)) {
      results.set(cacheKey, fn.apply(this, args));
    }
    return results.get(cacheKey);
  };
}
// Only cache for numbers > 10
const selectiveFib = memoizeIf(
  (n) => (n <= 1 ? n : selectiveFib(n - 1) + selectiveFib(n - 2)),
  (n) => n > 10
);
// Demo: small arguments bypass the cache; 15 is cached after its first call.
selectiveFib(5); // Not cached
selectiveFib(15); // Cached
selectiveFib(15); // Cache hit
// Memoization with hit/miss instrumentation. The returned function
// carries helper methods (stats / clearStats / clearCache) that close
// over the same counters and cache as the wrapper itself.
function createCacheWithStats(fn) {
  const cache = new Map();
  let hits = 0;
  let misses = 0;

  const memoized = function(...args) {
    const key = JSON.stringify(args);
    if (cache.has(key)) {
      hits += 1;
      return cache.get(key);
    }
    misses += 1;
    const result = fn.apply(this, args);
    cache.set(key, result);
    return result;
  };

  // Snapshot of the counters plus a formatted hit-rate percentage.
  memoized.stats = function() {
    const total = hits + misses;
    const hitRate = total > 0 ? (hits / total * 100).toFixed(2) + '%' : '0%';
    return { hits, misses, total, hitRate, cacheSize: cache.size };
  };
  // Reset the counters without touching cached values.
  memoized.clearStats = function() {
    hits = 0;
    misses = 0;
  };
  // Drop cached values without touching the counters.
  memoized.clearCache = function() {
    cache.clear();
  };

  return memoized;
}
// Demo: four calls over two unique argument pairs -> 2 misses, 2 hits (50%).
const tracked = createCacheWithStats(expensiveCalculation);
tracked(1, 2);
tracked(1, 2);
tracked(3, 4);
tracked(1, 2);
console.log(tracked.stats());
// { hits: 2, misses: 2, total: 4, hitRate: '50.00%', cacheSize: 2 }
You've completed Section 3 on Common Use Cases! In the next section, we'll explore advanced concepts including the classic closure pitfall in loops, memory management, and performance considerations.