Learning Objectives

  • Measure async performance accurately
  • Identify and fix performance bottlenecks
  • Optimize Promise-based code
  • Avoid common anti-patterns

Measuring Performance

Using console.time()

// Label-based timing: timeEnd() logs the elapsed time for the matching label.
console.time('fetchUsers');
const users = await fetchUsers(); // fetchUsers is assumed to be defined elsewhere in the course
console.timeEnd('fetchUsers');
// fetchUsers: 1234.56ms

Using Performance API

// Manual timing with the high-resolution monotonic clock from the Performance API.
const start = performance.now();
await fetchUsers(); // fetchUsers is assumed to be defined elsewhere
const end = performance.now();
console.log(`Execution time: ${end - start}ms`);

Performance Profiler

// Collects timing statistics (call count, min/max/avg duration, failures)
// for named async operations. Wrap any async call in measure() to record it.
class PerformanceProfiler {
  constructor() {
    this.metrics = new Map();
  }

  // Runs fn(), times it, and records the outcome under `name`.
  // The result (or thrown error) of fn() is passed through unchanged.
  async measure(name, fn) {
    const startedAt = performance.now();
    let succeeded = true;

    try {
      return await fn();
    } catch (error) {
      succeeded = false;
      throw error;
    } finally {
      // Record in finally so both success and failure paths are captured.
      this.recordMetric(name, performance.now() - startedAt, succeeded);
    }
  }

  // Folds one observation into the running statistics for `name`.
  recordMetric(name, duration, success) {
    let metric = this.metrics.get(name);

    if (metric === undefined) {
      metric = {
        count: 0,
        totalTime: 0,
        minTime: Infinity, // so the first observation always becomes the minimum
        maxTime: 0,
        failures: 0
      };
      this.metrics.set(name, metric);
    }

    metric.count += 1;
    metric.totalTime += duration;
    if (duration < metric.minTime) metric.minTime = duration;
    if (duration > metric.maxTime) metric.maxTime = duration;
    if (!success) metric.failures += 1;
  }

  // Returns one summary row per operation. Times and the failure rate are
  // formatted as fixed-precision strings for display.
  getReport() {
    return [...this.metrics.entries()].map(([name, metric]) => ({
      name,
      calls: metric.count,
      avgTime: (metric.totalTime / metric.count).toFixed(2),
      minTime: metric.minTime.toFixed(2),
      maxTime: metric.maxTime.toFixed(2),
      totalTime: metric.totalTime.toFixed(2),
      failureRate: ((metric.failures / metric.count) * 100).toFixed(1)
    }));
  }

  // Pretty-prints the report as a console table.
  printReport() {
    console.table(this.getReport());
  }
}

// Usage
const profiler = new PerformanceProfiler();

// fetchUser / fetchPosts are example API helpers assumed defined elsewhere.
await profiler.measure('fetchUser', () => fetchUser(1));
await profiler.measure('fetchPosts', () => fetchPosts(1));

// Prints a table of timing stats for the calls above.
profiler.printReport();

Common Performance Anti-Patterns

❌ Anti-Pattern 1: Unnecessary Awaits

// Bad: Unnecessary await
async function bad() {
  return await fetchData(); // Redundant await: the caller awaits the same Promise anyway
}

// Good: Return Promise directly
async function good() {
  return fetchData(); // The Promise is forwarded as-is; no extra microtask tick
}

// Exception: Use await in try/catch
async function withErrorHandling() {
  try {
    return await fetchData(); // Needed to catch errors — without await, the rejection escapes the try
  } catch (error) {
    console.error(error);
    throw error; // Re-throw so callers still observe the failure
  }
}

❌ Anti-Pattern 2: Sequential When Parallel is Possible

// Bad: Sequential (3 seconds)
async function bad() {
  const user = await fetchUser(1);     // 1s
  const posts = await fetchPosts(1);   // 1s
  const comments = await fetchComments(1); // 1s
  // Total latency is the SUM of the three independent calls.
  return { user, posts, comments };
}

// Good: Parallel (1 second)
async function good() {
  // All three requests start immediately; total latency ≈ the slowest one.
  const [user, posts, comments] = await Promise.all([
    fetchUser(1),
    fetchPosts(1),
    fetchComments(1)
  ]);
  return { user, posts, comments };
}

❌ Anti-Pattern 3: Awaiting Each Promise Inside a Loop

// Bad: Creates Promises sequentially
async function bad(ids) {
  const results = [];
  for (const id of ids) {
    results.push(await fetchUser(id)); // Waits for each — total time is the sum of all requests
  }
  return results;
}

// Good: Create all Promises, then await
async function good(ids) {
  // All requests start at once; total time ≈ the slowest single request.
  return await Promise.all(
    ids.map(id => fetchUser(id))
  );
}

❌ Anti-Pattern 4: Not Reusing Promises

// Bad: Multiple calls to same Promise
async function bad() {
  const user1 = await fetchUser(1);
  const user2 = await fetchUser(1); // Duplicate request! Each call starts a fresh fetch
  return [user1, user2];
}

// Good: Reuse Promise
async function good() {
  // A single fetch is started; both awaits share the same Promise.
  const userPromise = fetchUser(1);
  const [user1, user2] = await Promise.all([
    userPromise,
    userPromise
  ]);
  return [user1, user2];
}

Optimization Techniques

1. Batch Requests

// Instead of individual requests (one HTTP round trip per id).
// Returns the parsed user objects in the same order as `ids`.
async function fetchUsersIndividually(ids) {
  // FIX: parse each response body, so this returns user data — matching
  // fetchUsersBatch below. Previously it resolved to raw Response objects.
  return Promise.all(
    ids.map((id) => fetch(`/api/users/${id}`).then((r) => r.json()))
  );
}

// Batch into single request: one HTTP round trip for every id.
// Returns the server's parsed JSON payload.
async function fetchUsersBatch(ids) {
  const response = await fetch('/api/users/batch', {
    method: 'POST',
    // FIX: declare the payload type — many servers reject or misparse a
    // JSON body sent without a Content-Type header.
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ ids })
  });
  return response.json();
}

2. Request Deduplication

// In-flight requests keyed by URL; an entry is removed once it settles.
const pendingRequests = new Map();

// Fetches JSON from `url`, sharing one request among all concurrent callers
// for the same URL. After the request settles (success or failure), the
// next call starts a fresh request.
async function fetchWithDedup(url) {
  const inFlight = pendingRequests.get(url);
  if (inFlight) {
    return inFlight; // join the request that is already running
  }

  const request = fetch(url).then((response) => response.json());
  pendingRequests.set(url, request);

  try {
    return await request;
  } finally {
    // Drop the entry whether the request succeeded or failed.
    pendingRequests.delete(url);
  }
}

3. Lazy Loading

// Defers a fetch until first use, then memoizes the Promise so every
// subsequent get() shares the same in-flight or resolved result.
class LazyData {
  constructor(loader) {
    this.fetchFn = loader;
    this.promise = null;
  }

  // Starts fetchFn() on the first call; all calls await the same Promise.
  // NOTE(review): a rejected Promise is also cached, so a failed fetch is
  // never retried — confirm that is the intended contract.
  async get() {
    this.promise = this.promise || this.fetchFn();
    return this.promise;
  }
}

// Usage
const userData = new LazyData(() => fetchUser(1)); // fetchUser assumed defined elsewhere

// Only fetches when first accessed
const user = await userData.get();

// Subsequent calls reuse same Promise
const sameUser = await userData.get();

4. Streaming Responses

// Async generator: yields each user as soon as its fetch resolves,
// instead of waiting for the whole list to finish.
async function* streamUsers(ids) {
  for (const id of ids) {
    yield await fetchUser(id); // sequential: one request at a time
  }
}

// Process as data arrives
for await (const user of streamUsers(userIds)) {
  displayUser(user); // Show immediately
}

5. Prefetching

// Starts requests ahead of time and caches the resulting Promises so
// later get() calls resolve without a fresh network round trip.
class Prefetcher {
  constructor() {
    this.cache = new Map();
  }

  // Kicks off fetchFn() now and caches its Promise.
  // A second prefetch for the same key is a no-op.
  prefetch(key, fetchFn) {
    if (this.cache.has(key)) return;
    this.cache.set(key, fetchFn());
  }

  // Awaits the previously prefetched Promise for `key`.
  // Throws if prefetch() was never called for this key.
  async get(key) {
    if (!this.cache.has(key)) {
      throw new Error('Not prefetched');
    }
    return this.cache.get(key);
  }
}

// Usage
const prefetcher = new Prefetcher();

// Prefetch on page load (fetchUser / fetchPosts assumed defined elsewhere)
prefetcher.prefetch('user', () => fetchUser(1));
prefetcher.prefetch('posts', () => fetchPosts(1));

// Later: instant access — the request is already in flight or finished
const user = await prefetcher.get('user');

Memory Optimization

Avoid Memory Leaks

// Bad: Accumulates Promises indefinitely
const promises = [];
setInterval(() => {
  promises.push(fetchData()); // Memory leak! The array grows forever and is never drained
}, 1000);

// Good: Limit array size
const MAX_PROMISES = 100;
const promises = [];

setInterval(() => {
  if (promises.length >= MAX_PROMISES) {
    // Dropping the reference lets it be garbage-collected once settled;
    // note this does NOT cancel a still-running request.
    promises.shift(); // Remove oldest
  }
  promises.push(fetchData());
}, 1000);

Clean Up Cancelled Operations

// Illustrative cleanup pattern: release resources in `finally` so they
// are freed even when the request is aborted via the AbortSignal.
async function fetchWithCleanup(signal) {
  const resources = [];
  
  try {
    const data = await fetch('/api/data', { signal });
    resources.push(data);
    return await data.json();
  } finally {
    // Clean up even if cancelled
    // NOTE(review): a fetch Response has no close() method, so the
    // optional call (`?.()`) is a no-op here — the pattern is what this
    // example demonstrates; real resources (streams, handles) would
    // expose a close/release method.
    resources.forEach(r => r.close?.());
  }
}

Real-World Optimization Example

// Dashboard loader combining the techniques above: profiling, caching,
// stale-while-revalidate, parallel fetching of optional data, and
// fire-and-forget prefetching of the likely next page.
class OptimizedDashboard {
  constructor() {
    this.cache = new Map();                    // key -> cached payload (shape varies per key)
    this.profiler = new PerformanceProfiler(); // defined earlier in this lesson
  }

  // Loads everything the dashboard needs for `userId`.
  // Critical data is awaited first; analytics and notifications load in
  // parallel and degrade gracefully (null / []) instead of failing the page.
  async loadDashboard(userId) {
    return this.profiler.measure('loadDashboard', async () => {
      // Critical data: fetch immediately
      const criticalData = await this.profiler.measure(
        'criticalData',
        () => this.fetchCriticalData(userId)
      );

      // Non-critical: fetch in parallel
      const [analytics, notifications] = await Promise.all([
        this.profiler.measure('analytics',
          () => this.fetchAnalytics(userId)
        ).catch(() => null), // Don't fail on optional data

        this.profiler.measure('notifications',
          () => this.fetchNotifications(userId)
        ).catch(() => [])
      ]);

      // Prefetch for next page (fire-and-forget)
      this.prefetchNextPage(userId);

      return {
        ...criticalData,
        analytics,
        notifications
      };
    });
  }

  // Critical data is cached forever per user.
  // NOTE(review): no TTL on this cache entry — confirm staleness is acceptable.
  async fetchCriticalData(userId) {
    const cacheKey = `critical-${userId}`;

    if (this.cache.has(cacheKey)) {
      return this.cache.get(cacheKey);
    }

    const data = await fetch(`/api/users/${userId}/critical`)
      .then(r => r.json());

    this.cache.set(cacheKey, data);
    return data;
  }

  // Stale-while-revalidate: serve cached analytics younger than 60s and
  // refresh them in the background; otherwise fetch fresh data.
  async fetchAnalytics(userId) {
    const cached = this.cache.get(`analytics-${userId}`);

    if (cached && Date.now() - cached.timestamp < 60000) {
      // Revalidate in background.
      // FIX: swallow background failures — this Promise is deliberately not
      // awaited, so an uncaught rejection here would surface as an
      // unhandled rejection even though the caller already has usable
      // (stale) data. Mirrors the .catch in prefetchNextPage below.
      this.revalidateAnalytics(userId).catch(() => {});
      return cached.data;
    }

    return this.revalidateAnalytics(userId);
  }

  // Fetches fresh analytics and stores them with a timestamp for the
  // 60-second freshness check above.
  async revalidateAnalytics(userId) {
    const data = await fetch(`/api/users/${userId}/analytics`)
      .then(r => r.json());

    this.cache.set(`analytics-${userId}`, {
      data,
      timestamp: Date.now()
    });

    return data;
  }

  // Notifications are always fetched fresh (no caching).
  async fetchNotifications(userId) {
    return fetch(`/api/users/${userId}/notifications`)
      .then(r => r.json());
  }

  // Warms the cache for the settings page the user is likely to visit next.
  prefetchNextPage(userId) {
    // Don't await - fire and forget
    fetch(`/api/users/${userId}/settings`)
      .then(r => r.json())
      .then(data => {
        this.cache.set(`settings-${userId}`, data);
      })
      .catch(() => {}); // Ignore errors — prefetching is best-effort
  }

  // Exposes the profiler's summary rows for inspection.
  getPerformanceReport() {
    return this.profiler.getReport();
  }
}

// Usage
const dashboard = new OptimizedDashboard();
const data = await dashboard.loadDashboard(1);

// Check performance — one row per measured operation
console.table(dashboard.getPerformanceReport());

Performance Checklist

  1. ✅ Use parallel execution for independent operations
  2. ✅ Implement caching with appropriate TTL
  3. ✅ Deduplicate in-flight requests
  4. ✅ Batch API requests when possible
  5. ✅ Use lazy loading for non-critical data
  6. ✅ Prefetch predictable next requests
  7. ✅ Implement request cancellation
  8. ✅ Monitor and profile performance
  9. ✅ Avoid memory leaks
  10. ✅ Use streaming for large datasets

Key Takeaways

  • Measure first before optimizing
  • Use parallel execution for independent operations
  • Cache expensive operations
  • Deduplicate redundant requests
  • Batch multiple requests
  • Profile to identify bottlenecks
  • Avoid common anti-patterns

Next Steps

Ready to put everything together? Check out the course project to build a complete application!