Learning Objectives

  • Implement retry mechanisms for failed operations
  • Master exponential backoff strategy
  • Handle transient failures gracefully
  • Build production-ready retry logic

Why Retry Logic?

Network requests and external services can fail temporarily due to:

  • Network glitches and dropped connections
  • Request timeouts
  • Server overload or brief outages (5xx responses)
  • Rate limiting (429 responses)

Retry logic makes your application resilient to these transient failures.

Basic Retry Pattern

async function retry(fn, retries = 3) {
  for (let i = 0; i < retries; i++) {
    try {
      return await fn();
    } catch (error) {
      if (i === retries - 1) {
        throw error;
      }
      console.log(`Attempt ${i + 1} failed, retrying...`);
    }
  }
}

// Usage (note: fetch only rejects on network failures, not on HTTP error
// statuses; see Conditional Retry below for handling status codes)
const data = await retry(() => fetch('/api/data'), 3);

Retry with Delay

async function retryWithDelay(fn, retries = 3, delay = 1000) {
  for (let i = 0; i < retries; i++) {
    try {
      return await fn();
    } catch (error) {
      if (i === retries - 1) {
        throw error;
      }
      
      console.log(`Attempt ${i + 1} failed, waiting ${delay}ms...`);
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }
}

// Usage
const data = await retryWithDelay(
  () => fetch('/api/data'),
  3,
  1000
);

Exponential Backoff

Exponential backoff doubles the delay after each failed attempt, which eases the load on an already struggling server:

async function retryWithExponentialBackoff(
  fn,
  retries = 5,
  baseDelay = 1000,
  maxDelay = 30000
) {
  for (let i = 0; i < retries; i++) {
    try {
      return await fn();
    } catch (error) {
      if (i === retries - 1) {
        throw error;
      }
      
      // Calculate exponential delay: 1s, 2s, 4s, 8s, 16s...
      const delay = Math.min(baseDelay * Math.pow(2, i), maxDelay);
      
      console.log(`Attempt ${i + 1} failed, waiting ${delay}ms...`);
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }
}

// Usage
const data = await retryWithExponentialBackoff(
  () => fetch('/api/data'),
  5,    // 5 retries
  1000, // Start with 1 second
  30000 // Max 30 seconds
);
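
With these values, the waits between attempts are 1s, 2s, 4s, and 8s, so in the worst case the call spends roughly 15 seconds waiting before the final error is thrown. The 30-second cap only matters at higher retry counts, since the uncapped delay would first exceed it on the sixth wait (32 seconds).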

Exponential Backoff with Jitter

Adding randomness (jitter) prevents the thundering herd problem, where many clients that failed at the same moment all retry at the same time:

async function retryWithJitter(
  fn,
  retries = 5,
  baseDelay = 1000,
  maxDelay = 30000
) {
  for (let i = 0; i < retries; i++) {
    try {
      return await fn();
    } catch (error) {
      if (i === retries - 1) {
        throw error;
      }
      
      // Exponential backoff with jitter
      const exponentialDelay = baseDelay * Math.pow(2, i);
      const jitter = Math.random() * exponentialDelay;
      const delay = Math.min(exponentialDelay + jitter, maxDelay);
      
      console.log(`Retry ${i + 1}/${retries} after ${Math.round(delay)}ms`);
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }
}

// Usage
const data = await retryWithJitter(() => fetch('/api/data'));
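
The version above adds a random amount on top of the exponential delay. A widely used alternative is "full jitter", where the entire wait is drawn uniformly between zero and the exponential ceiling, spreading retries out even further. A minimal sketch of just the delay calculation, reusing the parameter names from above:

// "Full jitter" variant: instead of adding jitter on top of the exponential
// delay, the wait is a random value between 0 and that delay.
function fullJitterDelay(attempt, baseDelay = 1000, maxDelay = 30000) {
  const ceiling = Math.min(baseDelay * Math.pow(2, attempt), maxDelay);
  return Math.random() * ceiling;
}

// Drop-in use inside the retry loop:
// await new Promise(resolve => setTimeout(resolve, fullJitterDelay(i)));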

Conditional Retry

Only retry on specific error types:

function isRetryableError(error) {
  // Retry on network failures, 5xx server errors, and rate limits
  return (
    error instanceof TypeError ||    // fetch rejects with a TypeError on network failure
    error.name === 'NetworkError' || // network errors surfaced by other HTTP clients
    (error.status >= 500 && error.status < 600) ||
    error.status === 429             // Rate limit
  );
}

async function retryOnCondition(fn, retries = 3, delay = 1000) {
  for (let i = 0; i < retries; i++) {
    try {
      return await fn();
    } catch (error) {
      if (i === retries - 1 || !isRetryableError(error)) {
        throw error;
      }
      
      console.log(`Attempt ${i + 1}/${retries} failed with a retryable error, retrying...`);
      await new Promise(resolve => setTimeout(resolve, delay * (i + 1)));
    }
  }
}

// Usage
const data = await retryOnCondition(async () => {
  const response = await fetch('/api/data');
  if (!response.ok) {
    const error = new Error('HTTP Error');
    error.status = response.status;
    throw error;
  }
  return response.json();
});
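
Rate-limited (429) responses often include a Retry-After header that tells the client how long to wait. A minimal sketch of honoring it, assuming the header carries a number of seconds (it can also be an HTTP date, which this sketch ignores and falls back to exponential backoff for):

async function fetchWithRetryAfter(url, retries = 3) {
  for (let i = 0; i < retries; i++) {
    const response = await fetch(url);

    if (response.ok) {
      return response.json();
    }

    if (response.status !== 429 || i === retries - 1) {
      throw new Error(`HTTP ${response.status}`);
    }

    // Prefer the server's hint; fall back to exponential backoff
    const retryAfter = Number(response.headers.get('Retry-After'));
    const delay = Number.isFinite(retryAfter) && retryAfter > 0
      ? retryAfter * 1000
      : 1000 * Math.pow(2, i);

    console.log(`Rate limited, waiting ${delay}ms...`);
    await new Promise(resolve => setTimeout(resolve, delay));
  }
}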

Retry with Progress Callback

async function retryWithProgress(
  fn,
  retries = 3,
  onRetry = null
) {
  for (let i = 0; i < retries; i++) {
    try {
      return await fn();
    } catch (error) {
      if (i === retries - 1) {
        throw error;
      }
      
      if (onRetry) {
        onRetry(i + 1, retries, error);
      }
      
      const delay = 1000 * Math.pow(2, i);
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }
}

// Usage
const data = await retryWithProgress(
  () => fetch('/api/data'),
  3,
  (attempt, total, error) => {
    console.log(`Retry ${attempt}/${total}: ${error.message}`);
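    // updateUI is a stand-in for your app's own status display helper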
    updateUI(`Retrying... (${attempt}/${total})`);
  }
);

Retry Class

class RetryStrategy {
  constructor(options = {}) {
    this.maxRetries = options.maxRetries || 3;
    this.baseDelay = options.baseDelay || 1000;
    this.maxDelay = options.maxDelay || 30000;
    this.useJitter = options.useJitter !== false;
    this.shouldRetry = options.shouldRetry || (() => true);
  }
  
  async execute(fn) {
    let lastError;
    
    for (let attempt = 0; attempt < this.maxRetries; attempt++) {
      try {
        return await fn();
      } catch (error) {
        lastError = error;
        
        if (attempt === this.maxRetries - 1 || !this.shouldRetry(error)) {
          throw error;
        }
        
        const delay = this.calculateDelay(attempt);
        console.log(`Retry ${attempt + 1}/${this.maxRetries} after ${delay}ms`);
        await this.sleep(delay);
      }
    }
    
    throw lastError;
  }
  
  calculateDelay(attempt) {
    let delay = this.baseDelay * Math.pow(2, attempt);
    
    if (this.useJitter) {
      delay += Math.random() * delay;
    }
    
    return Math.min(delay, this.maxDelay);
  }
  
  sleep(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
  }
}

// Usage
const retry = new RetryStrategy({
  maxRetries: 5,
  baseDelay: 1000,
  maxDelay: 30000,
  useJitter: true,
  shouldRetry: (error) => error.status >= 500
});

// For status-based retries, the wrapped function must throw errors that carry
// a status; fetch alone only rejects on network failures (see the API client below)
const data = await retry.execute(() => fetch('/api/data'));

Real-World Example: API Client with Retry

class APIClient {
  constructor(baseURL, retryOptions = {}) {
    this.baseURL = baseURL;
    this.retry = new RetryStrategy(retryOptions);
  }
  
  async request(endpoint, options = {}) {
    return await this.retry.execute(async () => {
      const url = `${this.baseURL}${endpoint}`;
      const response = await fetch(url, options);
      
      if (!response.ok) {
        const error = new Error(`HTTP ${response.status}`);
        error.status = response.status;
        error.response = response;
        throw error;
      }
      
      return await response.json();
    });
  }
  
  async get(endpoint) {
    return await this.request(endpoint);
  }
  
  async post(endpoint, data) {
    return await this.request(endpoint, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(data)
    });
  }
}

// Usage
const api = new APIClient('https://api.example.com', {
  maxRetries: 3,
  baseDelay: 1000,
  shouldRetry: (error) => error.status >= 500 || error.status === 429
});

try {
  const users = await api.get('/users');
  console.log(users);
} catch (error) {
  console.error('All retries failed:', error);
}
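
One production refinement worth considering: give each attempt its own timeout so a hung request fails fast instead of stalling the retry loop. A minimal sketch, assuming AbortSignal.timeout() is available (Node 18+ and modern browsers); the signal is created inside the retried function on purpose, because a signal created once would keep counting down across attempts:

const strategy = new RetryStrategy({
  maxRetries: 3,
  shouldRetry: (error) =>
    error.status >= 500 ||
    error.status === 429 ||
    error.name === 'TimeoutError' // the per-attempt timeout fired
});

const users = await strategy.execute(async () => {
  const response = await fetch('https://api.example.com/users', {
    // Fresh 5-second budget for every attempt
    signal: AbortSignal.timeout(5000)
  });

  if (!response.ok) {
    const error = new Error(`HTTP ${response.status}`);
    error.status = response.status;
    throw error;
  }

  return response.json();
});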

Retry with Circuit Breaker

class CircuitBreaker {
  constructor(threshold = 5, timeout = 60000) {
    this.failureCount = 0;
    this.threshold = threshold;
    this.timeout = timeout;
    this.state = 'CLOSED'; // CLOSED, OPEN, HALF_OPEN
    this.nextAttempt = Date.now();
  }
  
  async execute(fn) {
    if (this.state === 'OPEN') {
      if (Date.now() < this.nextAttempt) {
        throw new Error('Circuit breaker is OPEN');
      }
      this.state = 'HALF_OPEN';
    }
    
    try {
      const result = await fn();
      this.onSuccess();
      return result;
    } catch (error) {
      this.onFailure();
      throw error;
    }
  }
  
  onSuccess() {
    this.failureCount = 0;
    this.state = 'CLOSED';
  }
  
  onFailure() {
    this.failureCount++;
    
    if (this.failureCount >= this.threshold) {
      this.state = 'OPEN';
      this.nextAttempt = Date.now() + this.timeout;
      console.log(`Circuit breaker OPEN for ${this.timeout}ms`);
    }
  }
}

// Combine retry with circuit breaker
const circuitBreaker = new CircuitBreaker(5, 60000);
const retry = new RetryStrategy({ maxRetries: 3 });

async function resilientFetch(url) {
  return await circuitBreaker.execute(async () => {
    return await retry.execute(() => fetch(url));
  });
}
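
Because the breaker wraps the whole retry loop, it records at most one failure per call, after that call's retries are exhausted. Usage looks like any other async call; once the breaker is OPEN, calls fail immediately without touching the network:

// Usage
try {
  const response = await resilientFetch('https://api.example.com/data');
  console.log(await response.json());
} catch (error) {
  console.error('Request failed or circuit is open:', error.message);
}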

Best Practices

  1. Use exponential backoff to reduce server load
  2. Add jitter to prevent thundering herd
  3. Set maximum delays to avoid infinite waits
  4. Only retry transient errors (network, 5xx)
  5. Don't retry on client errors (4xx)
  6. Log retry attempts for monitoring
  7. Set reasonable retry limits (3-5 attempts)
  8. Consider circuit breakers for failing services

Key Takeaways

  • ✅ Retry logic makes applications resilient
  • ✅ Use exponential backoff to reduce load
  • ✅ Add jitter to prevent synchronized retries
  • ✅ Only retry transient failures
  • ✅ Combine with circuit breakers for better reliability
  • ✅ Set maximum retry limits and delays

Next Steps

Next, we'll learn about rate limiting to control request frequency!