Scaling Django with Redis and Caching
python backend django performance
Django is fast enough for most applications. But when traffic grows, database queries become the bottleneck. Caching with Redis can 10x your throughput.
Setting Up Redis
# Docker
docker run -d -p 6379:6379 redis
# Or install locally
brew install redis # macOS
sudo apt install redis-server # Ubuntu
# Django integration
pip install django-redis
# settings.py
# Route Django's cache framework to Redis via django-redis.
CACHES = {
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        # Redis DB index 1 (keeps cache data separate from DB 0).
        'LOCATION': 'redis://127.0.0.1:6379/1',
        'OPTIONS': {
            # Default client: single Redis node, no sharding/sentinel.
            'CLIENT_CLASS': 'django_redis.client.DefaultClient',
        }
    }
}
Basic Caching
Low-Level Cache API
from django.core.cache import cache
# Set
cache.set('my_key', 'my_value', timeout=300) # 5 minutes
# Get
value = cache.get('my_key')
# Get with default
value = cache.get('my_key', 'default_value')
# Delete
cache.delete('my_key')
# Increment/Decrement
cache.incr('counter')
cache.decr('counter')
Caching Expensive Queries
from django.core.cache import cache
def get_active_products():
    """Return all active products, served from cache when possible.

    Cache-aside: on a hit the cached list is returned directly; on a
    miss the query runs once and the result is cached for 15 minutes.
    """
    cache_key = 'active_products'
    cached = cache.get(cache_key)
    if cached is not None:
        return cached
    # Materialise the queryset so a plain list is pickled into Redis.
    fresh = list(Product.objects.filter(active=True))
    cache.set(cache_key, fresh, timeout=60 * 15)  # 15 minutes
    return fresh
Cache Decorator
from django.views.decorators.cache import cache_page

@cache_page(60 * 15)  # whole rendered response cached for 15 minutes
def product_list(request):
    """Render the product listing page; the response is cached per URL."""
    context = {'products': Product.objects.all()}
    return render(request, 'products.html', context)
Cache Invalidation
Invalidation is the hard part of caching: when the underlying data changes, every cache entry derived from it must be updated or deleted, or readers will see stale results.
Manual Invalidation
from django.core.cache import cache
class Product(models.Model):
    """Product that manually invalidates its cache entries on write.

    Note: ``save()``/``delete()`` overrides are NOT called by bulk
    operations (``QuerySet.update()``, ``bulk_create()``), so those
    paths still leave stale cache entries.
    """
    name = models.CharField(max_length=100)

    def save(self, *args, **kwargs):
        """Persist the product, then drop the cache entries it affects."""
        super().save(*args, **kwargs)
        self._invalidate_cache()

    def delete(self, *args, **kwargs):
        """Delete the product, then drop its cache entries.

        Bug fix: the original only invalidated on save, so a deleted
        product kept appearing in 'active_products' until the TTL expired.
        """
        cache_id = self.id  # capture before delete() clears the pk
        super().delete(*args, **kwargs)
        cache.delete('active_products')
        cache.delete(f'product_{cache_id}')

    def _invalidate_cache(self):
        # Drop both the shared list entry and this object's own entry.
        cache.delete('active_products')
        cache.delete(f'product_{self.id}')
Signal-Based Invalidation
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver

@receiver([post_save, post_delete], sender=Product)
def invalidate_product_cache(sender, instance, **kwargs):
    """Drop stale cache entries whenever a Product is saved or deleted.

    Signals fire for ORM saves/deletes regardless of where they happen,
    so this is more reliable than overriding model methods by hand.
    """
    for stale_key in ('active_products', f'product_{instance.id}'):
        cache.delete(stale_key)
Versioned Keys
def get_cache_version():
    """Return the current product-cache version number (1 if unset)."""
    version = cache.get('products_version', 1)
    return version
def increment_cache_version():
    """Bump the product-cache version, initialising it on first use.

    EAFP: try the atomic increment first; django-redis raises
    ValueError when the counter key does not exist yet, in which
    case we seed it with 1.
    """
    try:
        cache.incr('products_version')
    except ValueError:
        cache.set('products_version', 1)
def get_products():
    """Return all products via a versioned cache key.

    When the version is bumped, old ``products_v{n}`` entries are
    orphaned rather than deleted, so each entry MUST carry a timeout —
    otherwise orphans accumulate in Redis forever.
    """
    version = get_cache_version()
    key = f'products_v{version}'
    products = cache.get(key)
    if products is None:
        products = list(Product.objects.all())
        # Bug fix: the original called cache.set() with no timeout, so
        # every orphaned version lived forever — exactly the "Not Setting
        # Timeouts" mistake this article warns about.
        cache.set(key, products, timeout=60 * 15)
    return products
# When products change:
increment_cache_version() # Old cache becomes orphaned
Template Fragment Caching
{% load cache %}
{# Cache this fragment for 500 seconds under the key "sidebar". #}
{% cache 500 sidebar %}
{# Expensive template fragment rendered once per cache period #}
{% for item in items %}
{{ item.render }}
{% endfor %}
{% endcache %}
{# Extra arguments become part of the cache key: here, one cached copy per user id. #}
{% cache 500 sidebar request.user.id %}
User-specific sidebar
{% endcache %}
Per-View Caching
Vary by Headers
from django.views.decorators.vary import vary_on_headers

@vary_on_headers('User-Agent')
@cache_page(60 * 15)
def my_view(request):
    # Response cached for 15 minutes, with a separate cache entry per
    # distinct User-Agent header value.
    pass
Vary by Cookie
from django.views.decorators.vary import vary_on_cookie

@vary_on_cookie
@cache_page(60 * 15)
def my_view(request):
    # Response cached for 15 minutes, keyed on the Cookie header — in
    # practice one cache entry per logged-in session/user.
    pass
Session Backend
Store sessions in Redis:
# settings.py
# Store sessions in the 'default' cache (Redis, per CACHES above).
# NOTE(review): the pure-cache backend loses sessions if Redis evicts
# keys or restarts; 'cached_db' trades some speed for durability — confirm
# which tradeoff the deployment needs.
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
SESSION_CACHE_ALIAS = 'default'
Caching Patterns
Cache-Aside (Lazy Loading)
def get_user_profile(user_id):
    """Cache-aside lookup: try Redis first, fall back to the database.

    On a miss the profile is loaded from the DB and cached for an hour.
    """
    cache_key = f'profile_{user_id}'
    cached = cache.get(cache_key)
    if cached is not None:
        return cached
    profile = UserProfile.objects.get(user_id=user_id)
    cache.set(cache_key, profile, 3600)
    return profile
Write-Through
def update_user_profile(user_id, data):
    """Write-through update: persist to the DB, then refresh the cache.

    ``data`` maps profile field names to their new values; the cache
    entry is overwritten immediately so readers never see stale data.
    """
    profile = UserProfile.objects.get(user_id=user_id)
    for field_name, new_value in data.items():
        setattr(profile, field_name, new_value)
    profile.save()
    cache.set(f'profile_{user_id}', profile, 3600)
Cache Stampede Prevention
import time
from django.core.cache import cache

def get_expensive_data():
    """Return the cached expensive result, recomputing it at most once
    at a time.

    Stampede prevention: on a cache miss, ``cache.add()`` acts as a lock
    (it is atomic and only succeeds when the key is absent), so exactly
    one process recomputes while the others poll for the result.
    """
    key = 'expensive_data'
    lock_key = f'{key}_lock'
    while True:
        data = cache.get(key)
        if data is not None:
            return data
        # Try to acquire the lock; timeout=60 guards against a crashed
        # holder wedging the lock forever.
        if cache.add(lock_key, 1, timeout=60):
            try:
                data = compute_expensive_data()
                cache.set(key, data, 3600)
                return data
            finally:
                cache.delete(lock_key)
        # Another process holds the lock — poll until its result lands.
        # Bug fix: the original recursed here once per 100 ms of waiting,
        # so a slow computation under contention could blow Python's
        # recursion limit (~1000 frames ≈ 100 s); a loop cannot.
        time.sleep(0.1)
Monitoring
# Redis CLI
redis-cli info
# Key stats
redis-cli --stat
# Monitor commands
redis-cli monitor
# Memory usage
redis-cli info memory
Django Debug Toolbar
pip install django-debug-toolbar
Shows cache hits/misses per request.
Common Mistakes
Caching User-Specific Data Globally
# Bad - one shared cache entry for all users: the first visitor's
# dashboard (including their private data) is served to everyone
# for an hour.
@cache_page(3600)
def dashboard(request):
    return render(request, 'dashboard.html', {
        'user_data': request.user.get_data()
    })

# Good - vary on the Cookie header so each session/user gets its own
# cache entry; key_prefix namespaces these entries in Redis.
@cache_page(3600, key_prefix='dashboard')
@vary_on_cookie
def dashboard(request):
    ...
Not Setting Timeouts
# Bad - lives forever
cache.set('key', value)
# Good - explicit timeout
cache.set('key', value, timeout=3600)
Caching Things That Change Frequently
If data changes every request, don’t cache it.
Final Thoughts
Caching is powerful but adds complexity. Start simple:
- Add Redis
- Cache expensive queries
- Add cache invalidation
- Monitor hit rates
Most Django performance problems are solved by 3-4 well-placed caches.
The fastest request is the one you don’t make.