Welcome to Part 8 of our comprehensive NGINX on Ubuntu series! We’ll explore caching strategies and performance optimization techniques to dramatically improve your website’s speed and reduce server load.
Understanding NGINX Caching
NGINX offers multiple caching mechanisms to store frequently requested content in memory or on disk, reducing backend server load and improving response times.
graph TD A[Client Request] --> B[NGINX Cache] B --> C{Cache Hit?} C -->|Yes| D[Serve from Cache - Fast Response] C -->|No| E[Forward to Backend] E --> F[Backend Processing] F --> G[Store in Cache] G --> H[Return to Client] I[Cache Types] --> J[Proxy Cache] I --> K[FastCGI Cache] I --> L[Static File Cache] I --> M[Microcache] N[Cache Benefits] --> O[Faster Response Times] N --> P[Reduced Server Load] N --> Q[Better User Experience] N --> R[Lower Bandwidth Usage] style B fill:#e1f5fe style C fill:#fff3e0 style D fill:#e8f5e8 style G fill:#e8f5e8
Setting Up Proxy Cache
Proxy cache stores responses from backend servers, perfect for API responses and dynamic content.
# Configure proxy cache in nginx.conf
sudo nano /etc/nginx/nginx.conf
# Add to http block in nginx.conf
http {
# Proxy cache configuration
# proxy_cache_path arguments:
#   levels=1:2        — two-level hashed subdirectories so one directory
#                       never holds every cached file
#   keys_zone=NAME:SZ — shared-memory zone holding cache keys/metadata
#                       (per nginx docs, ~8000 keys per MB)
#   max_size          — on-disk cap; the cache manager evicts LRU entries above it
#   inactive          — entries not accessed for this long are removed,
#                       regardless of their freshness
#   use_temp_path=off — write cache files in place, skipping the temp-file copy
proxy_cache_path /var/cache/nginx/proxy
levels=1:2
keys_zone=api_cache:100m
max_size=1g
inactive=60m
use_temp_path=off;
# Separate, larger zone for static assets with a longer idle lifetime (24h).
proxy_cache_path /var/cache/nginx/static
levels=1:2
keys_zone=static_cache:50m
max_size=2g
inactive=24h
use_temp_path=off;
# Create cache directories
# sudo mkdir -p /var/cache/nginx/{proxy,static}
# sudo chown -R www-data:www-data /var/cache/nginx
}
Proxy Cache Virtual Host
# Create cached proxy configuration
sudo nano /etc/nginx/sites-available/cached-proxy.example.com
# Upstream pool for the cached API backend; keepalive reuses backend connections.
upstream backend_api {
    server 127.0.0.1:3000;
    server 127.0.0.1:3001;
    keepalive 32;
}

server {
    listen 80;
    server_name cached-proxy.example.com;

    # API responses, cached in the api_cache zone defined in nginx.conf.
    location /api/ {
        proxy_pass http://backend_api/;

        # Cache configuration
        proxy_cache api_cache;
        proxy_cache_valid 200 302 10m;
        proxy_cache_valid 404 1m;
        # Cache error responses only briefly so a recovered backend is retried soon.
        proxy_cache_valid 500 502 503 504 30s;

        # Serve stale entries while refreshing or when the backend is down.
        proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
        proxy_cache_revalidate on;
        # Collapse concurrent misses for the same key into one backend request.
        proxy_cache_lock on;
        proxy_cache_lock_timeout 5s;

        # Cache key. Note $request_uri already includes the query string;
        # $is_args$args is kept so the key matches the purge location below.
        proxy_cache_key "$scheme$request_method$host$request_uri$is_args$args";

        # Skip serving from cache AND skip storing for explicit no-cache
        # signals and authenticated requests.
        proxy_cache_bypass $http_pragma $http_authorization $cookie_nocache $arg_nocache;
        proxy_no_cache $http_pragma $http_authorization $cookie_nocache $arg_nocache;

        # Standard forwarding headers
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;

        # Cache diagnostics (HIT/MISS/BYPASS/EXPIRED/...).
        add_header X-Cache-Status $upstream_cache_status always;
        add_header X-Cache-Key "$scheme$request_method$host$request_uri" always;

        # Single Cache-Control header with max-age. (Combining `expires 10m`
        # with an add_header Cache-Control would emit TWO Cache-Control
        # headers in the response.)
        add_header Cache-Control "public, max-age=600, must-revalidate";
    }

    # Static assets proxied from the backend, with a long-lived cache.
    location /assets/ {
        proxy_pass http://backend_api/assets/;
        proxy_cache static_cache;
        proxy_cache_valid 200 24h;
        proxy_cache_valid 404 1m;
        proxy_cache_use_stale error timeout updating;
        proxy_set_header Host $host;
        add_header X-Cache-Status $upstream_cache_status always;
        # One year, immutable: clients never revalidate fingerprinted assets.
        add_header Cache-Control "public, max-age=31536000, immutable";
    }

    # Cache purge endpoint, e.g. GET /cache-purge/api/users purges /api/users.
    # Anchored with ^...$ so only URIs starting with /cache-purge match.
    # NOTE: proxy_cache_purge requires the third-party ngx_cache_purge module
    # (or NGINX Plus); stock nginx will fail `nginx -t` on this directive.
    location ~ ^/cache-purge(/.*)$ {
        allow 127.0.0.1;
        allow 192.168.1.0/24;
        deny all;
        proxy_cache_purge api_cache "$scheme$request_method$host$1$is_args$args";
    }
}
FastCGI Cache for PHP
FastCGI cache stores PHP responses, dramatically improving PHP application performance.
# Add FastCGI cache to nginx.conf
http {
# FastCGI cache configuration
# fastcgi_cache_path mirrors proxy_cache_path: 1:2 hashed subdirectories,
# a 100 MB shared-memory key zone, a 1 GB on-disk cap, eviction of entries
# idle for 60 minutes, and in-place writes (use_temp_path=off).
fastcgi_cache_path /var/cache/nginx/fastcgi
levels=1:2
keys_zone=php_cache:100m
max_size=1g
inactive=60m
use_temp_path=off;
# Cache key for FastCGI
# One entry per scheme+method+host+URI; this key is global, so every vhost
# using php_cache shares the same key format.
fastcgi_cache_key "$scheme$request_method$host$request_uri";
# Create cache directory
# sudo mkdir -p /var/cache/nginx/fastcgi
# sudo chown -R www-data:www-data /var/cache/nginx/fastcgi
}
# FastCGI cache virtual host
sudo nano /etc/nginx/sites-available/fastcgi-cache.example.com
server {
    listen 80;
    server_name fastcgi-cache.example.com;
    root /var/www/phpapp/public_html;
    index index.php index.html;

    # $skip_cache = 1 means both "don't serve from cache" (fastcgi_cache_bypass)
    # and "don't store the response" (fastcgi_no_cache).
    set $skip_cache 0;

    # Never cache state-changing requests.
    if ($request_method = POST) {
        set $skip_cache 1;
    }

    # Query strings usually mean filtered/personalized output.
    if ($query_string != "") {
        set $skip_cache 1;
    }

    # Admin, auth, and checkout flows must always reach PHP.
    if ($request_uri ~* "/(admin|login|register|checkout)") {
        set $skip_cache 1;
    }

    # Logged-in users (incl. WordPress cookies) get uncached pages.
    if ($http_cookie ~* "logged_in|user_|wordpress_logged_in") {
        set $skip_cache 1;
    }

    location / {
        try_files $uri $uri/ /index.php?$query_string;
    }

    location ~ \.php$ {
        try_files $uri =404;
        fastcgi_split_path_info ^(.+\.php)(/.+)$;
        fastcgi_pass unix:/var/run/php/php8.3-fpm.sock;
        fastcgi_index index.php;
        include fastcgi_params;
        fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;

        # FastCGI cache (php_cache zone is defined in nginx.conf).
        fastcgi_cache php_cache;
        fastcgi_cache_valid 200 60m;
        fastcgi_cache_valid 404 10m;
        fastcgi_cache_use_stale error timeout updating;

        # Honor the skip flag computed above.
        fastcgi_cache_bypass $skip_cache;
        fastcgi_no_cache $skip_cache;

        # Diagnostics: cache result and whether the skip flag fired.
        add_header X-Cache-Status $upstream_cache_status always;
        add_header X-Cache-Skip $skip_cache always;

        # Buffering tuned for typical PHP page sizes.
        fastcgi_buffer_size 128k;
        fastcgi_buffers 256 16k;
        fastcgi_busy_buffers_size 256k;
        fastcgi_temp_file_write_size 256k;
        fastcgi_read_timeout 300;
    }

    # Cache purge endpoint (anchored so only URIs starting with /cache-purge match).
    # NOTE: fastcgi_cache_purge requires the third-party ngx_cache_purge module
    # (or NGINX Plus); stock nginx will fail `nginx -t` on this directive.
    location ~ ^/cache-purge {
        allow 127.0.0.1;
        allow 192.168.1.0/24;
        deny all;
        fastcgi_cache_purge php_cache "$scheme$request_method$host$request_uri";
    }
}
Static File Optimization
# Create optimized static file configuration
sudo nano /etc/nginx/snippets/static-files.conf
# Static file optimization snippet — include this inside a server{} block.

# Images: long-lived cache, no access logging.
location ~* \.(jpg|jpeg|png|gif|ico|svg|webp|avif)$ {
    # Single Cache-Control header with max-age. (Combining `expires 1y`
    # with add_header Cache-Control would emit two Cache-Control headers.)
    add_header Cache-Control "public, max-age=31536000, immutable";
    add_header Vary "Accept-Encoding";
    access_log off;

    # SVG is text-based, so compression pays off.
    location ~* \.svg$ {
        gzip on;
        gzip_types image/svg+xml;
    }

    # WebP fallback: serve image.webp next to image.jpg when the client
    # accepts it. REQUIRES this map in the http{} block — without it nginx
    # fails to start with 'unknown "webp_suffix" variable':
    #   map $http_accept $webp_suffix {
    #       default   "";
    #       "~*webp"  ".webp";
    #   }
    location ~* \.(jpg|jpeg|png)$ {
        try_files $uri$webp_suffix $uri =404;
    }
}

# Fonts: long cache plus permissive CORS so they load from CDNs/subdomains.
location ~* \.(woff|woff2|ttf|eot|otf)$ {
    add_header Cache-Control "public, max-age=31536000, immutable";
    add_header Access-Control-Allow-Origin "*";
    access_log off;
}

# CSS and JavaScript: one week, compressed.
location ~* \.(css|js)$ {
    add_header Cache-Control "public, max-age=604800";
    add_header Vary "Accept-Encoding";
    gzip on;
    gzip_vary on;
    gzip_min_length 1024;
    gzip_types
        text/css
        text/javascript
        application/javascript
        application/json;
}

# Documents: one week.
location ~* \.(pdf|doc|docx|xls|xlsx|ppt|pptx)$ {
    add_header Cache-Control "public, max-age=604800";
    access_log off;
}

# Media files: 30 days, with MP4 pseudo-streaming.
location ~* \.(mp4|webm|ogg|mp3|wav|flac)$ {
    add_header Cache-Control "public, max-age=2592000";
    access_log off;

    # NOTE(review): the mp4 directive needs ngx_http_mp4_module
    # (--with-http_mp4_module); verify your build includes it with `nginx -V`.
    mp4;
    mp4_buffer_size 1m;
    mp4_max_buffer_size 5m;
}
Microcaching Strategy
Microcaching stores content for very short periods (seconds), perfect for high-traffic dynamic content.
graph TD A[High Traffic Request] --> B[Microcache Check] B --> C{Cache Hit?} C -->|Yes - within 30s| D[Serve from Cache - Instant Response] C -->|No - expired| E[Generate New Content] E --> F[Cache for 30 seconds] F --> G[Serve to Client] H[Benefits] --> I[Handles Traffic Spikes] H --> J[Reduces Backend Load] H --> K[Near Real-time Content] style B fill:#e1f5fe style D fill:#e8f5e8 style F fill:#fff3e0
# Microcaching configuration
sudo nano /etc/nginx/sites-available/microcache.example.com
upstream app_backend {
server 127.0.0.1:3000;
server 127.0.0.1:3001;
keepalive 32;
}
# Add to nginx.conf http block:
# proxy_cache_path /var/cache/nginx/microcache levels=1:2 keys_zone=microcache:10m max_size=100m inactive=60m use_temp_path=off;
server {
listen 80;
server_name microcache.example.com;
# Microcaching for dynamic content
location / {
proxy_pass http://app_backend;
# Microcache settings
proxy_cache microcache;
proxy_cache_valid 200 30s; # Cache for 30 seconds
proxy_cache_valid 404 10s;
proxy_cache_valid 500 502 503 504 5s;
# Cache key includes user agent for personalization
# NOTE(review): keying on the full $http_user_agent creates one cache entry
# per distinct UA string, which sharply lowers the hit rate — confirm this
# personalization is really needed (a map to a few UA classes is cheaper).
proxy_cache_key "$scheme$request_method$host$request_uri$http_user_agent";
# Serve stale content during backend issues
proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
# Refresh expired entries in the background while serving the stale copy.
proxy_cache_background_update on;
# Collapse concurrent misses for the same key into one backend request.
proxy_cache_lock on;
# Skip cache for authenticated users
proxy_cache_bypass $http_authorization $cookie_session;
proxy_no_cache $http_authorization $cookie_session;
# Headers
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# Cache status
add_header X-Cache-Status $upstream_cache_status always;
add_header X-Cache-Date $upstream_http_date always;
}
# API endpoints with shorter cache
location /api/ {
proxy_pass http://app_backend/api/;
proxy_cache microcache;
proxy_cache_valid 200 10s; # Very short cache
proxy_cache_valid 404 5s;
# NOTE(review): $request_uri already contains the query string, so appending
# $args duplicates it in the key — harmless, but relevant when purging.
proxy_cache_key "$scheme$request_method$host$request_uri$args";
proxy_cache_bypass $http_authorization $arg_nocache;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
add_header X-Cache-Status $upstream_cache_status always;
}
}
Cache Management and Purging
# Create cache management script
sudo nano /usr/local/bin/nginx-cache-manager.sh
#!/bin/bash
#
# NGINX cache management: report sizes, purge, and clean cache trees.
# Usage: nginx-cache-manager.sh {stats|purge [cache_type]|clean}
set -euo pipefail

# Overridable via environment for testing; defaults match the series layout.
CACHE_DIR="${CACHE_DIR:-/var/cache/nginx}"
LOG_FILE="${LOG_FILE:-/var/log/nginx/cache-manager.log}"

# Print a timestamped message to stdout; append to the log best-effort so a
# missing/unwritable log directory never aborts the script.
log_message() {
  local msg
  msg="[$(date '+%Y-%m-%d %H:%M:%S')] $1"
  printf '%s\n' "$msg"
  printf '%s\n' "$msg" >> "$LOG_FILE" 2>/dev/null || true
}

# Report disk usage and file counts for each known cache zone.
show_cache_stats() {
  log_message "=== NGINX Cache Statistics ==="
  if [ ! -d "$CACHE_DIR" ]; then
    log_message "Cache directory not found: $CACHE_DIR"
    return 0
  fi
  local cache_type size files
  for cache_type in proxy fastcgi static microcache; do
    if [ -d "$CACHE_DIR/$cache_type" ]; then
      size=$(du -sh "$CACHE_DIR/$cache_type" 2>/dev/null | cut -f1)
      files=$(find "$CACHE_DIR/$cache_type" -type f 2>/dev/null | wc -l)
      log_message "$cache_type cache: $size ($files files)"
    fi
  done
}

# Purge one named cache, or all caches when no argument is given.
purge_cache() {
  local cache_type="${1:-}"
  if [ -z "$cache_type" ]; then
    log_message "Purging all caches..."
    # ${VAR:?} aborts instead of expanding empty — guards against rm -rf /*.
    rm -rf -- "${CACHE_DIR:?}"/*
    sudo systemctl reload nginx
    log_message "All caches purged"
  elif [ -d "$CACHE_DIR/$cache_type" ]; then
    log_message "Purging $cache_type cache..."
    rm -rf -- "${CACHE_DIR:?}/${cache_type:?}"/*
    log_message "$cache_type cache purged"
  else
    log_message "Cache type not found: $cache_type"
  fi
}

# Remove cache files unused for 7+ days, then prune empty directories.
clean_old_cache() {
  log_message "Cleaning old cache files..."
  find "$CACHE_DIR" -type f -mtime +7 -delete 2>/dev/null || true
  find "$CACHE_DIR" -type d -empty -delete 2>/dev/null || true
  log_message "Old cache files cleaned"
}

case "${1:-stats}" in
  stats) show_cache_stats ;;
  purge) purge_cache "${2:-}" ;;
  clean) clean_old_cache ;;
  *)
    echo "Usage: $0 {stats|purge [cache_type]|clean}" >&2
    echo "Cache types: proxy, fastcgi, static, microcache" >&2
    exit 2
    ;;
esac
# Make executable: sudo chmod +x /usr/local/bin/nginx-cache-manager.sh
Performance Optimization Settings
# Create performance optimization snippet
sudo nano /etc/nginx/snippets/performance.conf
# Performance optimization settings
# NOTE(review): include this snippet from the http{} context —
# limit_req_zone and limit_conn_zone below are valid in http{} only,
# so including it inside a server{} block makes `nginx -t` fail.
# Gzip compression
gzip on;
gzip_vary on;
gzip_min_length 1024;
gzip_proxied any;
# Level 6 is the usual CPU/ratio sweet spot; higher levels cost CPU for
# little extra compression.
gzip_comp_level 6;
# text/html is always compressed by nginx and must not be listed here.
gzip_types
text/plain
text/css
text/xml
text/javascript
# NOTE(review): "text/json" is not a registered MIME type; application/json
# below is the one that matters — confirm and drop if unused.
text/json
application/json
application/javascript
application/xml+rss
application/atom+xml
image/svg+xml;
# Brotli compression (if module available)
# brotli on;
# brotli_comp_level 6;
# brotli_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;
# File descriptors
# Cache open file handles and stat() metadata for frequently served files.
open_file_cache max=10000 inactive=60s;
open_file_cache_valid 30s;
open_file_cache_min_uses 2;
open_file_cache_errors on;
# TCP optimization
tcp_nopush on;
tcp_nodelay on;
# Timeouts
keepalive_timeout 65;
keepalive_requests 1000;
client_body_timeout 12;
client_header_timeout 12;
send_timeout 10;
# Buffer sizes
client_body_buffer_size 128k;
client_header_buffer_size 1k;
large_client_header_buffers 4 4k;
client_max_body_size 64m;
# Rate limiting
# These only DEFINE the shared-memory zones; apply them with limit_req /
# limit_conn inside a server{} or location{} block.
limit_req_zone $binary_remote_addr zone=general:10m rate=10r/s;
limit_conn_zone $binary_remote_addr zone=conn_limit:10m;
Cache Monitoring and Analytics
# Create cache analytics script
sudo nano /usr/local/bin/cache-analytics.sh
#!/bin/bash
#
# Cache analytics: summarize NGINX cache hit/miss behaviour from the access log.
#
# NOTE(review): the stock "combined" log_format does NOT log response headers.
# These patterns only match if your log_format writes $upstream_cache_status
# (and ends each line with $request_time) — confirm before trusting the numbers.
set -euo pipefail

# Overridable via environment for testing / non-default layouts.
ACCESS_LOG="${ACCESS_LOG:-/var/log/nginx/access.log}"
SAMPLE=1000  # number of most recent requests to analyze

# Print hit-rate, response-time, and top-URL statistics to stdout.
analyze_cache_performance() {
  echo "=== Cache Hit Rate Analysis ==="
  echo "Based on last $SAMPLE requests:"
  echo

  if [ ! -r "$ACCESS_LOG" ]; then
    echo "Access log not readable: $ACCESS_LOG"
    return 0
  fi

  local total_requests cache_hits cache_misses
  total_requests=$(tail -n "$SAMPLE" "$ACCESS_LOG" | wc -l)
  # grep -c exits 1 when the count is 0; mask that so set -e doesn't abort.
  cache_hits=$(tail -n "$SAMPLE" "$ACCESS_LOG" | grep -c "X-Cache-Status.*HIT") || true
  cache_misses=$(tail -n "$SAMPLE" "$ACCESS_LOG" | grep -c "X-Cache-Status.*MISS") || true

  if [ "$total_requests" -eq 0 ]; then
    echo "No requests found in access log"
    return 0
  fi

  echo "Total requests: $total_requests"
  echo "Cache hits: $cache_hits"
  echo "Cache misses: $cache_misses"
  echo "Hit rate: $((cache_hits * 100 / total_requests))%"
  echo

  echo "=== Response Time Analysis ==="
  # One awk pass per class instead of grep|awk|awk; assumes the request time
  # is the LAST field of the log line — TODO confirm against your log_format.
  local avg_hit_time avg_miss_time
  avg_hit_time=$(tail -n "$SAMPLE" "$ACCESS_LOG" \
    | awk '/X-Cache-Status.*HIT/ {sum += $NF; n++} END {print (n ? sum / n : 0)}')
  avg_miss_time=$(tail -n "$SAMPLE" "$ACCESS_LOG" \
    | awk '/X-Cache-Status.*MISS/ {sum += $NF; n++} END {print (n ? sum / n : 0)}')
  echo "Average hit response time: ${avg_hit_time}s"
  echo "Average miss response time: ${avg_miss_time}s"
  echo

  echo "=== Most Cached URLs ==="
  # $7 is the request path in the combined log format — TODO confirm.
  # '|| true' guards against head's early exit (SIGPIPE) under pipefail.
  tail -n "$SAMPLE" "$ACCESS_LOG" \
    | awk '/X-Cache-Status.*HIT/ {print $7}' \
    | sort | uniq -c | sort -nr | head -5 || true
}

analyze_cache_performance
# Make executable: sudo chmod +x /usr/local/bin/cache-analytics.sh
Testing and Validation
# Create cache directories (must exist and be owned by the nginx worker user)
sudo mkdir -p /var/cache/nginx/{proxy,fastcgi,static,microcache}
sudo chown -R www-data:www-data /var/cache/nginx
# Enable cached sites
sudo ln -s /etc/nginx/sites-available/cached-proxy.example.com /etc/nginx/sites-enabled/
sudo ln -s /etc/nginx/sites-available/fastcgi-cache.example.com /etc/nginx/sites-enabled/
# Test configuration before reloading
sudo nginx -t
# Reload NGINX
sudo systemctl reload nginx
# Test cache functionality (first request should report X-Cache-Status: MISS)
curl -H "Host: cached-proxy.example.com" http://localhost/api/test -v
# Check cache headers
curl -I -H "Host: cached-proxy.example.com" http://localhost/api/test
# Test cache hit (second request — expect X-Cache-Status: HIT)
curl -H "Host: cached-proxy.example.com" http://localhost/api/test -v
# Monitor cache performance
/usr/local/bin/nginx-cache-manager.sh stats
# Analyze cache performance
/usr/local/bin/cache-analytics.sh
# Load test with caching (ab is ApacheBench, from the apache2-utils package)
ab -n 1000 -c 10 -H "Host: cached-proxy.example.com" http://localhost/api/test
Cache Warming Strategy
# Create cache warming script
sudo nano /usr/local/bin/cache-warmer.sh
#!/bin/bash
#
# Cache warmer: pre-populate the proxy cache by requesting hot URLs so the
# first real visitor gets a cache HIT instead of paying the backend cost.
set -euo pipefail

SITE_URL="${SITE_URL:-http://cached-proxy.example.com}"
# Host header sent to localhost (must match the cached vhost's server_name).
HOST_HEADER="cached-proxy.example.com"
# Pause between requests so warming doesn't hammer the backend.
DELAY="${DELAY:-1}"

URLS=(
  "/api/popular"
  "/api/trending"
  "/api/latest"
  "/assets/main.css"
  "/assets/app.js"
)

# Request each URL once; a failed request is reported but doesn't stop the run.
warm_cache() {
  local url
  echo "Warming cache for $SITE_URL"
  for url in "${URLS[@]}"; do
    echo "Warming: $url"
    # -f: treat HTTP errors as failures so they are visible in the log.
    if ! curl -sf -H "Host: $HOST_HEADER" "http://localhost$url" > /dev/null; then
      echo "warn: failed to warm $url" >&2
    fi
    sleep "$DELAY"
  done
  echo "Cache warming completed"
}

# Run cache warming
warm_cache
# Make executable: sudo chmod +x /usr/local/bin/cache-warmer.sh
What’s Next?
Excellent! You’ve implemented comprehensive caching strategies that will dramatically improve your website’s performance. Your NGINX setup now includes proxy caching, FastCGI caching, static file optimization, and microcaching.
Coming up in Part 9: NGINX Security Hardening and Access Control
References
This is Part 8 of our 22-part NGINX series. Your website is now lightning fast with comprehensive caching! Next, we’ll focus on security hardening. Questions about caching? Share them in the comments!