from django.db import models


class Order(models.Model):
    customer = models.ForeignKey('Customer', on_delete=models.CASCADE)
    status = models.CharField(max_length=20, db_index=True)
    total = models.DecimalField(max_digits=10, decimal_places=2)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    completed_at = models.DateTimeField(null=True, blank=True)

    class Meta:
        indexes = [
            # Compound index for queries filtering on customer and status together
            models.Index(fields=['customer', 'status']),
            # Descending index for newest-first ordering
            models.Index(fields=['-created_at']),
            # Partial index (PostgreSQL and SQLite): indexes only completed orders
            models.Index(
                fields=['completed_at'],
                condition=models.Q(status='completed'),
                name='completed_orders_idx',
            ),
            # Covering index (PostgreSQL): included columns allow index-only scans
            models.Index(
                fields=['status'],
                include=['total', 'created_at'],
                name='status_cover_idx',
            ),
        ]
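For illustration, these are the query shapes each index serves (some_customer is a placeholder for a Customer instance):

# Compound index: filter on both leading columns
Order.objects.filter(customer=some_customer, status='pending')

# Descending index: newest-first listing
Order.objects.order_by('-created_at')[:50]

# Partial index: the filter must repeat the index condition to match it
Order.objects.filter(status='completed', completed_at__year=2024)

# Covering index: selecting only indexed/included columns can be
# answered by an index-only scan without touching the table heap
Order.objects.filter(status='shipped').values('total', 'created_at')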
Database indexes dramatically speed up reads: the database locates matching rows through the index instead of scanning the whole table. I add indexes to frequently filtered fields via db_index=True, or through Meta.indexes when I need compound, partial, or covering indexes. Compound indexes help queries that filter on multiple fields together; field order matters, because the index only serves queries that constrain a leading prefix of its columns. For full-text search on PostgreSQL, I use GinIndex over a SearchVectorField, as sketched below. I verify that indexes are actually used with EXPLAIN. Every index slows down writes and consumes disk space, so I profile before adding one. Partial indexes with condition keep the index small by covering only the rows queries care about. On large production tables, I create indexes concurrently so the build doesn't block writes. Indexing is the first optimization I reach for when a query is slow.
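A minimal sketch of the GIN setup, assuming a hypothetical Article model with a dedicated SearchVectorField (kept current via a database trigger or an overridden save()):

from django.contrib.postgres.indexes import GinIndex
from django.contrib.postgres.search import SearchVectorField
from django.db import models


class Article(models.Model):
    title = models.CharField(max_length=200)
    body = models.TextField()
    # Populated separately, e.g. by a trigger or in save()
    search_vector = SearchVectorField(null=True)

    class Meta:
        indexes = [GinIndex(fields=['search_vector'], name='article_search_idx')]

Queries then go through SearchQuery, which the GIN index accelerates:

from django.contrib.postgres.search import SearchQuery

Article.objects.filter(search_vector=SearchQuery('slow queries'))

To check index usage without leaving the ORM, QuerySet.explain() wraps EXPLAIN:

qs = Order.objects.filter(status='completed', completed_at__year=2024)
print(qs.explain())              # estimated plan: look for an Index Scan, not a Seq Scan
print(qs.explain(analyze=True))  # PostgreSQL only: runs the query and reports real timings

For concurrent creation, Django ships AddIndexConcurrently. The migration below is a sketch with a hypothetical app label and dependency; it must be non-atomic because CREATE INDEX CONCURRENTLY cannot run inside a transaction:

from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models


class Migration(migrations.Migration):
    atomic = False  # required: CONCURRENTLY is disallowed inside a transaction

    dependencies = [('orders', '0002_previous')]  # hypothetical

    operations = [
        AddIndexConcurrently(
            model_name='order',
            index=models.Index(fields=['total'], name='order_total_idx'),
        ),
    ]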