import csv
import os

from django.http import FileResponse, StreamingHttpResponse
class Echo:
    """File-like stub whose ``write`` hands the value straight back.

    ``csv.writer`` only needs an object exposing ``write``; by returning
    the formatted line instead of buffering it, each call's output can be
    yielded directly into a streaming HTTP response.
    """

    def write(self, value):
        """Return *value* unchanged so the caller can stream it."""
        return value
def export_large_csv(request):
    """Stream the full product table as a CSV attachment.

    Rows are produced lazily and formatted one at a time through an
    ``Echo`` pseudo-buffer, so memory use stays constant no matter how
    many products exist.
    """

    def iter_rows():
        """Yield the header row, then one list of strings per product."""
        # Imported lazily, at first iteration of the generator.
        from products.models import Product

        yield ['ID', 'Name', 'Price', 'Stock']
        # iterator() pulls rows from the DB in chunks instead of caching
        # the whole queryset.
        for item in Product.objects.iterator(chunk_size=1000):
            yield [str(item.id), item.name, str(item.price), str(item.stock)]

    writer = csv.writer(Echo())
    # writer.writerow returns whatever Echo.write returns: the formatted line.
    formatted_lines = (writer.writerow(fields) for fields in iter_rows())
    response = StreamingHttpResponse(formatted_lines, content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="products.csv"'
    return response
def download_file(request, file_path):
    """Serve the file at *file_path* as a streamed attachment download.

    Args:
        request: The incoming HttpRequest (unused here, required by URL routing).
        file_path: Filesystem path of the file to serve.

    Returns:
        FileResponse streaming the file in chunks; FileResponse takes
        ownership of the open handle and closes it when the response is
        fully consumed.

    Raises:
        FileNotFoundError: If *file_path* does not exist.
    """
    # SECURITY NOTE(review): if file_path derives from user input, it must be
    # validated against an allowed base directory before this call — as
    # written, a crafted path ("../../etc/passwd") would be served. Confirm
    # the caller sanitizes it.
    response = FileResponse(
        open(file_path, 'rb'),
        content_type='application/octet-stream',
    )
    # Bug fix: `os` was referenced here without ever being imported, so this
    # line raised NameError on every request. `import os` added at file top.
    filename = os.path.basename(file_path)
    response['Content-Disposition'] = f'attachment; filename="{filename}"'
    return response
Streaming responses serve large files without loading them entirely into memory. I use StreamingHttpResponse or FileResponse for file downloads. For CSV generation, I yield rows incrementally; the generator pattern keeps memory usage constant regardless of file size. I set appropriate Content-Type and Content-Disposition headers. For video streaming, I implement HTTP range-request support. This makes it possible to serve large datasets or media files efficiently without exhausting server memory.