Most real AI apps need multiple services: API server, database, cache, maybe a model server. Docker Compose runs them all together with one command.
Docker Compose defines your entire application stack in one YAML file and starts everything with docker compose up. For AI apps, a common stack is: Python API + PostgreSQL database + Redis cache.
# docker-compose.yml — API + PostgreSQL + Redis development stack.
# NOTE: the top-level `version` key is obsolete in Compose v2 (it is ignored
# with a warning); kept here for compatibility with older docs, safe to remove.
version: '3.8'

services:
  api:
    build: .
    ports:
      - "8000:8000"    # quoted — unquoted HH:MM-style values can hit YAML's sexagesimal trap
    environment:
      - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}    # read from the host shell / .env
      - DATABASE_URL=postgresql://user:password@db:5432/chatbot
      - REDIS_URL=redis://redis:6379
    # depends_on controls start ORDER only — it does not wait for Postgres
    # to be ready to accept connections. Retry in the app, or add healthchecks.
    depends_on:
      - db
      - redis
    volumes:
      - .:/app  # Hot reload during development

  db:
    image: postgres:15-alpine
    environment:
      - POSTGRES_USER=user
      - POSTGRES_PASSWORD=password
      - POSTGRES_DB=chatbot
    volumes:
      - postgres_data:/var/lib/postgresql/data  # persist data across restarts
    ports:
      - "5432:5432"  # exposed to the host for local debugging only

  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"  # exposed to the host for local debugging only

# Named volume so Postgres data survives `docker compose down` (without -v).
volumes:
  postgres_data:

docker compose up                # Start all services
# Everyday Compose commands (run from the directory containing the compose file):
docker compose up -d # Start in background (detached)
docker compose down # Stop and remove containers
docker compose down -v # Also remove volumes (data)
docker compose logs api # View logs for api service
docker compose exec api bash # Shell into api container
docker compose ps # Status of all servicesimport psycopg2
import os
def get_db():
return psycopg2.connect(os.environ.get('DATABASE_URL'))
def load_history(session_id):
    """Return the conversation for ``session_id``, oldest message first.

    Args:
        session_id: key identifying one conversation in the messages table.

    Returns:
        list[dict]: one ``{'role': ..., 'content': ...}`` dict per row.
    """
    conn = get_db()
    try:
        # ``with conn`` manages the TRANSACTION only — psycopg2 does NOT
        # close the connection on block exit, so close it ourselves or
        # every call leaks one connection.
        with conn:
            with conn.cursor() as cur:
                # Parameterized query — never interpolate session_id directly.
                cur.execute(
                    'SELECT role, content FROM messages '
                    'WHERE session_id = %s ORDER BY created_at',
                    (session_id,)
                )
                return [{'role': r, 'content': c} for r, c in cur.fetchall()]
    finally:
        conn.close()
def save_message(session_id, role, content):
with get_db() as conn:
with conn.cursor() as cur:
cur.execute(
'INSERT INTO messages (session_id, role, content) '
'VALUES (%s, %s, %s)',
(session_id, role, content)
)
conn.commit()docker compose up and watch all three services start.docker compose exec db psql -U user -d chatbot to connect to Postgres.docker compose down -v and docker compose up — notice the database is fresh each time.