On Day 2 you write the Dockerfile that packages your AI app. You will containerize a Python Flask API that calls an AI model.
# syntax=docker/dockerfile:1
# Start from an official, slim Python base image (pinned tag — never :latest)
FROM python:3.11-slim

# Set working directory in container (created automatically if missing)
WORKDIR /app

# Copy requirements first so the dependency layer stays cached
# until requirements.txt itself changes (Docker layer caching)
COPY requirements.txt .

# Install dependencies; --no-cache-dir keeps pip's cache out of the image
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code (add a .dockerignore to exclude .git, .env, __pycache__)
COPY . .

# Create and switch to a non-root user — never run the app as root
RUN useradd --system --uid 10001 appuser
USER appuser

# Document the port the app listens on (EXPOSE does not publish it)
EXPOSE 8000

# Exec-form CMD so Python runs as PID 1 and receives SIGTERM on `docker stop`
CMD ["python", "app.py"]
flask==3.0.0
anthropic==0.34.0
python-dotenv==1.0.0
from flask import Flask, request, jsonify
import anthropic
import os

# Flask app plus one shared Anthropic client. The API key is read from the
# environment so no secret is ever baked into the image.
app = Flask(__name__)
client = anthropic.Anthropic(api_key=os.environ.get('ANTHROPIC_API_KEY'))

@app.route('/chat', methods=['POST'])
def chat():
    """POST /chat — forward a user message to the model and return its reply.

    Expects JSON: {"message": "..."}.
    Returns {"reply": "..."} on success, 400 if the message is missing or
    empty, and 502 if the upstream API call fails.
    """
    # get_json(silent=True) returns None instead of raising on a missing or
    # malformed JSON body; `request.json` would raise 415 / yield None and
    # crash on the following .get().
    data = request.get_json(silent=True) or {}
    message = data.get('message', '')
    if not message:
        return jsonify({'error': 'message is required'}), 400
    try:
        response = client.messages.create(
            model='claude-opus-4-5',
            max_tokens=1024,
            messages=[{'role': 'user', 'content': message}],
        )
    except anthropic.APIError as exc:
        # Surface upstream failures as a gateway error instead of an
        # unhandled 500 traceback.
        return jsonify({'error': str(exc)}), 502
    return jsonify({'reply': response.content[0].text})

if __name__ == '__main__':
    # Bind to 0.0.0.0 so the server is reachable from outside the container.
    app.run(host='0.0.0.0', port=8000, debug=False)
# Build the image
docker build -t my-ai-app .

# Run it, passing the API key as an env var (never bake it into the image)
docker run -p 8000:8000 -e ANTHROPIC_API_KEY=your_key_here my-ai-app

# Test the /chat endpoint
curl -X POST http://localhost:8000/chat \
  -H "Content-Type: application/json" \
  -d '{"message": "Hello!"}'