The shell from Day 1 sends fake responses. Today you'll connect it to the Claude API, handle async state properly, and deal with errors and loading states in a way that feels smooth to users.
If you called the Claude API directly from the browser, your API key would be visible to anyone who opened DevTools. Always route AI API calls through a backend proxy — even a simple one.
# Install a simple Express server
npm install express cors
npm install -D nodemon
const express = require('express');
const cors = require('cors');
const Anthropic = require('@anthropic-ai/sdk');

const app = express();

// Only the Vite dev server origin may call this proxy.
app.use(cors({ origin: 'http://localhost:5173' }));
app.use(express.json());

// The key lives only on the server — it is never shipped to the browser.
const client = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

// POST /api/chat — body: { messages: [{ role, content }, ...] }
// Responds with { content: string } or { error: string }.
app.post('/api/chat', async (req, res) => {
  const { messages } = req.body ?? {};

  // Reject malformed requests before spending an API call on them.
  if (!Array.isArray(messages) || messages.length === 0) {
    return res.status(400).json({ error: 'messages must be a non-empty array' });
  }

  try {
    const response = await client.messages.create({
      model: 'claude-3-haiku-20240307',
      max_tokens: 1024,
      messages, // full conversation history: [{ role, content }]
    });
    res.json({ content: response.content[0].text });
  } catch (err) {
    // Surface only the message — no stack traces to the client.
    res.status(500).json({ error: err.message });
  }
});

app.listen(3001, () => console.log('API server on :3001'));
# In a .env file (git-ignored)
ANTHROPIC_API_KEY=sk-ant-...
# Run the server — Node 20.6+ can load the .env file natively.
# (Plain `node server.js` would NOT read .env, and the API key would be undefined.)
node --env-file=.env server.js
/**
 * Send the user's text to the backend proxy and append Claude's reply.
 * Relies on state from the enclosing component: messages, setMessages,
 * setIsLoading, setError.
 * @param {string} userText - the message the user typed
 */
async function sendMessage(userText) {
  // Give the user message an id too, so every message keys consistently in lists.
  const userMsg = { id: Date.now(), role: 'user', content: userText };
  setMessages(prev => [...prev, userMsg]);
  setIsLoading(true);
  setError(null);
  try {
    const response = await fetch('http://localhost:3001/api/chat', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        // Send the full history, but strip UI-only fields like `id` —
        // the Anthropic API rejects message objects with extra keys.
        messages: [...messages, userMsg].map(({ role, content }) => ({ role, content }))
      })
    });
    if (!response.ok) throw new Error(`HTTP ${response.status}`);
    const data = await response.json();
    setMessages(prev => [...prev, {
      id: Date.now(),
      role: 'assistant',
      content: data.content
    }]);
  } catch (err) {
    setError('Failed to get response. Try again.');
  } finally {
    setIsLoading(false); // always clear loading, even on error
  }
}
messages array including the new user message.function TypingIndicator() {
return (
);
}
// In the messages render:
{messages.map(msg => <MessageBubble key={msg.id} message={msg} />)}
{isLoading && <TypingIndicator />}
{error && <div className="error-banner">{error}</div>}
import { useState, useCallback } from 'react';
// Without useCallback, sendMessage recreates on every render
// ChatInput would re-render unnecessarily
// NOTE(review): because `messages` changes after every exchange, this still
// recreates once per turn — fine here; a functional-update/ref pattern could
// remove the dependency entirely if ChatInput re-renders become a problem.
const sendMessage = useCallback(async (userText) => {
  // ... the function body
}, [messages]); // only recreate when messages changes
Today's exercise:

1. Create server.js with the Express proxy and install @anthropic-ai/sdk.
2. Create a .env file for your API key and add .env to .gitignore.
3. Update App.jsx to call your proxy instead of the fake response.
4. Add isLoading state and disable the input while waiting.
5. Use finally to clear loading state — it runs whether the request succeeds or fails.

Stretch goal: Add a system prompt input at the top of the UI that lets users customize Claude's behavior for the conversation. When set, include it as a system parameter in the API call.