Recipes
Stream Claude in Next.js

Stream Claude in Next.js

Complete setup from install to streaming Claude responses in a Next.js App Router project.

Install

npm install @react-ai-stream/react @react-ai-stream/ui

Add your Anthropic API key to `.env.local`:

ANTHROPIC_API_KEY=sk-ant-...

API route

app/api/chat/route.ts
import { NextRequest } from 'next/server'
 
export const runtime = 'edge'
 
/**
 * Chat endpoint: proxies the request to the Anthropic Messages API and
 * re-emits the upstream SSE stream as simplified `{type: 'text'|'done'}`
 * events the client hook consumes.
 *
 * Request body: `{ messages: Array<{ role: string, ... }> }`.
 * Responds 400 on a malformed payload, mirrors upstream error statuses,
 * otherwise streams `text/event-stream`.
 */
export async function POST(req: NextRequest) {
  const { messages } = await req.json()

  // Reject malformed payloads up front instead of crashing in .filter below.
  if (!Array.isArray(messages)) {
    return new Response(JSON.stringify({ error: 'messages must be an array' }), {
      status: 400,
      headers: { 'Content-Type': 'application/json' },
    })
  }

  const response = await fetch('https://api.anthropic.com/v1/messages', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'x-api-key': process.env.ANTHROPIC_API_KEY!,
      'anthropic-version': '2023-06-01',
    },
    body: JSON.stringify({
      model: 'claude-sonnet-4-6',
      max_tokens: 1024,
      system: 'You are a helpful assistant.',
      // Anthropic takes the system prompt as a top-level field, so strip
      // any system-role entries from the conversation history.
      messages: messages.filter((m: { role: string }) => m.role !== 'system'),
      stream: true,
    }),
  })

  // Surface upstream errors to the client with the original status code.
  if (!response.ok) {
    const body = await response.text()
    return new Response(JSON.stringify({ error: body }), {
      status: response.status,
      headers: { 'Content-Type': 'application/json' },
    })
  }

  const enc = new TextEncoder()
  const reader = response.body!.getReader()

  const stream = new ReadableStream({
    async start(controller) {
      const send = (d: object) => controller.enqueue(enc.encode(`data: ${JSON.stringify(d)}\n\n`))
      const decoder = new TextDecoder()
      let buf = ''
      try {
        while (true) {
          const { done, value } = await reader.read()
          if (done) break
          // SSE events are separated by a blank line; keep any trailing
          // partial event in the buffer until the next chunk arrives.
          buf += decoder.decode(value, { stream: true })
          const parts = buf.split('\n\n')
          buf = parts.pop() ?? ''
          for (const part of parts) {
            for (const line of part.split('\n')) {
              if (!line.startsWith('data: ')) continue
              try {
                const ev = JSON.parse(line.slice(6))
                if (ev.type === 'content_block_delta' && ev.delta?.type === 'text_delta')
                  send({ type: 'text', text: ev.delta.text })
                else if (ev.type === 'message_stop') {
                  send({ type: 'done' })
                  controller.close()
                  // Release the upstream connection rather than leaving it open.
                  void reader.cancel()
                  return
                }
              } catch { /* non-JSON lines (e.g. keep-alives) are expected; skip */ }
            }
          }
        }
        send({ type: 'done' })
        controller.close()
      } catch (e) {
        // A mid-stream network failure must error the client stream,
        // otherwise the consumer hangs waiting for more data.
        controller.error(e)
        void reader.cancel()
      }
    },
    // Fires when the client disconnects: abort the upstream read so we
    // stop paying for tokens nobody is receiving.
    cancel() {
      void reader.cancel()
    },
  })

  return new Response(stream, {
    headers: { 'Content-Type': 'text/event-stream', 'Cache-Control': 'no-cache' },
  })
}

Component

app/page.tsx
'use client'
import { useAIChat } from '@react-ai-stream/react'
import { Chat } from '@react-ai-stream/ui'
import '@react-ai-stream/ui/styles'

// Centered chat column filling most of the viewport height.
const containerStyle = { height: '80vh', maxWidth: 720, margin: '0 auto', padding: 24 }

/**
 * Chat page: wires the streaming hook (pointed at the API route above)
 * into the prebuilt Chat component.
 */
export default function Page() {
  const chat = useAIChat({ endpoint: '/api/chat' })

  return (
    <div style={containerStyle}>
      <Chat
        messages={chat.messages}
        onSend={chat.sendMessage}
        onStop={chat.stop}
        loading={chat.loading}
      />
    </div>
  )
}

That's it. Claude streams token by token into the Chat component.