Templates
Next.js + OpenAI

Next.js + OpenAI

A complete Next.js 15 App Router app that streams GPT-4o-mini responses over Server-Sent Events using the RAIS protocol.

Scaffold this automatically: npx create-ai-stream-app → choose OpenAI.

Install

npm install @react-ai-stream/react @react-ai-stream/ui

Server — /app/api/chat/route.ts

import { NextRequest } from 'next/server'
 
export const runtime = 'edge'
 
interface Message { role: string; content: string }
 
function chunk(data: object) {
  return new TextEncoder().encode(`data: ${JSON.stringify(data)}\n\n`)
}
 
export async function POST(req: NextRequest) {
  const { messages } = (await req.json()) as { messages: Message[] }
  const apiKey = process.env.OPENAI_API_KEY!
 
  const upstream = await fetch('https://api.openai.com/v1/chat/completions', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${apiKey}` },
    body: JSON.stringify({ model: 'gpt-4o-mini', messages, stream: true }),
    signal: req.signal,
  })
 
  const stream = new ReadableStream<Uint8Array>({
    async start(controller) {
      const reader = upstream.body!.getReader()
      const dec = new TextDecoder()
      let buf = ''
      while (true) {
        const { done, value } = await reader.read()
        if (done) break
        buf += dec.decode(value, { stream: true })
        const parts = buf.split('\n\n')
        buf = parts.pop() ?? ''
        for (const part of parts) {
          const line = part.split('\n').find(l => l.startsWith('data: '))
          if (!line) continue
          const data = line.slice(6).trim()
          if (data === '[DONE]') { controller.enqueue(chunk({ type: 'done' })); controller.close(); return }
          try {
            const parsed = JSON.parse(data)
            const text = parsed.choices?.[0]?.delta?.content
            if (text) controller.enqueue(chunk({ type: 'text', text }))
          } catch { /* skip */ }
        }
      }
      controller.enqueue(chunk({ type: 'done' }))
      controller.close()
    },
  })
 
  return new Response(stream, {
    headers: { 'Content-Type': 'text/event-stream', 'Cache-Control': 'no-cache' },
  })
}

Client — /app/page.tsx

'use client'
import { Chat } from '@react-ai-stream/ui'
import '@react-ai-stream/ui/styles'
import { useAIChat } from '@react-ai-stream/react'
 
export default function Page() {
  const chat = useAIChat({ endpoint: '/api/chat' })
  return (
    <main style={{ height: '100dvh', display: 'flex', flexDirection: 'column' }}>
      <Chat {...chat} placeholder="Ask GPT-4o anything…" />
    </main>
  )
}

.env.local

OPENAI_API_KEY=sk-...