Examples

Explore real-world examples and common use cases for AI-UI-Kit components.

Interactive Demo

Try our components with full functionality in the interactive playground embedded in the hosted docs.

Code Examples

Basic Chat Implementation

BasicChat.tsx
import { Chat } from 'aichatkit'
import { useState } from 'react'

// Message shape used throughout these examples; the library may export its own type
type Message = {
  id: string
  role: 'user' | 'assistant' | 'error'
  content: string
  timestamp: Date
}

function BasicChat() {
  const [messages, setMessages] = useState<Message[]>([
    {
      id: '1',
      role: 'assistant',
      content: 'Hello! How can I help you today?',
      timestamp: new Date()
    }
  ])
 
  const handleSendMessage = async (content: string) => {
    // Add user message
    const userMessage = {
      id: Date.now().toString(),
      role: 'user' as const,
      content,
      timestamp: new Date()
    }
    setMessages(prev => [...prev, userMessage])
 
    // Simulate AI response
    setTimeout(() => {
      const aiMessage = {
        id: (Date.now() + 1).toString(),
        role: 'assistant' as const,
        content: `I understand you said: "${content}". How can I help you further?`,
        timestamp: new Date()
      }
      setMessages(prev => [...prev, aiMessage])
    }, 1000)
  }
 
  return (
    <div style={{ height: '600px' }}>
      <Chat
        messages={messages}
        onSendMessage={handleSendMessage}
        config={{
          placeholder: "Type your message...",
          enableRetry: true,
          enableCopy: true
        }}
      />
    </div>
  )
}

OpenAI Integration

OpenAIChat.tsx
import { Chat } from 'aichatkit'
import { useState } from 'react'
import OpenAI from 'openai'

type Message = { id: string; role: 'user' | 'assistant' | 'error'; content: string; timestamp: Date }

// Exposing an API key in the browser is only acceptable for local demos;
// in production, proxy requests through your own server instead
const openai = new OpenAI({
  apiKey: process.env.NEXT_PUBLIC_OPENAI_API_KEY,
  dangerouslyAllowBrowser: true
})

function OpenAIChat() {
  const [messages, setMessages] = useState<Message[]>([])
  const [isLoading, setIsLoading] = useState(false)
 
  const handleSendMessage = async (content: string) => {
    const userMessage = {
      id: Date.now().toString(),
      role: 'user' as const,
      content,
      timestamp: new Date()
    }
    setMessages(prev => [...prev, userMessage])
    setIsLoading(true)
 
    try {
      const completion = await openai.chat.completions.create({
        messages: [
          { role: 'system', content: 'You are a helpful assistant.' },
          // Forward only user/assistant turns; the API rejects other roles
          ...messages
            .filter(msg => msg.role !== 'error')
            .map(msg => ({ role: msg.role as 'user' | 'assistant', content: msg.content })),
          { role: 'user', content }
        ],
        model: 'gpt-3.5-turbo',
      })
 
      const aiMessage = {
        id: (Date.now() + 1).toString(),
        role: 'assistant' as const,
        content: completion.choices[0].message.content || 'Sorry, I could not generate a response.',
        timestamp: new Date()
      }
      setMessages(prev => [...prev, aiMessage])
    } catch (error) {
      const errorMessage = {
        id: (Date.now() + 1).toString(),
        role: 'error' as const,
        content: 'Failed to get response. Please try again.',
        timestamp: new Date()
      }
      setMessages(prev => [...prev, errorMessage])
    } finally {
      setIsLoading(false)
    }
  }
 
  return (
    <Chat
      messages={messages}
      onSendMessage={handleSendMessage}
      isThinking={isLoading}
      config={{
        placeholder: "Ask me anything...",
        enableRetry: true,
        enableCopy: true,
        enableEdit: true
      }}
    />
  )
}

Streaming Response

StreamingChat.tsx
import { Chat } from 'aichatkit'
import { useState } from 'react'

type Message = { id: string; role: 'user' | 'assistant' | 'error'; content: string; timestamp: Date }

function StreamingChat() {
  const [messages, setMessages] = useState<Message[]>([])
  const [streamingMessageId, setStreamingMessageId] = useState<string | null>(null)
 
  const handleSendMessage = async (content: string) => {
    const userMessage = {
      id: Date.now().toString(),
      role: 'user' as const,
      content,
      timestamp: new Date()
    }
    setMessages(prev => [...prev, userMessage])
 
    // Create placeholder for streaming message
    const assistantId = (Date.now() + 1).toString()
    const assistantMessage = {
      id: assistantId,
      role: 'assistant' as const,
      content: '',
      timestamp: new Date()
    }
    setMessages(prev => [...prev, assistantMessage])
    setStreamingMessageId(assistantId)
 
    // Simulate streaming
    const words = "This is a simulated streaming response that appears word by word to demonstrate the streaming functionality of AI-UI-Kit.".split(' ')
    
    for (let i = 0; i < words.length; i++) {
      await new Promise(resolve => setTimeout(resolve, 100))
      
      setMessages(prev => 
        prev.map(msg => 
          msg.id === assistantId 
            ? { ...msg, content: words.slice(0, i + 1).join(' ') }
            : msg
        )
      )
    }
 
    setStreamingMessageId(null)
  }
 
  return (
    <Chat
      messages={messages}
      onSendMessage={handleSendMessage}
      streamingMessageId={streamingMessageId}
      config={{
        placeholder: "Type to see streaming response...",
        enableRetry: true
      }}
    />
  )
}
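
The loop above simulates streaming. With a real provider you update the same placeholder message as chunks arrive; a minimal sketch of the inner loop using the OpenAI SDK's streaming mode (assuming the openai client from the previous example) could look like this:

// Hypothetical replacement for the simulated loop: stream deltas from OpenAI
// and append each one to the placeholder assistant message
const stream = await openai.chat.completions.create({
  model: 'gpt-3.5-turbo',
  messages: [{ role: 'user', content }],
  stream: true,
})

for await (const chunk of stream) {
  const delta = chunk.choices[0]?.delta?.content ?? ''
  if (!delta) continue
  setMessages(prev =>
    prev.map(msg =>
      msg.id === assistantId
        ? { ...msg, content: msg.content + delta }
        : msg
    )
  )
}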

Custom Styled Chat

CustomStyledChat.tsx
import { Chat } from 'aichatkit'
import { useState } from 'react'

type Message = { id: string; role: 'user' | 'assistant' | 'error'; content: string; timestamp: Date }

function CustomStyledChat() {
  const [messages, setMessages] = useState<Message[]>([])
 
  const handleSendMessage = (content: string) => {
    const newMessage = {
      id: Date.now().toString(),
      content,
      role: 'user' as const,
      timestamp: new Date()
    }
    setMessages(prev => [...prev, newMessage])
  }
 
  return (
    <div 
      style={{
        height: '600px',
        background: 'linear-gradient(135deg, #667eea 0%, #764ba2 100%)',
        padding: '20px',
        borderRadius: '16px'
      }}
    >
      {/* styled-jsx (Next.js); `global` is needed so the rules reach the
          DOM rendered inside the Chat component */}
      <style jsx global>{`
        .custom-chat {
          --ai-background: rgba(255, 255, 255, 0.95);
          --ai-surface: rgba(255, 255, 255, 0.8);
          --ai-primary: #667eea;
          --ai-border: rgba(255, 255, 255, 0.3);
          --ai-message-user-bg: #667eea;
          --ai-message-assistant-bg: rgba(255, 255, 255, 0.9);
          backdrop-filter: blur(10px);
          border-radius: 12px;
          box-shadow: 0 8px 32px rgba(0, 0, 0, 0.1);
        }
      `}</style>
      
      <Chat
        className="custom-chat"
        messages={messages}
        onSendMessage={handleSendMessage}
        config={{
          placeholder: "Message with custom styling...",
          enableRetry: true,
          enableCopy: true
        }}
      />
    </div>
  )
}
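
styled-jsx only works out of the box in Next.js. In a plain React project you can set the same CSS variables through the style prop instead (a sketch; the cast is needed because React's CSSProperties type doesn't know custom properties):

// Plain-React alternative to the styled-jsx block above
const themeVars = {
  '--ai-background': 'rgba(255, 255, 255, 0.95)',
  '--ai-primary': '#667eea',
  '--ai-message-user-bg': '#667eea',
} as React.CSSProperties

<div className="custom-chat" style={themeVars}>
  <Chat messages={messages} onSendMessage={handleSendMessage} />
</div>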

File Upload Chat

FileUploadChat.tsx
import { Chat } from 'aichatkit'
import { useState } from 'react'

type Message = { id: string; role: 'user' | 'assistant' | 'error'; content: string; timestamp: Date }

function FileUploadChat() {
  const [messages, setMessages] = useState<Message[]>([])
 
  const handleSendMessage = (content: string) => {
    const userMessage = {
      id: Date.now().toString(),
      role: 'user' as const,
      content,
      timestamp: new Date()
    }
    setMessages(prev => [...prev, userMessage])
 
    // Simulate file processing response
    setTimeout(() => {
      const aiMessage = {
        id: (Date.now() + 1).toString(),
        role: 'assistant' as const,
        content: content.includes('file') 
          ? 'I can see you mentioned a file. File upload functionality will be available in the full component.'
          : `You said: "${content}"`,
        timestamp: new Date()
      }
      setMessages(prev => [...prev, aiMessage])
    }, 1000)
  }
 
  const handleFileUpload = (files: FileList) => {
    const fileMessage = {
      id: Date.now().toString(),
      role: 'user' as const,
      content: `Uploaded ${files.length} file(s): ${Array.from(files).map(f => f.name).join(', ')}`,
      timestamp: new Date()
    }
    setMessages(prev => [...prev, fileMessage])
  }
 
  return (
    <Chat
      messages={messages}
      onSendMessage={handleSendMessage}
      onFileUpload={handleFileUpload}
      config={{
        placeholder: "Send a message or upload files...",
        enableFileUpload: true,
        acceptedFileTypes: ['image/*', '.pdf', '.txt'],
        maxFileSize: 5 * 1024 * 1024, // 5MB
        enableRetry: true
      }}
    />
  )
}
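
The handler above only announces the files. To actually include a small text file's contents in the conversation, you can read it client-side with the standard File API; a sketch of an alternative handler (the 2,000-character cap is an arbitrary assumption):

// Read small text files in the browser and add their contents as a message
const handleFileUpload = async (files: FileList) => {
  for (const file of Array.from(files)) {
    const text = await file.text() // built-in File API
    setMessages(prev => [...prev, {
      id: crypto.randomUUID(), // avoids Date.now() collisions inside the loop
      role: 'user' as const,
      content: `Contents of ${file.name}:\n\n${text.slice(0, 2000)}`,
      timestamp: new Date()
    }])
  }
}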

Component Demos

Live demos of the individual Message component, the Chat Input component, and the full Chat component are embedded in the hosted docs.

Integration Examples

With Vercel AI SDK

VercelAIChat.tsx
import { useChat } from 'ai/react'
import { Chat } from 'aichatkit'
 
function VercelAIChat() {
  const { messages, append, isLoading } = useChat()
 
  // Convert Vercel AI messages to AI-UI-Kit format
  const formattedMessages = messages.map(msg => ({
    id: msg.id,
    role: msg.role as 'user' | 'assistant',
    content: msg.content,
    timestamp: new Date(msg.createdAt || Date.now())
  }))
 
  return (
    <Chat
      messages={formattedMessages}
      onSendMessage={(content) => {
        // append adds the user message and triggers the request in one step
        append({ role: 'user', content })
      }}
      isThinking={isLoading}
      config={{
        placeholder: "Chat with AI...",
        enableRetry: true,
        enableCopy: true
      }}
    />
  )
}
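
Note that append both records the user message locally and fires the request in a single call; setting the input programmatically and then calling handleSubmit races against React's asynchronous state updates and tends to submit the previous input.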

With Anthropic Claude

ClaudeChat.tsx
import { Chat } from 'aichatkit'
import { useState } from 'react'

type Message = { id: string; role: 'user' | 'assistant' | 'error'; content: string; timestamp: Date }

function ClaudeChat() {
  const [messages, setMessages] = useState<Message[]>([])
  const [isLoading, setIsLoading] = useState(false)
 
  const handleSendMessage = async (content: string) => {
    const userMessage = {
      id: Date.now().toString(),
      role: 'user' as const,
      content,
      timestamp: new Date()
    }
    setMessages(prev => [...prev, userMessage])
    setIsLoading(true)
 
    try {
      const response = await fetch('/api/claude', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ 
          messages: [...messages, userMessage]
        })
      })
 
      if (!response.ok) throw new Error(`Request failed with status ${response.status}`)
      const data = await response.json()
      
      const aiMessage = {
        id: (Date.now() + 1).toString(),
        role: 'assistant' as const,
        content: data.content,
        timestamp: new Date()
      }
      setMessages(prev => [...prev, aiMessage])
    } catch (error) {
      const errorMessage = {
        id: (Date.now() + 1).toString(),
        role: 'error' as const,
        content: 'Failed to get a response. Please try again.',
        timestamp: new Date()
      }
      setMessages(prev => [...prev, errorMessage])
    } finally {
      setIsLoading(false)
    }
  }
 
  return (
    <Chat
      messages={messages}
      onSendMessage={handleSendMessage}
      isThinking={isLoading}
      config={{
        placeholder: "Chat with Claude...",
        enableRetry: true
      }}
    />
  )
}
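
The example assumes a server route at /api/claude. A minimal sketch of such a route (here as a Next.js App Router handler using the official @anthropic-ai/sdk; the model name is an assumption) might look like:

// app/api/claude/route.ts — hypothetical server route backing the example above
import Anthropic from '@anthropic-ai/sdk'

const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY })

export async function POST(req: Request) {
  const { messages } = await req.json()

  const response = await anthropic.messages.create({
    model: 'claude-3-5-sonnet-latest', // assumed model name; pick one your account supports
    max_tokens: 1024,
    // Forward only user/assistant turns; the API rejects other roles
    messages: messages
      .filter((m: { role: string }) => m.role === 'user' || m.role === 'assistant')
      .map((m: { role: 'user' | 'assistant'; content: string }) => ({
        role: m.role,
        content: m.content,
      })),
  })

  // The SDK returns content blocks; return the first text block
  const block = response.content[0]
  return Response.json({ content: block.type === 'text' ? block.text : '' })
}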

Best Practices

Message Handling

// ✅ Good: Handle errors gracefully
const handleSendMessage = async (content: string) => {
  try {
    // Add user message immediately
    const userMessage = { id: Date.now().toString(), role: 'user', content, timestamp: new Date() }
    setMessages(prev => [...prev, userMessage])
    
    // Call API
    const response = await callAI(content)
    
    // Add AI response
    const aiMessage = { id: (Date.now() + 1).toString(), role: 'assistant', content: response, timestamp: new Date() }
    setMessages(prev => [...prev, aiMessage])
  } catch (error) {
    // Add error message
    const errorMessage = { id: (Date.now() + 1).toString(), role: 'error', content: 'Failed to send message', timestamp: new Date() }
    setMessages(prev => [...prev, errorMessage])
  }
}
 
// ❌ Bad: No error handling
const handleSendMessage = async (content: string) => {
  const response = await callAI(content) // Could throw
  setMessages(prev => [...prev, response])
}
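
A related pitfall: Date.now().toString() can produce duplicate IDs when two messages are created within the same millisecond. crypto.randomUUID(), built into modern browsers and Node, avoids this:

// ✅ Good: collision-free message IDs
const userMessage = {
  id: crypto.randomUUID(),
  role: 'user' as const,
  content,
  timestamp: new Date()
}

// ❌ Bad: collides if two messages share a millisecond
const riskyId = Date.now().toString()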

Performance Optimization

// ✅ Good: Memoize expensive operations
const MemoizedChat = React.memo(Chat)
 
// ✅ Good: Use keys for message lists
{messages.map(message => (
  <Message key={message.id} message={message} />
))}
 
// ✅ Good: Implement virtual scrolling for large lists
import { FixedSizeList as List } from 'react-window'
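
react-window's FixedSizeList renders only the rows currently in view. A minimal sketch (the fixed 72px row height is an assumption; chat rows usually vary, which calls for VariableSizeList instead):

{/* Render only the visible rows of a long transcript */}
<List
  height={600}
  width="100%"
  itemCount={messages.length}
  itemSize={72}
>
  {({ index, style }) => (
    <div style={style}>
      <Message message={messages[index]} />
    </div>
  )}
</List>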

All examples are fully functional and can be copied directly into your project. Make sure to install the required dependencies and set up your API keys.

Next Steps