AI question answer mode

Using `inkeep-qa` models with the Vercel AI SDK

Using useChat and a route

useChat is a cross-platform hook that helps you create a conversational experience with the Vercel AI SDK in React, Svelte, Vue, and Solid frontends.

To use useChat, first create an API route using Next.js. Here's where we'll call Inkeep.

API Route

app/api/chat/route.ts
import { LinksTool } from '@/lib/chat/inkeep-qa-schema'
import { createOpenAI } from '@ai-sdk/openai'
import { streamText } from 'ai'
 
export const runtime = 'edge'
 
// OpenAI-compatible client pointed at Inkeep's API endpoint.
// INKEEP_API_KEY is read from the environment at module load time.
const openai = createOpenAI({
  apiKey: process.env.INKEEP_API_KEY,
  baseURL: 'https://api.inkeep.com/v1'
})
 
/**
 * POST /api/chat — forwards the chat history to Inkeep's QA model and
 * streams the answer back to the client.
 */
export async function POST(req: Request) {
  const { messages } = await req.json()

  // Tag every forwarded message with the name Inkeep's QA mode expects.
  const taggedMessages = messages.map((m: any) => ({
    role: m.role,
    content: m.content,
    name: 'inkeep-qa-user-message',
    id: m.id
  }))

  const result = await streamText({
    model: openai('inkeep-qa-sonnet-3-5'),
    messages: taggedMessages,
    tools: { // to get the citation information
      provideLinks: {
        ...LinksTool
      }
    },
    toolChoice: 'auto'
  })

  return result.toAIStreamResponse()
}

Client

app/page.tsx
'use client'
 
import { useChat } from 'ai/react'
 
/**
 * Minimal chat page: renders the transcript, a loading indicator while the
 * model streams, and a prompt form wired to the useChat handlers.
 */
export default function Page() {
  const chat = useChat({
    streamMode: 'stream-data',
    sendExtraMessageFields: true,
    onResponse(response) {
      // Surface auth failures (e.g. a bad INKEEP_API_KEY) in the console.
      if (response.status === 401) {
        console.error(response.statusText)
      }
    }
  })
  const { messages, isLoading, input, handleSubmit, handleInputChange } = chat

  return (
    <>
      {messages.map(message => (
        <div key={message.id}>
          {message.role === 'user' ? 'User: ' : 'AI: '}
          {message.content}
        </div>
      ))}

      {isLoading && <div>Loading...</div>}

      <form onSubmit={handleSubmit}>
        <input name="prompt" value={input} onChange={handleInputChange} />
        <button type="submit">Submit</button>
      </form>
    </>
  )
}

Using Server Actions (AI SDK Actions)

streamUI is another way to use the Vercel AI SDK, but with React Server Components. This lets you stream entire UI components, not just the text that is then parsed and rendered on the client.

This example illustrates how to render assistant messages and a "sources" list provided by the provideLinks tool.

Server Action

import 'server-only'
 
import { createAI, getMutableAIState, streamUI } from 'ai/rsc'
import { createOpenAI } from '@ai-sdk/openai'
 
import { Message } from 'ai'
import { nanoid } from './utils'
import { LinksTool } from './inkeep-qa-schema'
 
// OpenAI-compatible client pointed at Inkeep's API endpoint.
// INKEEP_API_KEY is read from the environment at module load time.
const openai = createOpenAI({
  apiKey: process.env.INKEEP_API_KEY,
  baseURL: 'https://api.inkeep.com/v1'
})
 
// Server action: appends the user's message to AI state, streams the
// assistant's answer via streamUI, and records citation links once the
// provideLinks tool fires. Returns a streamable UI node for the client.
async function submitUserMessage(content: string) {
  'use server'

  const aiState = getMutableAIState<typeof AI>()

  // Record the incoming user message before calling the model so the full
  // history (including this turn) is forwarded below.
  aiState.update({
    ...aiState.get(),
    messages: [
      ...aiState.get().messages,
      {
        id: nanoid(),
        role: 'user',
        content
      }
    ]
  })

  // Stable id for the assistant's answer; used to upsert the streamed
  // message in state as `text` is called repeatedly with growing content.
  const answerMessageId = nanoid()

  const result = await streamUI({
    model: openai('inkeep-qa-sonnet-3-5'),
    messages: [
      ...aiState.get().messages.map((message: any) => ({
        role: message.role,
        content: message.content,
        name: 'inkeep-qa-user-message',
        id: message.id
      }))
    ],
    // Called on every streamed text delta with the accumulated content.
    text: ({ content }) => {
      const assistantAnswerMessage = {
        id: answerMessageId,
        role: 'assistant',
        content,
        name: 'inkeep-qa-assistant-message'
      } as Message

      const currentMessages = aiState.get().messages
      const lastMessage = currentMessages[currentMessages.length - 1]

      // Replace the in-progress answer if it is already the last message;
      // otherwise append it (first delta of this turn).
      aiState.update({
        ...aiState.get(),
        messages:
          lastMessage?.id === answerMessageId
            ? [...currentMessages.slice(0, -1), assistantAnswerMessage]
            : [...currentMessages, assistantAnswerMessage]
      })

      return <div>{assistantAnswerMessage.content}</div>
    },
    tools: {
      provideLinks: {
        ...LinksTool,
        generate: async ({ links }) => { // render sources UI once tool is complete
          // Attach the citation links to the just-streamed assistant message
          // and finalize AI state (done, not update — the turn is complete).
          const currentMessages = aiState.get().messages
          const lastMessage = currentMessages[currentMessages.length - 1]
          const lastMessageWithToolResults = {
            ...lastMessage,
            toolInvocations: [
              {
                toolName: 'provideLinks',
                result: links
              }
            ]
          } as Message

          aiState.done({
            ...aiState.get(),
            messages: [
              ...currentMessages.slice(0, -1),
              lastMessageWithToolResults
            ]
          })

          return <div>{lastMessageWithToolResults.content}</div>
        }
      }
    },
    toolChoice: 'auto'
  })

  return {
    id: nanoid(),
    display: result.value
  }
}
 
// Server-side conversation state: the chat id plus the full message history.
export type AIState = {
  chatId: string
  messages: Message[]
}

// Client-side render state: one entry per rendered message/response node.
export type UIState = {
  id: string
  display: React.ReactNode
}[]

// Provider wiring the server action and initial state into the React tree.
export const AI = createAI<AIState, UIState>({
  actions: {
    submitUserMessage
  },
  initialUIState: [],
  initialAIState: { chatId: nanoid(), messages: [] }
})
import { Chat } from '@/components/chat'
import { AI } from '@/lib/chat/actions'
import { nanoid } from '@/lib/chat/utils'
 
/**
 * Entry page: seeds a fresh chat id and wraps the chat UI in the AI
 * provider so AI state and server actions are available below.
 */
export default async function IndexPage() {
  const chatId = nanoid()

  return (
    <AI initialAIState={{ chatId, messages: [] }}>
      <Chat id={chatId} />
    </AI>
  )
}

Client

app/components/chat.tsx
'use client'
 
import { useUIState, useAIState, useActions } from 'ai/rsc'
import { Message } from 'ai'
import { UIState } from '@/lib/chat/actions'
import { nanoid } from '@/lib/chat/utils'
 
export interface ChatProps extends React.ComponentProps<'div'> {
  initialMessages?: Message[]
  id?: string
}
 
export function Chat({ id, className }: ChatProps) {
  const [messages] = useUIState()
 
  return (
    <div className="group w-full overflow-auto pl-0 peer-[[data-state=open]]:lg:pl-[250px] peer-[[data-state=open]]:xl:pl-[300px]">
      <div className={'pb-[200px] pt-4 md:pt-10'}>
        <ChatList messages={messages} />
        <div className="w-full h-px" />
      </div>
    </div>
  )
}
 
export interface ChatList {
  messages: UIState
}
 
/**
 * Renders the UIState messages and a prompt form. Submitting optimistically
 * appends the user's message to UI state, then appends the response node
 * returned by the submitUserMessage server action.
 *
 * Fixes: dropped the unused `messagesUIState` binding (the rendered list
 * comes from props; only the setter is needed), and typed the submit
 * handler's event instead of `any`.
 */
export function ChatList({ messages }: ChatList) {
  const [, setMessagesUIState] = useUIState()
  const { submitUserMessage } = useActions()

  const handleSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
    e.preventDefault()

    const form = e.currentTarget
    const input = form.elements.namedItem('prompt') as HTMLInputElement

    const value = input.value.trim()
    if (!value) return

    input.value = '' // Clear the input after submission

    const userMessage = {
      id: nanoid(),
      content: value,
      role: 'user'
    } as Message

    // Optimistically add user message UI
    setMessagesUIState(currentMessages => [
      ...currentMessages,
      {
        id: nanoid(),
        display: <div>{userMessage.content}</div>
      }
    ])

    // Submit and get response message
    const responseMessage = await submitUserMessage(value)
    setMessagesUIState(currentMessages => [...currentMessages, responseMessage])
  }

  return (
    <>
      <div className="relative mx-auto max-w-2xl px-4">
        {messages.map(message => (
          <div key={message.id}>{message.display}</div>
        ))}
      </div>

      <form onSubmit={handleSubmit}>
        <input name="prompt" />
        <button type="submit">Submit</button>
      </form>
    </>
  )
}

On this page