Reasoning

A collapsible component that displays AI reasoning content, automatically opening during streaming and closing when finished.

The Reasoning component is a collapsible component that displays AI reasoning content, automatically opening during streaming and closing when finished.

Install using CLI

AI Elements Vue
shadcn-vue CLI
npx ai-elements-vue@latest add reasoning

Install Manually

Copy and paste the following files into the same folder.

Reasoning.vue
ReasoningTrigger.vue
ReasoningContent.vue
context.ts
index.ts
<script setup lang="ts">
import type { HTMLAttributes } from 'vue'
import { Collapsible } from '@repo/shadcn-vue/components/ui/collapsible'
import { cn } from '@repo/shadcn-vue/lib/utils'
import { useVModel } from '@vueuse/core'
import { computed, provide, ref, watch } from 'vue'
import { ReasoningKey } from './context'

// Collapsible container for AI reasoning text: auto-opens while tokens are
// streaming and auto-closes shortly after streaming ends (see watchers below).
interface Props {
  class?: HTMLAttributes['class'] // extra CSS classes, merged via cn()
  isStreaming?: boolean // true while reasoning tokens are still arriving
  open?: boolean // controlled open state (v-model:open)
  defaultOpen?: boolean // initial open state when `open` is uncontrolled
  duration?: number // externally supplied thinking duration, in seconds
}

const props = withDefaults(defineProps<Props>(), {
  isStreaming: false,
  defaultOpen: true,
  duration: undefined,
})

const emit = defineEmits<{
  (e: 'update:open', value: boolean): void
  (e: 'update:duration', value: number): void
}>()

// `passive` + `defaultValue` make the component work both controlled
// (parent binds v-model:open) and uncontrolled (internal state only).
const isOpen = useVModel(props, 'open', emit, {
  defaultValue: props.defaultOpen,
  passive: true,
})

// Local copy of the duration so it can also be computed internally
// when the parent does not pass one in.
const internalDuration = ref<number | undefined>(props.duration)

// Keep the local copy in sync when the parent updates the prop.
watch(() => props.duration, (newVal) => {
  internalDuration.value = newVal
})

// Store a newly measured duration and notify the parent.
function updateDuration(val: number) {
  internalDuration.value = val
  emit('update:duration', val)
}

// True once the panel has auto-closed; prevents repeated auto-closes.
// NOTE(review): this flag is never reset, so a second streaming session will
// auto-open but not auto-close again — confirm this is the intended behavior.
const hasAutoClosed = ref(false)
// Wall-clock timestamp (ms) of when the current streaming session started.
const startTime = ref<number | null>(null)

const MS_IN_S = 1000
const AUTO_CLOSE_DELAY = 1000 // ms to wait after streaming ends before closing

// Open the panel when streaming starts and time the session; when streaming
// stops, report the elapsed time rounded UP to whole seconds.
watch(() => props.isStreaming, (streaming) => {
  if (streaming) {
    isOpen.value = true

    if (startTime.value === null) {
      startTime.value = Date.now()
    }
  }
  else if (startTime.value !== null) {
    const calculatedDuration = Math.ceil((Date.now() - startTime.value) / MS_IN_S)
    updateDuration(calculatedDuration)
    startTime.value = null
  }
})

// Auto-close once, AUTO_CLOSE_DELAY ms after streaming finishes, but only if
// the panel opened by default and is still open. onCleanup cancels the pending
// timer whenever a dependency changes first (e.g. streaming resumes or the
// user closes the panel manually).
watch([() => props.isStreaming, isOpen, () => props.defaultOpen, hasAutoClosed], (_, __, onCleanup) => {
  if (props.defaultOpen && !props.isStreaming && isOpen.value && !hasAutoClosed.value) {
    const timer = setTimeout(() => {
      isOpen.value = false
      hasAutoClosed.value = true
    }, AUTO_CLOSE_DELAY)

    onCleanup(() => clearTimeout(timer))
  }
}, { immediate: true })

// Share streaming/open/duration state with <ReasoningTrigger> and
// <ReasoningContent> descendants via the injection key.
provide(ReasoningKey, {
  isStreaming: computed(() => props.isStreaming),
  isOpen,
  setIsOpen: (val: boolean) => { isOpen.value = val },
  duration: computed(() => internalDuration.value),
})
</script>

<template>
  <!-- v-model:open keeps the Collapsible in sync with the shared isOpen state -->
  <Collapsible
    v-model:open="isOpen"
    :class="cn('not-prose mb-4', props.class)"
  >
    <slot />
  </Collapsible>
</template>

Usage with AI SDK

Build a chatbot with reasoning using Deepseek R1.

Add the following component to your frontend:

pages/index.vue
<script setup lang="ts">
import { useChat } from '@ai-sdk/vue'
import { ref } from 'vue'

import {
  Conversation,
  ConversationContent,
  ConversationScrollButton,
} from '@/components/ai-elements/conversation'
import { Loader } from '@/components/ai-elements/loader'
import { Message, MessageContent, MessageResponse } from '@/components/ai-elements/message'
import {
  PromptInput,
  PromptInputSubmit,
  PromptInputTextarea,
} from '@/components/ai-elements/prompt-input'
import {
  Reasoning,
  ReasoningContent,
  ReasoningTrigger,
} from '@/components/ai-elements/reasoning'

// Current prompt text, bound to the textarea below.
const input = ref('')

const { messages, sendMessage, status } = useChat()

// Send the prompt and clear the textarea; ignores whitespace-only input.
function handleSubmit(e: Event) {
  e.preventDefault()

  if (!input.value.trim())
    return

  sendMessage({ text: input.value })
  input.value = ''
}

// A part counts as "streaming" only when it is the LAST part of the MOST
// RECENT message while the chat status is 'streaming'; every earlier part
// and message is treated as finished so its Reasoning panel can close.
function isStreamingPart(msgIndex: number, partIndex: number) {
  const lastMsg = messages.value.at(-1)
  const msg = messages.value[msgIndex]

  if (!lastMsg || msg.id !== lastMsg.id)
    return false

  const isLastPart = partIndex === msg.parts.length - 1
  return status.value === 'streaming' && isLastPart
}
</script>

<template>
  <div class="max-w-4xl mx-auto p-6 relative size-full rounded-lg border h-[600px]">
    <div class="flex flex-col h-full">
      <Conversation>
        <ConversationContent>
          <!-- Render each message; reasoning parts get a collapsible panel -->
          <template v-for="(message, msgIndex) in messages" :key="message.id">
            <Message :from="message.role">
              <MessageContent>
                <template v-for="(part, partIndex) in message.parts" :key="partIndex">
                  <MessageResponse v-if="part.type === 'text'">
                    {{ part.text }}
                  </MessageResponse>

                  <!-- is-streaming drives the auto open/close behavior -->
                  <Reasoning
                    v-else-if="part.type === 'reasoning'"
                    class="w-full"
                    :is-streaming="isStreamingPart(msgIndex, partIndex)"
                  >
                    <ReasoningTrigger />
                    <ReasoningContent :text="part.text" />
                  </Reasoning>
                </template>
              </MessageContent>
            </Message>
          </template>

          <!-- Spinner while the request is in flight but not yet streaming -->
          <Loader v-if="status === 'submitted'" />
        </ConversationContent>

        <ConversationScrollButton />
      </Conversation>

      <PromptInput
        class="mt-4 w-full max-w-2xl mx-auto relative"
        @submit="handleSubmit"
      >
        <PromptInputTextarea
          v-model="input"
          placeholder="Say something..."
          class="pr-12"
        />

        <PromptInputSubmit
          :status="status === 'streaming' ? 'streaming' : 'ready'"
          :disabled="!input.trim()"
          class="absolute bottom-1 right-1"
        />
      </PromptInput>
    </div>
  </div>
</template>

Add the following route to your backend:

server/api/chat.ts
// `UIMessage` is type-only: mark it with the inline `type` modifier so the
// import compiles under `isolatedModules` / `verbatimModuleSyntax`.
import { convertToModelMessages, streamText, type UIMessage } from 'ai'

// Maximum streaming duration (seconds) for serverless deployments.
export const maxDuration = 30

export default defineEventHandler(async (event) => {
  // Request body: optional model id plus the UI-level chat history.
  const { model, messages }: { model: string, messages: UIMessage[] }
    = await readBody(event)

  const result = streamText({
    // Fall back to DeepSeek R1, a reasoning-capable model.
    model: model || 'deepseek/deepseek-r1',
    // Convert UI messages to the model-message format streamText expects.
    messages: convertToModelMessages(messages),
  })

  // sendReasoning: true forwards reasoning tokens in the UI message stream
  // so the <Reasoning> component can render them on the client.
  return result.toUIMessageStreamResponse({
    sendReasoning: true,
  })
})

Features

  • Automatically opens when streaming content and closes when finished
  • Manual toggle control for user interaction
  • Smooth animations and transitions powered by Reka UI
  • Visual streaming indicator with pulsing animation
  • Composable architecture with separate trigger and content components
  • Built with accessibility in mind including keyboard navigation
  • Responsive design that works across different screen sizes
  • Seamlessly integrates with both light and dark themes
  • Built on top of shadcn-vue Collapsible primitives
  • TypeScript support with proper type definitions

Props

<Reasoning />

isStreamingboolean
false
Whether the reasoning is currently streaming (auto-opens and closes the panel).
classstring
''
Additional CSS classes to apply to the component.

<ReasoningTrigger />

classstring
''
Additional CSS classes to apply to the component.

<ReasoningContent />

textstring
The reasoning text to display in the panel (passed as `:text` in the usage example above).
classstring
''
Additional CSS classes to apply to the component.