// lib/hooks/useChatController.ts
import { useCallback, useMemo } from 'react'
import { MessageUI } from '@/lib/types/database'
import { useChat } from './useChat'
import { useUser } from './useUser'
import { useUsageLimit } from './useUsageLimit'
import { useChatStreaming } from './useChatStreaming'
import { useUsageRefresh } from '@/lib/contexts/UsageContext'
import { createChatRequestBody } from '@/lib/utils/streamingUtils'
import { composeDisplayMessages, deleteMessage } from '@/lib/utils/chatMessageUtils'

/** Inputs for {@link useChatController}. */
interface UseChatControllerProps {
  /** Book the chat belongs to; when absent, a new chat cannot be auto-created. */
  bookId?: string
  /** File paths included as context with every chat request. */
  includedFiles: string[]
}

/**
 * Everything the chat UI needs: persisted chat state, in-flight streaming
 * state, usage-limit state, and the action callbacks that mutate them.
 *
 * NOTE(review): several fields are typed `any` (currentChat, chatHistory,
 * currentToolExecution, toolExecutionResults, usageInfo) — presumably shapes
 * from useChat/useChatStreaming/useUsageLimit; consider exporting concrete
 * types from those hooks and tightening these. Narrowing them here would be
 * a breaking change for callers, so it is left as a TODO.
 */
interface ChatControllerReturn {
  // Chat state
  currentChat: any
  messages: MessageUI[]
  // Persisted messages merged with any in-flight streaming message/tool output.
  allDisplayMessages: (MessageUI & { _isStreaming?: boolean })[]
  chatHistory: any[]
  isLoadingHistory: boolean
  chatError: string | null
  
  // Streaming state
  isStreaming: boolean
  // Partial AI reply accumulated while the response streams in.
  streamingMessage: string
  aiPlanning: string
  currentToolExecution: any
  toolExecutionResults: any[]
  
  // Usage state
  usageInfo: any
  usageLimitError: string | null
  
  // Actions
  sendMessage: (content: string, selectedModel: string) => Promise<void>
  createNewChat: () => Promise<any>
  loadChat: (chatId: string) => Promise<void>
  updateChatTitle: (title: string) => Promise<void>
  deleteChat: (chatId: string) => Promise<void>
  updateChatModel: (model: string) => Promise<void>
  deleteMessage: (messageId: string) => Promise<void>
  setUsageLimitError: (error: string | null) => void
  // Aborts the in-flight request/stream.
  stopExecution: () => void
  
  // Loading states
  isLoading: boolean
}

/**
 * Orchestrates chat persistence, usage-limit checks, and SSE streaming for a
 * book's chat UI behind one interface.
 *
 * Composes `useChat` (messages/history/persistence), `useUsageLimit`
 * (per-model quota), and `useChatStreaming` (in-flight stream state).
 *
 * @param bookId - Book the chat belongs to; required to auto-create a chat.
 * @param includedFiles - File paths sent as context with every request.
 */
export function useChatController({ bookId, includedFiles }: UseChatControllerProps): ChatControllerReturn {
  const { userId } = useUser()
  const { refreshUsageStats } = useUsageRefresh()
  
  // Core chat persistence & history
  const {
    currentChat,
    messages,
    error: chatError,
    chatHistory,
    isLoadingHistory,
    createNewChat,
    loadChat,
    saveMessage,
    updateChatTitle,
    deleteChat,
    updateChatModel,
  } = useChat({ bookId, userId })

  const { 
    usageInfo, 
    usageLimitError, 
    checkUsageLimit, 
    setUsageLimitError, 
    refreshUsage 
  } = useUsageLimit(userId || null, refreshUsageStats)
  
  const {
    isStreaming,
    streamingMessage,
    aiPlanning,
    currentToolExecution,
    toolExecutionResults,
    startStreaming,
    stopStreaming,
    abortControllerRef,
    currentStreamingContentRef,
    stopExecution,
    processStreamChunk
  } = useChatStreaming()

  // Persisted messages merged with any in-flight streaming/tool state.
  const allDisplayMessages = useMemo(() => 
    composeDisplayMessages(
      messages,
      isStreaming,
      streamingMessage,
      aiPlanning,
      currentToolExecution,
      toolExecutionResults
    ), [messages, isStreaming, streamingMessage, aiPlanning, currentToolExecution, toolExecutionResults]
  )

  /**
   * Sends a user message: checks quota, ensures a chat exists, persists the
   * user message, streams the AI response over SSE, then persists the reply.
   *
   * @throws When chat creation fails, no chat is available, or the request
   *         fails for a reason other than a quota 429 or a user abort.
   */
  const sendMessage = useCallback(async (content: string, selectedModel: string) => {
    // Ignore empty / whitespace-only input
    if (!content.trim()) return
    
    // Enforce the per-model usage quota before doing any work
    const canProceed = await checkUsageLimit(selectedModel)
    if (!canProceed) return

    // Lazily create a chat on the first message
    let chatToUse = currentChat
    if (!chatToUse && bookId && userId) {
      chatToUse = await createNewChat()
      if (!chatToUse) {
        throw new Error('Failed to create chat')
      }
    }
    
    if (!chatToUse) {
      throw new Error('No chat available')
    }

    const newMessage: MessageUI = {
      id: Date.now(),
      type: 'user',
      content: content.trim(),
      timestamp: new Date()
    }

    // Clear any previous quota error
    setUsageLimitError(null)
    
    // Persist the user message before hitting the API
    await saveMessage(newMessage)
    
    const requestBody = createChatRequestBody(
      messages,
      newMessage,
      selectedModel,
      includedFiles,
      userId || '',
      bookId,
      chatToUse.id
    )

    try {
      startStreaming()
      
      const response = await fetch('/api/chat', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(requestBody),
        signal: abortControllerRef.current?.signal,
      })

      if (!response.ok) {
        // A 429 with limitExceeded is a recoverable quota error, shown inline
        if (response.status === 429) {
          const errorData = await response.json()
          if (errorData.limitExceeded) {
            setUsageLimitError(errorData.error)
            return
          }
        }
        throw new Error('Failed to get response')
      }

      // Process the SSE stream
      const reader = response.body?.getReader()
      if (!reader) throw new Error('No response body')

      const decoder = new TextDecoder()
      const accumulatedToolResults: any[] = []
      // Carries a partial SSE line across network chunks. A "data: ..." line
      // can be split between reads; parsing each chunk in isolation (the
      // previous behavior) silently dropped such events as invalid JSON.
      let pending = ''

      while (true) {
        const { done, value } = await reader.read()
        if (done) break

        pending += decoder.decode(value, { stream: true })
        const segments = pending.split('\n')
        // The last segment may be incomplete — keep it for the next chunk.
        pending = segments.pop() ?? ''

        for (const line of segments) {
          if (!line.startsWith('data: ')) continue
          const data = line.slice(6).trim()
          if (!data || data === '[DONE]') continue
          
          try {
            processStreamChunk(JSON.parse(data), accumulatedToolResults)
          } catch (e) {
            // Skip malformed JSON frames
          }
        }
      }

      // Persist the completed AI reply. The ref holds the full streamed text;
      // `streamingMessage` is only a stale-closure fallback.
      const finalMessage = currentStreamingContentRef.current || streamingMessage
      if (finalMessage?.trim()) {
        await saveMessage({
          type: 'ai',
          content: finalMessage,
          timestamp: new Date()
        })
      }

      // Refresh usage stats after a successful round-trip
      refreshUsage()

    } catch (error: any) {
      // A user-initiated abort is not an error
      if (error.name !== 'AbortError') {
        console.error('Chat error:', error)
        throw error
      }
    } finally {
      stopStreaming()
    }
  }, [
    checkUsageLimit, currentChat, bookId, userId, createNewChat, saveMessage,
    setUsageLimitError, messages, includedFiles, startStreaming, stopStreaming,
    abortControllerRef, processStreamChunk, currentStreamingContentRef,
    streamingMessage, refreshUsage
  ])

  // Stable delete handler: deletes server-side, then reloads the chat so
  // local message state reflects the removal. Memoized — previously this was
  // an inline arrow re-created on every render, unlike the other actions.
  const handleDeleteMessage = useCallback(async (messageId: string) => {
    try {
      await deleteMessage(messageId)
      if (currentChat) {
        await loadChat(currentChat.id)
      }
    } catch (error) {
      console.error('Error deleting message:', error)
      throw error
    }
  }, [currentChat, loadChat])

  return {
    // Chat state
    currentChat,
    messages,
    allDisplayMessages,
    chatHistory,
    isLoadingHistory,
    chatError,
    
    // Streaming state
    isStreaming,
    streamingMessage,
    aiPlanning,
    currentToolExecution,
    toolExecutionResults,
    
    // Usage state
    usageInfo,
    usageLimitError,
    
    // Actions
    sendMessage,
    createNewChat,
    loadChat,
    updateChatTitle,
    deleteChat,
    updateChatModel,
    deleteMessage: handleDeleteMessage,
    setUsageLimitError,
    stopExecution,
    
    // Loading state
    isLoading: isStreaming
  }
}