diff --git a/ee/tabby-ui/components/chat-list.tsx b/ee/tabby-ui/components/chat-list.tsx
index 06dfdf4d97c7..19da68e9b273 100644
--- a/ee/tabby-ui/components/chat-list.tsx
+++ b/ee/tabby-ui/components/chat-list.tsx
@@ -2,14 +2,19 @@ import { type Message } from 'ai'
import { MessageActionType } from '@/lib/types'
import { Separator } from '@/components/ui/separator'
-import { ChatMessage } from '@/components/chat-message'
+import { ChatMessage, MessagePendingIndicator } from '@/components/chat-message'
export interface ChatList {
messages: Message[]
handleMessageAction: (messageId: string, action: MessageActionType) => void
+ isStreamResponsePending?: boolean
}
-export function ChatList({ messages, handleMessageAction }: ChatList) {
+export function ChatList({
+ messages,
+ handleMessageAction,
+ isStreamResponsePending
+}: ChatList) {
if (!messages.length) {
return null
}
@@ -27,6 +32,12 @@ export function ChatList({ messages, handleMessageAction }: ChatList) {
)}
))}
+ {isStreamResponsePending && (
+ <>
+
+
+ >
+ )}
)
}
diff --git a/ee/tabby-ui/components/chat-message.tsx b/ee/tabby-ui/components/chat-message.tsx
index 81e346a971a1..706159d234f5 100644
--- a/ee/tabby-ui/components/chat-message.tsx
+++ b/ee/tabby-ui/components/chat-message.tsx
@@ -13,6 +13,7 @@ import { CodeBlock } from '@/components/ui/codeblock'
import { ChatMessageActions } from '@/components/chat-message-actions'
import { MemoizedReactMarkdown } from '@/components/markdown'
+import { Skeleton } from './ui/skeleton'
import { UserAvatar } from './user-avatar'
export interface ChatMessageProps {
@@ -30,11 +31,7 @@ export function ChatMessage({
className={cn('group relative mb-4 flex items-start md:-ml-12')}
{...props}
>
-
+
{message.role === 'user' ? (
) : (
@@ -92,6 +89,20 @@ export function ChatMessage({
)
}
+export function MessagePendingIndicator() {
+ return (
+
+ )
+}
+
function IconTabby({ className }: { className?: string }) {
return (
) {
- usePatchFetch()
const chats = useStore(useChatStore, state => state.chats)
+  // Cleared as soon as the first token of the stream response arrives
+ const [isStreamResponsePending, setIsStreamResponsePending] =
+ React.useState(false)
+
+ const onStreamToken = useLatest(() => {
+ if (isStreamResponsePending) {
+ setIsStreamResponsePending(false)
+ }
+ })
const useChatHelpers = useChat({
initialMessages,
@@ -44,6 +53,15 @@ function ChatRenderer(
}
})
+ usePatchFetch({
+ onStart: () => {
+ setIsStreamResponsePending(true)
+ },
+ onToken: () => {
+ onStreamToken.current()
+ }
+ })
+
const {
messages,
append,
@@ -57,6 +75,11 @@ function ChatRenderer(
const [selectedMessageId, setSelectedMessageId] = React.useState()
+ const onStop = () => {
+ setIsStreamResponsePending(false)
+ stop()
+ }
+
const onRegenerateResponse = (messageId: string) => {
const messageIndex = findIndex(messages, { id: messageId })
const prevMessage = messages?.[messageIndex - 1]
@@ -100,6 +123,13 @@ function ChatRenderer(
}
}
+ const scrollToBottom = (behavior?: ScrollBehavior) => {
+ window.scrollTo({
+ top: document.body.offsetHeight,
+ behavior
+ })
+ }
+
const handleSubmit = async (value: string) => {
if (findIndex(chats, { id }) === -1) {
addChat(id, truncateText(value))
@@ -122,12 +152,17 @@ function ChatRenderer(
}, [messages])
React.useEffect(() => {
- const scrollHeight = document.documentElement.scrollHeight
- window.scrollTo(0, scrollHeight)
+ scrollToBottom()
return () => stop()
}, [])
+ React.useLayoutEffect(() => {
+ if (isStreamResponsePending) {
+ scrollToBottom('smooth')
+ }
+ }, [isStreamResponsePending])
+
React.useImperativeHandle(
ref,
() => {
@@ -145,6 +180,7 @@ function ChatRenderer(
>
@@ -157,7 +193,7 @@ function ChatRenderer(
className="fixed inset-x-0 bottom-0 lg:ml-[280px]"
id={id}
isLoading={isLoading}
- stop={stop}
+ stop={onStop}
append={append}
reload={reload}
messages={messages}
diff --git a/ee/tabby-ui/lib/hooks/use-patch-fetch.ts b/ee/tabby-ui/lib/hooks/use-patch-fetch.ts
index b95433af0002..43766a689709 100644
--- a/ee/tabby-ui/lib/hooks/use-patch-fetch.ts
+++ b/ee/tabby-ui/lib/hooks/use-patch-fetch.ts
@@ -1,9 +1,9 @@
import { useEffect } from 'react'
-import { OpenAIStream, StreamingTextResponse } from 'ai'
+import { OpenAIStream, OpenAIStreamCallbacks, StreamingTextResponse } from 'ai'
import fetcher from '../tabby/fetcher'
-export function usePatchFetch() {
+export function usePatchFetch(callbacks?: OpenAIStreamCallbacks) {
useEffect(() => {
if (!window._originFetch) {
window._originFetch = window.fetch
@@ -26,7 +26,7 @@ export function usePatchFetch() {
headers,
customFetch: fetch,
responseFormatter(response) {
- const stream = OpenAIStream(response, undefined)
+ const stream = OpenAIStream(response, callbacks)
return new StreamingTextResponse(stream)
}
})