Merge pull request #9 from nauxliu/disable-input-when-message-is-streaming
Ignore new messages while the current message has not finished streaming yet
commit 903c7f6806
Chat.tsx

@@ -8,13 +8,14 @@ import { ModelSelect } from "./ModelSelect";
 interface Props {
   model: OpenAIModel;
   messages: Message[];
+  messageIsStreaming: boolean,
   loading: boolean;
   lightMode: "light" | "dark";
   onSend: (message: Message) => void;
   onSelect: (model: OpenAIModel) => void;
 }
 
-export const Chat: FC<Props> = ({ model, messages, loading, lightMode, onSend, onSelect }) => {
+export const Chat: FC<Props> = ({ model, messages, messageIsStreaming, loading, lightMode, onSend, onSelect }) => {
   const messagesEndRef = useRef<HTMLDivElement>(null);
 
   const scrollToBottom = () => {
@@ -57,7 +58,7 @@ export const Chat: FC<Props> = ({ model, messages, loading, lightMode, onSend, o
       </div>
 
       <div className="h-[80px] sm:h-[140px] w-[340px] sm:w-[400px] md:w-[500px] lg:w-[700px] xl:w-[800px] mx-auto">
-        <ChatInput onSend={onSend} />
+        <ChatInput messageIsStreaming={messageIsStreaming} onSend={onSend} />
       </div>
     </div>
   );
ChatInput.tsx

@@ -3,10 +3,11 @@ import { IconSend } from "@tabler/icons-react";
 import { FC, KeyboardEvent, useEffect, useRef, useState } from "react";
 
 interface Props {
+  messageIsStreaming: boolean,
   onSend: (message: Message) => void;
 }
 
-export const ChatInput: FC<Props> = ({ onSend }) => {
+export const ChatInput: FC<Props> = ({ onSend, messageIsStreaming }) => {
   const [content, setContent] = useState<string>();
   const [isTyping, setIsTyping] = useState<boolean>(false);
 
@@ -23,6 +24,10 @@ export const ChatInput: FC<Props> = ({ onSend }) => {
   };
 
   const handleSend = () => {
+    if (messageIsStreaming) {
+      return;
+    }
+
     if (!content) {
       alert("Please enter a message");
       return;
index.tsx (Home)

@@ -11,7 +11,7 @@ export default function Home() {
   const [loading, setLoading] = useState<boolean>(false);
   const [model, setModel] = useState<OpenAIModel>(OpenAIModel.GPT_3_5);
   const [lightMode, setLightMode] = useState<"dark" | "light">("dark");
-  const [disabled, setDisabled] = useState<boolean>(false);
+  const [messageIsStreaming, setmessageIsStreaming] = useState<boolean>(false);
   const [showSidebar, setShowSidebar] = useState<boolean>(true);
 
   const handleSend = async (message: Message) => {
@@ -23,7 +23,7 @@ export default function Home() {
 
       setSelectedConversation(updatedConversation);
       setLoading(true);
-      setDisabled(true);
+      setmessageIsStreaming(true);
 
       const response = await fetch("/api/chat", {
         method: "POST",
@@ -111,7 +111,7 @@ export default function Home() {
 
       localStorage.setItem("conversationHistory", JSON.stringify(updatedConversations));
 
-      setDisabled(false);
+      setmessageIsStreaming(false);
     }
   };
 
@@ -230,7 +230,7 @@ export default function Home() {
         <div className={`flex h-screen text-white ${lightMode}`}>
           {showSidebar ? (
             <Sidebar
-              loading={disabled}
+              loading={messageIsStreaming}
               conversations={conversations}
               lightMode={lightMode}
               selectedConversation={selectedConversation}
@@ -250,6 +250,7 @@ export default function Home() {
           )}
 
           <Chat
+            messageIsStreaming={messageIsStreaming}
             model={model}
             messages={selectedConversation.messages}
             loading={loading}
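Taken together, the change threads a single messageIsStreaming boolean from the Home page state down through Chat into ChatInput, whose handleSend now returns early while a reply is still streaming. Below is a minimal, self-contained sketch of that flow; the prop and state names come from the diff, but the Message shape, the markup, and the elided fetch/stream logic are placeholders rather than the repo's actual code.

import { FC, useState } from "react";

// Placeholder shape for this sketch; the real Message type lives in the repo's types.
interface Message {
  role: "user" | "assistant";
  content: string;
}

interface ChatInputProps {
  messageIsStreaming: boolean;
  onSend: (message: Message) => void;
}

const ChatInput: FC<ChatInputProps> = ({ messageIsStreaming, onSend }) => {
  const [content, setContent] = useState<string>("");

  const handleSend = () => {
    // Guard added by this commit: ignore sends while a reply is still streaming.
    if (messageIsStreaming) {
      return;
    }

    if (!content) {
      alert("Please enter a message");
      return;
    }

    onSend({ role: "user", content });
    setContent("");
  };

  return (
    <div>
      <input value={content} onChange={(e) => setContent(e.target.value)} />
      <button onClick={handleSend}>Send</button>
    </div>
  );
};

export default function Home() {
  // Renamed from `disabled`: true for as long as the assistant reply streams in.
  const [messageIsStreaming, setmessageIsStreaming] = useState<boolean>(false);

  const handleSend = async (message: Message) => {
    setmessageIsStreaming(true);
    // ...POST to /api/chat and stream the assistant reply into conversation state...
    setmessageIsStreaming(false);
  };

  // In the real app the flag passes through <Chat> before reaching <ChatInput>.
  return <ChatInput messageIsStreaming={messageIsStreaming} onSend={handleSend} />;
}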