Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2024-11-21 03:20:57 +08:00)
feat: generate chat suggestions for user
This commit is contained in: parent b55b01cb13, commit f75b238ebe
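The change spans four areas: new .chat-suggestions styles, suggestion state and rendering in the Chat component, a useChatStore hook on the home screen, and, in the chat store, a Promise-returning onUserInput plus a new getSuggestions action. A condensed TypeScript sketch of the flow, with names taken from the diff below (an illustration, not the literal patch):

// After the bot reply finishes, ask the model for follow-up questions
// and render them as clickable chips above the chat input.
const [suggestions, setSuggestions] = useState<string[]>([]);

const doSubmit = (userInput: string) => {
  if (userInput.trim() === "") return;
  setIsLoading(true);
  setSuggestions([]); // drop chips from the previous turn
  chatStore.onUserInput(userInput).then(() => {
    setIsLoading(false);
    // onUserInput resolves after the reply is finished, so suggestions
    // are generated from the latest assistant message
    chatStore.getSuggestions().then(setSuggestions);
  });
};
// Clicking a rendered suggestion chip simply calls doSubmit(s) with its text.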
@@ -475,3 +475,21 @@
     bottom: 30px;
   }
 }
+
+.chat-suggestions {
+  display: flex;
+  flex-direction: column;
+
+  .chat-suggestion {
+    display: inline;
+    white-space: nowrap;
+    border-radius: 20px;
+    font-size: 12px;
+    background-color: var(--white);
+    color: var(--black);
+    border: var(--border-in-light);
+    padding: 4px 10px;
+    animation: slide-in ease 0.3s;
+    margin-bottom: 5px;
+  }
+}
@@ -606,6 +606,8 @@ export function Chat() {
     }
   };
 
+  const [suggestions, setSuggestions] = useState<string[]>([]);
+
   const doSubmit = (userInput: string) => {
     if (userInput.trim() === "") return;
     const matchCommand = chatCommands.match(userInput);
@@ -616,7 +618,13 @@ export function Chat() {
       return;
     }
     setIsLoading(true);
-    chatStore.onUserInput(userInput).then(() => setIsLoading(false));
+    setSuggestions([]);
+    chatStore.onUserInput(userInput).then(() => {
+      setIsLoading(false);
+      chatStore
+        .getSuggestions()
+        .then((suggestions) => setSuggestions(suggestions));
+    });
     localStorage.setItem(LAST_INPUT_KEY, userInput);
     setUserInput("");
     setPromptHints([]);
@@ -1061,6 +1069,25 @@ export function Chat() {
             onSearch("");
           }}
         />
+
+        {suggestions.length > 0 && (
+          <div className={styles["chat-suggestions"]}>
+            {suggestions.map((s, i) => {
+              return (
+                <div
+                  key={i}
+                  className={styles["chat-suggestion"] + " clickable"}
+                  onClick={() => {
+                    doSubmit(s);
+                  }}
+                >
+                  {s}
+                </div>
+              );
+            })}
+          </div>
+        )}
+
       <div className={styles["chat-input-panel-inner"]}>
         <textarea
           ref={inputRef}
@@ -28,6 +28,7 @@ import { useAppConfig } from "../store/config";
 import { AuthPage } from "./auth";
 import { getClientConfig } from "../config/client";
 import { api } from "../client/api";
+import { useChatStore } from "../store";
 
 export function Loading(props: { noLogo?: boolean }) {
   return (
@@ -114,6 +115,7 @@ function Screen() {
   const isHome = location.pathname === Path.Home;
   const isAuth = location.pathname === Path.Auth;
   const isMobileScreen = useMobileScreen();
+  const chat = useChatStore();
 
   useEffect(() => {
     loadAsyncGoogleFont();
@@ -103,6 +103,7 @@ interface ChatStore {
   resetSession: () => void;
   getMessagesWithMemory: () => ChatMessage[];
   getMemoryPrompt: () => ChatMessage;
+  getSuggestions: () => Promise<string[]>;
 
   clearAllData: () => void;
 }
@@ -278,96 +279,99 @@ export const useChatStore = create<ChatStore>()(
       get().summarizeSession();
     },
 
-    async onUserInput(content) {
-      const session = get().currentSession();
-      const modelConfig = session.mask.modelConfig;
-
-      const userContent = fillTemplateWith(content, modelConfig);
-      console.log("[User Input] after template: ", userContent);
-
-      const userMessage: ChatMessage = createMessage({
-        role: "user",
-        content: userContent,
-      });
-
-      const botMessage: ChatMessage = createMessage({
-        role: "assistant",
-        streaming: true,
-        id: userMessage.id! + 1,
-        model: modelConfig.model,
-      });
-
-      // get recent messages
-      const recentMessages = get().getMessagesWithMemory();
-      const sendMessages = recentMessages.concat(userMessage);
-      const sessionIndex = get().currentSessionIndex;
-      const messageIndex = get().currentSession().messages.length + 1;
-
-      // save user's and bot's message
-      get().updateCurrentSession((session) => {
-        const savedUserMessage = {
-          ...userMessage,
-          content,
-        };
-        session.messages = session.messages.concat([
-          savedUserMessage,
-          botMessage,
-        ]);
-      });
-
-      // make request
-      api.llm.chat({
-        messages: sendMessages,
-        config: { ...modelConfig, stream: true },
-        onUpdate(message) {
-          botMessage.streaming = true;
-          if (message) {
-            botMessage.content = message;
-          }
-          get().updateCurrentSession((session) => {
-            session.messages = session.messages.concat();
-          });
-        },
-        onFinish(message) {
-          botMessage.streaming = false;
-          if (message) {
-            botMessage.content = message;
-            get().onNewMessage(botMessage);
-          }
-          ChatControllerPool.remove(
-            sessionIndex,
-            botMessage.id ?? messageIndex,
-          );
-        },
-        onError(error) {
-          const isAborted = error.message.includes("aborted");
-          botMessage.content =
-            "\n\n" +
-            prettyObject({
-              error: true,
-              message: error.message,
-            });
-          botMessage.streaming = false;
-          userMessage.isError = !isAborted;
-          botMessage.isError = !isAborted;
-          get().updateCurrentSession((session) => {
-            session.messages = session.messages.concat();
-          });
-          ChatControllerPool.remove(
-            sessionIndex,
-            botMessage.id ?? messageIndex,
-          );
-
-          console.error("[Chat] failed ", error);
-        },
-        onController(controller) {
-          // collect controller for stop/retry
-          ChatControllerPool.addController(
-            sessionIndex,
-            botMessage.id ?? messageIndex,
-            controller,
-          );
-        },
-      });
-    },
+    onUserInput(content) {
+      return new Promise((resolve) => {
+        const session = get().currentSession();
+        const modelConfig = session.mask.modelConfig;
+
+        const userContent = fillTemplateWith(content, modelConfig);
+        console.log("[User Input] after template: ", userContent);
+
+        const userMessage: ChatMessage = createMessage({
+          role: "user",
+          content: userContent,
+        });
+
+        const botMessage: ChatMessage = createMessage({
+          role: "assistant",
+          streaming: true,
+          id: userMessage.id! + 1,
+          model: modelConfig.model,
+        });
+
+        // get recent messages
+        const recentMessages = get().getMessagesWithMemory();
+        const sendMessages = recentMessages.concat(userMessage);
+        const sessionIndex = get().currentSessionIndex;
+        const messageIndex = get().currentSession().messages.length + 1;
+
+        // save user's and bot's message
+        get().updateCurrentSession((session) => {
+          const savedUserMessage = {
+            ...userMessage,
+            content,
+          };
+          session.messages = session.messages.concat([
+            savedUserMessage,
+            botMessage,
+          ]);
+        });
+
+        // make request
+        api.llm.chat({
+          messages: sendMessages,
+          config: { ...modelConfig, stream: true },
+          onUpdate(message) {
+            botMessage.streaming = true;
+            if (message) {
+              botMessage.content = message;
+            }
+            get().updateCurrentSession((session) => {
+              session.messages = session.messages.concat();
+            });
+          },
+          onFinish(message) {
+            botMessage.streaming = false;
+            if (message) {
+              botMessage.content = message;
+              get().onNewMessage(botMessage);
+              resolve();
+            }
+            ChatControllerPool.remove(
+              sessionIndex,
+              botMessage.id ?? messageIndex,
+            );
+          },
+          onError(error) {
+            const isAborted = error.message.includes("aborted");
+            botMessage.content =
+              "\n\n" +
+              prettyObject({
+                error: true,
+                message: error.message,
+              });
+            botMessage.streaming = false;
+            userMessage.isError = !isAborted;
+            botMessage.isError = !isAborted;
+            get().updateCurrentSession((session) => {
+              session.messages = session.messages.concat();
+            });
+            ChatControllerPool.remove(
+              sessionIndex,
+              botMessage.id ?? messageIndex,
+            );
+
+            console.error("[Chat] failed ", error);
+          },
+          onController(controller) {
+            // collect controller for stop/retry
+            ChatControllerPool.addController(
+              sessionIndex,
+              botMessage.id ?? messageIndex,
+              controller,
+            );
+          },
+        });
+      });
+    },
 
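Design note: onUserInput now wraps its previous body in a Promise and calls resolve() inside onFinish, after the finished reply has been committed through onNewMessage, so callers can sequence work that depends on the completed answer. Because resolve() sits inside the if (message) branch and onError never resolves, the promise settles only for a successful, non-empty reply. A hypothetical caller (illustration only, not part of this commit):

// Hypothetical caller: wait for the reply, then fetch follow-up suggestions.
async function askWithSuggestions(text: string) {
  const store = useChatStore.getState();
  await store.onUserInput(text); // resolves once the bot message is finished
  const followUps = await store.getSuggestions(); // added in the hunk below
  console.log("[Suggestions]", followUps);
}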
@@ -594,6 +598,62 @@ export const useChatStore = create<ChatStore>()(
       localStorage.clear();
       location.reload();
     },
+
+    getSuggestions() {
+      return new Promise((resolve) => {
+        // get the last bot message
+        const messages = get().currentSession().messages;
+        let lastBotMessage: ChatMessage | undefined = undefined;
+
+        for (let i = messages.length - 1; i >= 0; i -= 1) {
+          if (messages[i].role === "assistant") {
+            lastBotMessage = messages[i];
+            break;
+          }
+        }
+
+        const botMsg = lastBotMessage?.content;
+
+        if (!lastBotMessage || !botMsg) return resolve([]);
+
+        const prompt = `
+        here is the bot's response:
+        '''
+        ${botMsg}
+        '''
+
+        according to the bot's response:
+        - generate three short user input suggestions based on the bot's message
+        - detect the bot's language and respond in the detected language
+        - no other words, just respond in pure JSON format:
+          {questions: string[]}
+        `;
+
+        api.llm.chat({
+          messages: [
+            {
+              role: "user",
+              content: prompt,
+            },
+          ],
+          config: {
+            model: "gpt-3.5-turbo",
+          },
+          onFinish(msg) {
+            try {
+              const msgJson = JSON.parse(msg) as {
+                questions: string[];
+              };
+              if (Array.isArray(msgJson.questions)) {
+                resolve(msgJson.questions);
+              }
+            } catch {
+              console.error("[Suggestions] failed to parse: ", msg);
+            }
+          },
+        });
+      });
+    },
 }),
 {
   name: StoreKey.Chat
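getSuggestions builds its prompt from the last assistant message and asks gpt-3.5-turbo for JSON of the shape {questions: string[]}. As written, resolve() is reached only when the reply parses and questions is an array, so a malformed reply leaves the promise pending. A defensive parsing helper is sketched below (hypothetical, not part of this commit):

// Hypothetical helper: parse the model reply so the promise always settles.
function parseSuggestions(msg: string): string[] {
  try {
    const parsed = JSON.parse(msg) as { questions?: unknown };
    return Array.isArray(parsed.questions)
      ? parsed.questions.filter((q): q is string => typeof q === "string")
      : [];
  } catch {
    console.error("[Suggestions] failed to parse: ", msg);
    return []; // fall back to no suggestions instead of a pending promise
  }
}
// usage inside onFinish: resolve(parseSuggestions(msg));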