Mirror of https://github.com/gradio-app/gradio.git
Allow modifying the chatbot value directly in gr.ChatInterface (#10359)

* changes
* changes
* add changeset
* format
* blank
* notebook

Co-authored-by: gradio-pr-bot <gradio-pr-bot@users.noreply.github.com>

parent 070cab5d2e
commit c44da259fe
.changeset/chubby-regions-hug.md (new file, 5 lines)
@@ -0,0 +1,5 @@
---
"gradio": minor
---

feat:Allow modifying the chatbot value directly in `gr.ChatInterface`
demo/chatinterface_prefill/run.ipynb (new file, 1 line)
@@ -0,0 +1 @@
{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: chatinterface_prefill"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import random\n", "\n", "def prefill_chatbot(choice):\n", " if choice == \"Greeting\":\n", " return [\n", " {\"role\": \"user\", \"content\": \"Hi there!\"},\n", " {\"role\": \"assistant\", \"content\": \"Hello! How can I assist you today?\"}\n", " ]\n", " elif choice == \"Complaint\":\n", " return [\n", " {\"role\": \"user\", \"content\": \"I'm not happy with the service.\"},\n", " {\"role\": \"assistant\", \"content\": \"I'm sorry to hear that. Can you please tell me more about the issue?\"}\n", " ]\n", " else:\n", " return []\n", "\n", "def random_response(message, history):\n", " return random.choice([\"Yes\", \"No\"])\n", "\n", "with gr.Blocks() as demo:\n", " radio = gr.Radio([\"Greeting\", \"Complaint\", \"Blank\"])\n", " chat = gr.ChatInterface(random_response, type=\"messages\")\n", " radio.change(prefill_chatbot, radio, chat.chatbot_value)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
demo/chatinterface_prefill/run.py (new file, 27 lines)
@@ -0,0 +1,27 @@
import gradio as gr
import random

def prefill_chatbot(choice):
    if choice == "Greeting":
        return [
            {"role": "user", "content": "Hi there!"},
            {"role": "assistant", "content": "Hello! How can I assist you today?"}
        ]
    elif choice == "Complaint":
        return [
            {"role": "user", "content": "I'm not happy with the service."},
            {"role": "assistant", "content": "I'm sorry to hear that. Can you please tell me more about the issue?"}
        ]
    else:
        return []

def random_response(message, history):
    return random.choice(["Yes", "No"])

with gr.Blocks() as demo:
    radio = gr.Radio(["Greeting", "Complaint", "Blank"])
    chat = gr.ChatInterface(random_response, type="messages")
    radio.change(prefill_chatbot, radio, chat.chatbot_value)

if __name__ == "__main__":
    demo.launch()
gradio/chat_interface.py

@@ -367,13 +367,19 @@ class ChatInterface(Blocks):
             # Hide the stop button at the beginning, and show it with the given value during the generator execution.
             self.original_stop_btn = self.textbox.stop_btn
             self.textbox.stop_btn = False
-
-            self.chatbot_state = State(self.chatbot.value if self.chatbot.value else [])
             self.fake_api_btn = Button("Fake API", visible=False)
             self.api_response = JSON(
                 label="Response", visible=False
             )  # Used to store the response from the API call
+
+            # Used internally to store the chatbot value when it differs from the value displayed in the chatbot UI.
+            # For example, when a user submits a message, the chatbot UI is immediately updated with the user message,
+            # but the chatbot_state value is not updated until the submit_fn is called.
+            self.chatbot_state = State(self.chatbot.value if self.chatbot.value else [])
+
+            # Provided so that developers can update the chatbot value from other events outside of `gr.ChatInterface`.
+            self.chatbot_value = State(self.chatbot.value if self.chatbot.value else [])

     def _render_footer(self):
         if self.examples:
             self.examples_handler = Examples(
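Both new attributes are `gr.State` components. As a minimal sketch (not part of this commit) of the pattern they rely on: a `State` holds a per-session Python value that is never rendered in the UI, and any event can read it as an input and write it back as an output, returning one value per output component.

```python
import gradio as gr

with gr.Blocks() as demo:
    # A State holds a per-session value without rendering anything.
    count = gr.State(0)
    clicks = gr.Number(label="Clicks", value=0)
    btn = gr.Button("Increment")
    # The handler returns one value per output component: the updated
    # State and the number shown in the UI.
    btn.click(lambda c: (c + 1, c + 1), inputs=count, outputs=[count, clicks])

if __name__ == "__main__":
    demo.launch()
```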
@@ -502,9 +508,9 @@ class ChatInterface(Blocks):
         submit_fn = self._stream_fn if self.is_generator else self._submit_fn

         synchronize_chat_state_kwargs = {
-            "fn": lambda x: x,
+            "fn": lambda x: (x, x),
             "inputs": [self.chatbot],
-            "outputs": [self.chatbot_state],
+            "outputs": [self.chatbot_state, self.chatbot_value],
             "show_api": False,
             "queue": False,
         }
@@ -702,6 +708,13 @@ class ChatInterface(Blocks):
             self.chatbot.feedback_options = self.flagging_options
             self.chatbot.like(flagging_callback.flag, self.chatbot)

+        self.chatbot_value.change(
+            lambda x: x,
+            [self.chatbot_value],
+            [self.chatbot],
+            show_api=False,
+        ).then(**synchronize_chat_state_kwargs)
+
     def _setup_stop_events(
         self, event_triggers: list[Callable], events_to_cancel: list[Dependency]
     ) -> None:
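With this wiring, any event outside the `ChatInterface` that writes to `chatbot_value` first updates the visible chatbot and then re-synchronizes the internal state. As an illustrative sketch of such an external event (the clear button and its handler are hypothetical, not part of this commit; the prefill demo above shows the same idea with a `gr.Radio`):

```python
import gradio as gr

def respond(message, history):
    return f"You said: {message}"

with gr.Blocks() as demo:
    chat = gr.ChatInterface(respond, type="messages")
    clear_btn = gr.Button("Clear conversation")
    # Writing an empty list to chatbot_value clears the displayed history
    # and, via the .change/.then chain above, the internal state as well.
    clear_btn.click(lambda: [], None, chat.chatbot_value)

if __name__ == "__main__":
    demo.launch()
```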
@@ -14,7 +14,7 @@ This tutorial uses `gr.ChatInterface()`, which is a high-level abstraction that
 $ pip install --upgrade gradio
 ```

-## OpenAI-API compatible endpoints
+## Note for OpenAI-API compatible endpoints

 If you have a chat server serving an OpenAI-API compatible endpoint (e.g. Ollama), you can spin up a ChatInterface in a single line of Python. First, also run `pip install openai`. Then, with your own URL, model, and optional token:

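The guide's actual one-liner falls outside this hunk. As a hedged sketch of what the paragraph refers to, assuming a local Ollama server on its default port and that `gr.load_chat` accepts a base URL, model name, and optional token:

```python
import gradio as gr

# Placeholder URL, model, and token for an OpenAI-API compatible server.
gr.load_chat("http://localhost:11434/v1/", model="llama3.2", token=None).launch()
```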
@@ -376,6 +376,12 @@ This example illustrates how to use preset responses:

 $code_chatinterface_options

+## Modifying the Chatbot Value Directly
+
+You may wish to modify the value of the chatbot with your own events, other than those prebuilt in the `gr.ChatInterface`. For example, you could create a dropdown that prefills the chat history with certain conversations, or add a separate button to clear the conversation history. The `gr.ChatInterface` supports these events, but you need to use `gr.ChatInterface.chatbot_value` as the input or output component in such events. In this example, we use a `gr.Radio` component to prefill the chatbot with certain conversations:
+
+$code_chatinterface_prefill
+
 ## Using Your Chatbot via API

 Once you've built your Gradio chat interface and are hosting it on [Hugging Face Spaces](https://hf.space) or somewhere else, then you can query it with a simple API at the `/chat` endpoint. The endpoint just expects the user's message and will return the response, internally keeping track of the message history.
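A minimal sketch of querying that `/chat` endpoint with `gradio_client` (the URL is a placeholder for wherever the app is hosted):

```python
from gradio_client import Client

# Point the client at the hosted chat interface (placeholder URL).
client = Client("http://127.0.0.1:7860/")
reply = client.predict("What is the capital of France?", api_name="/chat")
print(reply)
```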