Fix chatinterface e2e test (#9104)

* Refactor test

* comment

* Fix image

* add changeset

* add changeset

---------

Co-authored-by: gradio-pr-bot <gradio-pr-bot@users.noreply.github.com>
Freddy Boulton authored 2024-08-14 10:42:34 -04:00, committed by GitHub
parent efdc3231a7
commit cf02f7d785
4 changed files with 20 additions and 3 deletions

@@ -0,0 +1,5 @@
+---
+"website": patch
+---
+
+feat:Fix chatinterface e2e test

@@ -1 +1 @@
{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: test_chatinterface_streaming_echo"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/test_chatinterface_streaming_echo/messages_testcase.py"]}, {"cell_type": "code", "execution_count": null, "id": "44380577570523278879349135829904343037", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "runs = 0\n", "\n", "def slow_echo(message, history):\n", " global runs # i didn't want to add state or anything to this demo\n", " runs = runs + 1\n", " for i in range(len(message)):\n", " yield f\"Run {runs} - You typed: \" + message[: i + 1]\n", "\n", "demo = gr.ChatInterface(slow_echo)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: test_chatinterface_streaming_echo"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/test_chatinterface_streaming_echo/messages_testcase.py"]}, {"cell_type": "code", "execution_count": null, "id": "44380577570523278879349135829904343037", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "runs = 0\n", "\n", "def reset_runs():\n", " global runs\n", " runs = 0\n", "\n", "def slow_echo(message, history):\n", " global runs # i didn't want to add state or anything to this demo\n", " runs = runs + 1\n", " for i in range(len(message)):\n", " yield f\"Run {runs} - You typed: \" + message[: i + 1]\n", "\n", "chat = gr.ChatInterface(slow_echo, fill_height=True)\n", "\n", "with gr.Blocks() as demo:\n", " chat.render()\n", " # We reset the global variable to minimize flakes\n", " # this works because CI runs only one test at at time\n", " # need to use gr.State if we want to parallelize this test\n", " # currently chatinterface does not support that\n", " demo.unload(reset_runs)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}

@@ -2,13 +2,25 @@ import gradio as gr
 
 runs = 0
 
+def reset_runs():
+    global runs
+    runs = 0
+
 def slow_echo(message, history):
     global runs # i didn't want to add state or anything to this demo
     runs = runs + 1
     for i in range(len(message)):
         yield f"Run {runs} - You typed: " + message[: i + 1]
 
-demo = gr.ChatInterface(slow_echo)
+chat = gr.ChatInterface(slow_echo, fill_height=True)
+
+with gr.Blocks() as demo:
+    chat.render()
+    # We reset the global variable to minimize flakes
+    # this works because CI runs only one test at at time
+    # need to use gr.State if we want to parallelize this test
+    # currently chatinterface does not support that
+    demo.unload(reset_runs)
 
 if __name__ == "__main__":
     demo.launch()
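
The core of the fix is the `demo.unload(reset_runs)` hook: `runs` is a module-level global, so without a reset it keeps counting across chat sessions served by the same process, and any e2e assertion that depends on the run counter becomes order-dependent. Below is a minimal standalone sketch (not part of the commit; it re-declares the demo's functions purely to illustrate the flake and the reset):

```python
# Illustration only: mirrors the demo code above to show why the reset matters.
runs = 0

def reset_runs():
    global runs
    runs = 0

def slow_echo(message, history):
    global runs
    runs = runs + 1
    for i in range(len(message)):
        yield f"Run {runs} - You typed: " + message[: i + 1]

print(list(slow_echo("hi", []))[-1])  # Run 1 - You typed: hi
print(list(slow_echo("hi", []))[-1])  # Run 2 - You typed: hi  (a test pinned to "Run 1" would flake)
reset_runs()                          # demo.unload(reset_runs) triggers this when the test's page closes
print(list(slow_echo("hi", []))[-1])  # Run 1 - You typed: hi  (deterministic again for the next session)
```

As the inline comments note, `gr.State` would scope the counter per session and allow the test to run in parallel, but `ChatInterface` did not support that at the time, so an unload hook plus one-test-at-a-time CI is the workaround.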

@@ -82,7 +82,7 @@ def generate_response(history):
 Would be displayed as following:
-<img src="https://github.com/freddyaboulton/freddyboulton/assets/41651716/a4bb2b0a-5f8a-4287-814b-4eab278e021e" alt="Gradio chatbot tool display">
+<img src="https://github.com/user-attachments/assets/c1514bc9-bc29-4af1-8c3f-cd4a7c2b217f" alt="Gradio chatbot tool display">
 All of the types expected by the messages format are documented below:
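
The hunk above only swaps the screenshot URL in the guide's section on displaying tool usage with the messages format. The body of `generate_response(history)` is outside the hunk, so the following is a hedged sketch (the message text, tool name, and Blocks wiring are assumptions) of the kind of assistant message whose `metadata` title renders as the collapsible tool bubble shown in the screenshot:

```python
import gradio as gr

def generate_response(history):
    # Assumed body: append an assistant message whose metadata "title" is shown
    # as a collapsible "tool" header above the message content.
    history.append(
        gr.ChatMessage(
            role="assistant",
            content="The weather API reports 57°F in San Francisco.",
            metadata={"title": "🛠️ Used tool Weather API"},
        )
    )
    return history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages")
    gr.Button("Ask about the weather").click(generate_response, chatbot, chatbot)

if __name__ == "__main__":
    demo.launch()
```

Only the role/content pair is required by the messages format; the optional metadata title is what switches the message to the tool-style display.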