Fixes streaming event race condition (#7926)
* changes
* add changeset

---------

Co-authored-by: Ali Abid <aliabid94@gmail.com>
Co-authored-by: gradio-pr-bot <gradio-pr-bot@users.noreply.github.com>
commit 9666854790
parent b43055b297
.changeset/spotty-impalas-stick.md (new file, 6 lines)
@@ -0,0 +1,6 @@
---
"@gradio/client": patch
"gradio": patch
---

fix:Fixes streaming event race condition
@@ -1049,6 +1049,10 @@ export function api_factory(
 		event_stream = EventSource_factory(url);
 		event_stream.onmessage = async function (event) {
 			let _data = JSON.parse(event.data);
+			if (_data.msg === "close_stream") {
+				close_stream();
+				return;
+			}
 			const event_id = _data.event_id;
 			if (!event_id) {
 				await Promise.all(
@@ -1074,9 +1078,6 @@ export function api_factory(
 				}
 				pending_stream_messages[event_id].push(_data);
 			}
-			if (_data.msg === "close_stream") {
-				close_stream();
-			}
 		};
 		event_stream.onerror = async function (event) {
 			await Promise.all(
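The change above moves the `close_stream` handling to the very top of the `onmessage` handler and returns immediately, instead of checking for it only after the per-event routing and queueing logic (which may await other callbacks) has already run. Below is a minimal standalone sketch of the resulting ordering, not the client's actual implementation: `close_stream`, `pending_stream_messages`, and `event_id` mirror identifiers visible in the diff, while `event_callbacks` and the function shape are illustrative stand-ins.

```ts
// Sketch of the post-fix onmessage ordering (illustrative only).
type StreamMessage = { msg?: string; event_id?: string; [key: string]: unknown };

const event_callbacks: Record<string, (d: StreamMessage) => Promise<void>> = {};
const pending_stream_messages: Record<string, StreamMessage[]> = {};

function close_stream(): void {
	// Stand-in: the real client closes the EventSource and cleans up here.
}

async function on_message(event: { data: string }): Promise<void> {
	const _data: StreamMessage = JSON.parse(event.data);

	// Control message first: close and bail out before any routing or queueing,
	// so a "close_stream" (which carries no event_id) can no longer fall through
	// into the per-event logic below.
	if (_data.msg === "close_stream") {
		close_stream();
		return;
	}

	const event_id = _data.event_id;
	if (!event_id) {
		// Broadcast messages without an event_id to every registered listener.
		await Promise.all(Object.values(event_callbacks).map((cb) => cb(_data)));
	} else if (event_callbacks[event_id]) {
		await event_callbacks[event_id](_data);
	} else {
		// No listener registered yet: park the message until one attaches.
		(pending_stream_messages[event_id] ??= []).push(_data);
	}
}
```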
demo/rapid_generation/run.ipynb (new file, 1 line)
@@ -0,0 +1 @@
{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: rapid_generation"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr \n", "\n", "with gr.Blocks() as demo:\n", " chatbot = gr.Chatbot(elem_id=\"chatbot\")\n", "\n", " with gr.Row():\n", " num1 = gr.Number(label=\"a\")\n", " num2 = gr.Number(label=\"b\")\n", " with gr.Row():\n", " num3 = gr.Number(label=\"c\")\n", " num4 = gr.Number(label=\"d\")\n", "\n", " btn = gr.Button(\"Start\")\n", "\n", " def add_user(history):\n", " new_response = [\"\", None]\n", " history.append(new_response)\n", " for i in range(100):\n", " new_response[0] += f\"{len(history)} \"\n", " yield history\n", "\n", " def add_bot(history):\n", " last_response = history[-1]\n", " last_response[1] = \"\"\n", " for i in range(100):\n", " last_response[1] += f\"{len(history)} \"\n", " yield history\n", "\n", " chat_evt = btn.click(add_user, chatbot, chatbot).then(add_bot, chatbot, chatbot)\n", " for i in range(25):\n", " chat_evt = chat_evt.then(add_user, chatbot, chatbot).then(add_bot, chatbot, chatbot)\n", "\n", " increase = lambda x: x + 1\n", "\n", " btn_evt = btn.click(increase, num1, num2).then(increase, num2, num1)\n", " btn_evt2 = btn.click(increase, num3, num4).then(increase, num4, num3)\n", " for i in range(25):\n", " btn_evt = btn_evt.then(increase, num1, num2).then(increase, num2, num1)\n", " btn_evt2 = btn_evt2.then(increase, num3, num4).then(increase, num4, num3)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
demo/rapid_generation/run.py (new file, 42 lines)
@@ -0,0 +1,42 @@
import gradio as gr

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(elem_id="chatbot")

    with gr.Row():
        num1 = gr.Number(label="a")
        num2 = gr.Number(label="b")
    with gr.Row():
        num3 = gr.Number(label="c")
        num4 = gr.Number(label="d")

    btn = gr.Button("Start")

    def add_user(history):
        new_response = ["", None]
        history.append(new_response)
        for i in range(100):
            new_response[0] += f"{len(history)} "
            yield history

    def add_bot(history):
        last_response = history[-1]
        last_response[1] = ""
        for i in range(100):
            last_response[1] += f"{len(history)} "
            yield history

    chat_evt = btn.click(add_user, chatbot, chatbot).then(add_bot, chatbot, chatbot)
    for i in range(25):
        chat_evt = chat_evt.then(add_user, chatbot, chatbot).then(add_bot, chatbot, chatbot)

    increase = lambda x: x + 1

    btn_evt = btn.click(increase, num1, num2).then(increase, num2, num1)
    btn_evt2 = btn.click(increase, num3, num4).then(increase, num4, num3)
    for i in range(25):
        btn_evt = btn_evt.then(increase, num1, num2).then(increase, num2, num1)
        btn_evt2 = btn_evt2.then(increase, num3, num4).then(increase, num4, num3)

if __name__ == "__main__":
    demo.launch()
js/app/test/rapid_generation.spec.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import { test, expect } from "@gradio/tootils";

test("No errors on generation", async ({ page }) => {
	await page.getByRole("button", { name: "Start" }).click();
	const conversation = page.getByLabel("chatbot conversation");
	const num_a = page.getByLabel("a", { exact: true });
	const num_b = page.getByLabel("b", { exact: true });
	const num_c = page.getByLabel("c", { exact: true });
	const num_d = page.getByLabel("d", { exact: true });

	await expect(conversation).toContainText("26 26 26 26 26 26 26 26");
	await expect(num_a).toHaveValue("52");
	await expect(num_b).toHaveValue("51");
	await expect(num_c).toHaveValue("52");
	await expect(num_d).toHaveValue("51");
});
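For context, the asserted values follow directly from the wiring in demo/rapid_generation/run.py above: each chain fires once for the initial click plus 25 chained `.then()` calls, i.e. 26 rounds. A small worked check (illustrative, not part of the test suite):

```ts
// 1 initial click + 25 chained .then() calls = 26 rounds per chain.
const rounds = 1 + 25; // 26

// Chat chain: add_user appends one message per round, so len(history) reaches 26
// and the final message repeats "26 ", matching toContainText("26 26 26 26 26 26 26 26").

// Number chain: each round runs `increase` twice (a -> b, then b -> a), so with
// the inputs effectively starting at 0, `a` gains 2 per round and `b` trails it by 1.
const expected_a = 2 * rounds; // 52
const expected_b = expected_a - 1; // 51
// The c/d chain is wired identically, hence 52 and 51 there as well.
```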