remove static while pending behaviour (#7410)

* remove static while pending behaviour

* add changeset

* fix notebooks

* add changeset

* cleanup unused code + fix test

* fix notebooks

* oops

* re-add check

---------

Co-authored-by: gradio-pr-bot <gradio-pr-bot@users.noreply.github.com>
This commit is contained in:
pngwn 2024-02-14 16:15:16 +00:00 committed by GitHub
parent 32b317f24e
commit c2dfc592a4
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 97 additions and 74 deletions

View File

@ -0,0 +1,6 @@
---
"@gradio/app": patch
"gradio": patch
---
fix:remove static while pending behaviour

View File

@ -1 +1 @@
{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: chatinterface_streaming_echo"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import time\n", "import gradio as gr\n", "\n", "def slow_echo(message, history):\n", " for i in range(len(message)):\n", " time.sleep(0.05)\n", " yield \"You typed: \" + message[: i+1]\n", "\n", "demo = gr.ChatInterface(slow_echo).queue()\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: chatinterface_streaming_echo"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import time\n", "import gradio as gr\n", "\n", "\n", "def slow_echo(message, history):\n", " for i in range(len(message)):\n", " time.sleep(0.05)\n", " yield \"You typed: \" + message[: i + 1]\n", "\n", "\n", "demo = gr.ChatInterface(slow_echo).queue()\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}

View File

@ -1,10 +1,12 @@
import time
import gradio as gr
def slow_echo(message, history):
for i in range(len(message)):
time.sleep(0.05)
yield "You typed: " + message[: i+1]
yield "You typed: " + message[: i + 1]
demo = gr.ChatInterface(slow_echo).queue()

View File

@ -0,0 +1 @@
{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: test_chatinterface_streaming_echo"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "runs = 0\n", "\n", "\n", "def slow_echo(message, history):\n", " global runs # i didn't want to add state or anything to this demo\n", " runs = runs + 1\n", " for i in range(len(message)):\n", " yield f\"Run {runs} - You typed: \" + message[: i + 1]\n", "\n", "\n", "demo = gr.ChatInterface(slow_echo).queue()\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}

View File

@ -0,0 +1,16 @@
import gradio as gr

# Module-level invocation counter; keeps the demo free of per-session state.
runs = 0


def slow_echo(message, history):
    """Stream the user's message back one character at a time.

    Each yielded chunk is prefixed with the global run count so callers
    (and tests) can tell separate invocations apart.
    """
    global runs  # deliberate: a plain global instead of gr.State for this demo
    runs += 1
    for end in range(1, len(message) + 1):
        yield f"Run {runs} - You typed: " + message[:end]


demo = gr.ChatInterface(slow_echo).queue()

if __name__ == "__main__":
    demo.launch()

View File

@ -343,11 +343,7 @@
rootNode = rootNode;
}, 50);
async function handle_update(
data: any,
fn_index: number,
outputs_set_to_non_interactive: number[]
): Promise<void> {
async function handle_update(data: any, fn_index: number): Promise<void> {
const outputs = dependencies[fn_index].outputs;
data?.forEach((value: any, i: number) => {
@ -369,9 +365,6 @@
continue;
} else {
output.props[update_key] = update_value;
if (update_key == "interactive" && !update_value) {
outputs_set_to_non_interactive.push(outputs[i]);
}
}
}
} else {
@ -485,7 +478,7 @@
payload.data = v;
make_prediction(payload);
} else {
handle_update(v, dep_index, []);
handle_update(v, dep_index);
}
});
} else {
@ -505,8 +498,6 @@
}
function make_prediction(payload: Payload): void {
const pending_outputs: number[] = [];
let outputs_set_to_non_interactive: number[] = [];
const submission = app
.submit(
payload.fn_index,
@ -520,27 +511,10 @@
make_prediction(dep.final_event);
}
dep.pending_request = false;
handle_update(data, fn_index, outputs_set_to_non_interactive);
handle_update(data, fn_index);
})
.on("status", ({ fn_index, ...status }) => {
tick().then(() => {
const outputs = dependencies[fn_index].outputs;
outputs.forEach((id) => {
if (
instance_map[id].props.interactive &&
status.stage === "pending" &&
!["focus", "key_up"].includes(dep.targets[0][1])
) {
pending_outputs.push(id);
instance_map[id].props.interactive = false;
} else if (
["complete", "error"].includes(status.stage) &&
pending_outputs.includes(id) &&
!outputs_set_to_non_interactive.includes(id)
) {
instance_map[id].props.interactive = true;
}
});
//@ts-ignore
loading_status.update({
...status,

View File

@ -1,43 +0,0 @@
// Playwright end-to-end test for gr.ChatInterface with a streaming (generator)
// backend. Exercises the Submit, Retry, Undo and Clear buttons and checks the
// chat log after each interaction.
import { test, expect } from "@gradio/tootils";
test("chatinterface works with streaming functions and all buttons behave as expected", async ({
page
}) => {
// Locators for the four ChatInterface action buttons and the input textbox.
// NOTE(review): `await` on these is a no-op — getByRole/getByPlaceholder return
// Locators, not Promises; awaiting a non-thenable yields the value unchanged.
const submit_button = await page.getByRole("button", { name: "Submit" });
const retry_button = await page.getByRole("button", { name: "🔄 Retry" });
const undo_button = await page.getByRole("button", { name: "↩️ Undo" });
const clear_button = await page.getByRole("button", { name: "🗑️ Clear" });
const textbox = await page.getByPlaceholder("Type a message...");
// First message: submitting clears the textbox and the bot streams the echo back.
await textbox.fill("hello");
await submit_button.click();
await expect(textbox).toHaveValue("");
const bot_message_0 = await page.locator(".bot.message").nth(0);
await expect(bot_message_0).toContainText("You typed: hello");
// Second message appends a second bot reply.
await textbox.fill("hi");
await submit_button.click();
await expect(textbox).toHaveValue("");
const bot_message_1 = await page.locator(".bot").nth(1);
await expect(bot_message_1).toContainText("You typed: hi");
// Retry re-runs the last message; the reply text should be unchanged.
await retry_button.click();
await expect(textbox).toHaveValue("");
await expect(page.locator(".bot").nth(1)).toContainText("You typed: hi");
// Undo removes the last exchange and restores its text to the textbox.
await undo_button.click();
await expect
.poll(async () => page.locator(".message.bot").count(), { timeout: 5000 })
.toBe(1);
await expect(textbox).toHaveValue("hi");
// A fresh message after undo appends as the second bot reply.
await textbox.fill("salaam");
await submit_button.click();
await expect(textbox).toHaveValue("");
await expect(page.locator(".bot").nth(1)).toContainText("You typed: salaam");
// Clear empties the whole conversation.
await clear_button.click();
await expect
.poll(async () => page.locator(".bot.message").count(), { timeout: 5000 })
.toBe(0);
});

View File

@ -0,0 +1,67 @@
// Playwright end-to-end test for gr.ChatInterface backed by a streaming
// (generator) function. The demo prefixes every reply with a global run
// counter ("Run N - You typed: ..."), so re-runs triggered by Retry are
// distinguishable from the original replies.
import { test, expect } from "@gradio/tootils";
test("chatinterface works with streaming functions and all buttons behave as expected", async ({
page
}) => {
// Locators for the four ChatInterface action buttons and the input textbox.
const submit_button = page.getByRole("button", { name: "Submit" });
const retry_button = page.getByRole("button", { name: "🔄 Retry" });
const undo_button = page.getByRole("button", { name: "↩️ Undo" });
const clear_button = page.getByRole("button", { name: "🗑️ Clear" });
const textbox = page.getByPlaceholder("Type a message...");
// Run 1: submit clears the textbox and the streamed reply appears.
await textbox.fill("hello");
await submit_button.click();
await expect(textbox).toHaveValue("");
const expected_text_el_0 = page.locator(".bot p", {
hasText: "Run 1 - You typed: hello"
});
await expect(expected_text_el_0).toBeVisible();
await expect
.poll(async () => page.locator(".bot.message").count(), { timeout: 2000 })
.toBe(1);
// Run 2: a second exchange appends a second bot message.
await textbox.fill("hi");
await submit_button.click();
await expect(textbox).toHaveValue("");
const expected_text_el_1 = page.locator(".bot p", {
hasText: "Run 2 - You typed: hi"
});
await expect(expected_text_el_1).toBeVisible();
await expect
.poll(async () => page.locator(".bot.message").count(), { timeout: 2000 })
.toBe(2);
// Undo removes the last exchange and restores its text to the textbox.
await undo_button.click();
await expect
.poll(async () => page.locator(".message.bot").count(), { timeout: 5000 })
.toBe(1);
await expect(textbox).toHaveValue("hi");
// Retry re-runs the remaining message; the run counter proves it really ran again.
await retry_button.click();
const expected_text_el_2 = page.locator(".bot p", {
hasText: "Run 3 - You typed: hello"
});
// FIX: `await` was missing on this assertion, leaving it as a floating promise
// whose outcome could not reliably fail the test.
await expect(textbox).toHaveValue("");
await expect(expected_text_el_2).toBeVisible();
await expect
.poll(async () => page.locator(".message.bot").count(), { timeout: 5000 })
.toBe(1);
// Run 4: a fresh message after retry appends as the second bot reply.
await textbox.fill("hi");
await submit_button.click();
await expect(textbox).toHaveValue("");
const expected_text_el_3 = page.locator(".bot p", {
hasText: "Run 4 - You typed: hi"
});
await expect(expected_text_el_3).toBeVisible();
await expect
.poll(async () => page.locator(".bot.message").count(), { timeout: 2000 })
.toBe(2);
// Clear empties the whole conversation.
await clear_button.click();
await expect
.poll(async () => page.locator(".bot.message").count(), { timeout: 5000 })
.toBe(0);
});