Add show_recording_waveform to Audio (#6551)
* add show_recording_waveform
* add changeset
* add animation
* Refactor audio component and waveform options
* formatting
* add margin before audio controls
* amend default values
* expose gr.WaveformOptions
* Tweak waveform options types and handle none
* add waveform_options to reverse_audio
* tweak bool typing
* notebook

---------

Co-authored-by: gradio-pr-bot <gradio-pr-bot@users.noreply.github.com>
Co-authored-by: Abubakar Abid <abubakar@huggingface.co>
parent d548202d2b
commit 8fc562a8ab
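For context, a minimal sketch of how the new option is meant to be used from Python. gr.Audio, gr.WaveformOptions, and show_recording_waveform are what this commit adds or exposes; the passthrough function and Interface wiring below are only illustrative.

import gradio as gr

# Hide the live waveform that is normally drawn while recording from the microphone.
# show_recording_waveform defaults to True, so omitting it keeps the previous behaviour.
mic = gr.Audio(
    sources=["microphone"],
    waveform_options=gr.WaveformOptions(show_recording_waveform=False),
)

demo = gr.Interface(fn=lambda audio: audio, inputs=mic, outputs="audio")

if __name__ == "__main__":
    demo.launch()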
.changeset/spotty-jokes-show.md (new file, +6)
@@ -0,0 +1,6 @@
+---
+"@gradio/audio": patch
+"gradio": patch
+---
+
+fix:Add `show_recording_waveform` to Audio
@@ -1 +1 @@
{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: reverse_audio"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "os.mkdir('audio')\n", "!wget -q -O audio/cantina.wav https://github.com/gradio-app/gradio/raw/main/demo/reverse_audio/audio/cantina.wav\n", "!wget -q -O audio/recording1.wav https://github.com/gradio-app/gradio/raw/main/demo/reverse_audio/audio/recording1.wav"]}, {"cell_type": "code", "execution_count": null, "id": "44380577570523278879349135829904343037", "metadata": {}, "outputs": [], "source": ["import os\n", "\n", "import numpy as np\n", "\n", "import gradio as gr\n", "\n", "\n", "def reverse_audio(audio):\n", " sr, data = audio\n", " return (sr, np.flipud(data))\n", "\n", "\n", "demo = gr.Interface(fn=reverse_audio, \n", " inputs=\"microphone\", \n", " outputs=\"audio\", \n", " examples=[\n", " \"https://samplelib.com/lib/preview/mp3/sample-3s.mp3\",\n", " os.path.join(os.path.abspath(''), \"audio/recording1.wav\")\n", " ], cache_examples=True)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: reverse_audio"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "os.mkdir('audio')\n", "!wget -q -O audio/cantina.wav https://github.com/gradio-app/gradio/raw/main/demo/reverse_audio/audio/cantina.wav\n", "!wget -q -O audio/recording1.wav https://github.com/gradio-app/gradio/raw/main/demo/reverse_audio/audio/recording1.wav"]}, {"cell_type": "code", "execution_count": null, "id": "44380577570523278879349135829904343037", "metadata": {}, "outputs": [], "source": ["import os\n", "\n", "import numpy as np\n", "\n", "import gradio as gr\n", "\n", "\n", "def reverse_audio(audio):\n", " sr, data = audio\n", " return (sr, np.flipud(data))\n", "\n", "\n", "input_audio = gr.Audio(\n", " sources=[\"microphone\"],\n", " waveform_options=gr.WaveformOptions(\n", " waveform_color=\"#01C6FF\",\n", " waveform_progress_color=\"#0066B4\",\n", " skip_length=2,\n", " show_controls=False,\n", " ),\n", ")\n", "demo = gr.Interface(\n", " fn=reverse_audio,\n", " inputs=input_audio,\n", " outputs=\"audio\",\n", " examples=[\n", " \"https://samplelib.com/lib/preview/mp3/sample-3s.mp3\",\n", " os.path.join(os.path.abspath(''), \"audio/recording1.wav\"),\n", " ],\n", " cache_examples=True,\n", ")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
@@ -10,13 +10,25 @@ def reverse_audio(audio):
     return (sr, np.flipud(data))
 
 
-demo = gr.Interface(fn=reverse_audio,
-                    inputs="microphone",
-                    outputs="audio",
-                    examples=[
-                        "https://samplelib.com/lib/preview/mp3/sample-3s.mp3",
-                        os.path.join(os.path.dirname(__file__), "audio/recording1.wav")
-                    ], cache_examples=True)
+input_audio = gr.Audio(
+    sources=["microphone"],
+    waveform_options=gr.WaveformOptions(
+        waveform_color="#01C6FF",
+        waveform_progress_color="#0066B4",
+        skip_length=2,
+        show_controls=False,
+    ),
+)
+demo = gr.Interface(
+    fn=reverse_audio,
+    inputs=input_audio,
+    outputs="audio",
+    examples=[
+        "https://samplelib.com/lib/preview/mp3/sample-3s.mp3",
+        os.path.join(os.path.dirname(__file__), "audio/recording1.wav"),
+    ],
+    cache_examples=True,
+)
 
 if __name__ == "__main__":
     demo.launch()
@@ -57,6 +57,7 @@ from gradio.components import (
     Video,
     component,
 )
+from gradio.components.audio import WaveformOptions
 from gradio.data_classes import FileData
 from gradio.events import EventData, LikeData, SelectData, on
 from gradio.exceptions import Error
@@ -2,8 +2,9 @@
 
 from __future__ import annotations
 
+import dataclasses
 from pathlib import Path
-from typing import Any, Callable, Literal, TypedDict
+from typing import Any, Callable, Literal
 
 import numpy as np
 import requests
@@ -18,11 +19,13 @@ from gradio.events import Events
 set_documentation_group("component")
 
 
-class WaveformOptions(TypedDict, total=False):
-    waveform_color: str
-    waveform_progress_color: str
-    show_controls: bool
-    skip_length: int
+@dataclasses.dataclass
+class WaveformOptions:
+    waveform_color: str | None = None
+    waveform_progress_color: str | None = None
+    show_controls: bool = False
+    skip_length: str | None = None
+    show_recording_waveform: bool = True
 
 
 @document()
@@ -80,7 +83,7 @@ class Audio(
         show_share_button: bool | None = None,
         min_length: int | None = None,
         max_length: int | None = None,
-        waveform_options: WaveformOptions | None = None,
+        waveform_options: WaveformOptions | dict | None = None,
     ):
         """
         Parameters:
@@ -142,7 +145,13 @@
             if show_share_button is None
             else show_share_button
         )
-        self.waveform_options = waveform_options
+        if waveform_options is None:
+            self.waveform_options = WaveformOptions()
+        self.waveform_options = (
+            WaveformOptions(**waveform_options)
+            if isinstance(waveform_options, dict)
+            else waveform_options
+        )
         self.min_length = min_length
         self.max_length = max_length
         super().__init__(
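Since the constructor signature is now `waveform_options: WaveformOptions | dict | None` and dicts are coerced with WaveformOptions(**waveform_options), the dataclass and plain-dict forms should be interchangeable. A sketch based on the hunk above; the field names are the ones defined in this commit.

import gradio as gr

# Dataclass form, using the newly exported gr.WaveformOptions.
audio_a = gr.Audio(
    waveform_options=gr.WaveformOptions(show_recording_waveform=False, show_controls=True)
)

# Equivalent dict form; the Audio constructor converts it to a WaveformOptions dataclass.
audio_b = gr.Audio(
    waveform_options={"show_recording_waveform": False, "show_controls": True}
)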
@@ -42,7 +42,7 @@
 />
 
 <Story
-    name="Audio with autoplay"
+    name="with autoplay"
     args={{
        value: {
            path: "https://audio-samples.github.io/samples/mp3/blizzard_unconditional/sample-0.mp3",
@@ -55,3 +55,15 @@
        autoplay: true
    }}
 />
+
+<Story
+    name="with hidden recording waveform"
+    args={{
+        value: null,
+        interactive: true,
+        sources: ["microphone"],
+        waveform_options: {
+            show_recording_waveform: false
+        }
+    }}
+/>
@@ -210,7 +210,14 @@
     {#if active_source === "microphone"}
        <ModifyUpload {i18n} on:clear={clear} absolute={true} />
        {#if streaming}
-           <StreamAudio {record} {recording} {stop} {i18n} {waveform_settings} />
+           <StreamAudio
+               {record}
+               {recording}
+               {stop}
+               {i18n}
+               {waveform_settings}
+               {waveform_options}
+           />
        {:else}
            <AudioRecorder
                bind:mode
@@ -194,6 +194,10 @@
        padding: var(--size-3);
    }
 
+   :global(::part(wrapper)) {
+       margin-bottom: var(--size-2);
+   }
+
    .timestamps {
        display: flex;
        justify-content: space-between;
@@ -17,7 +17,9 @@
        event: "stream" | "change" | "stop_recording"
    ) => Promise<void> | undefined;
    export let waveform_settings: Record<string, any>;
-   export let waveform_options: WaveformOptions;
+   export let waveform_options: WaveformOptions = {
+       show_recording_waveform: true
+   };
    export let handle_reset_value: () => void;
 
    let micWaveform: WaveSurfer;
@@ -69,8 +71,10 @@
        start_interval();
        timing = true;
        dispatch("start_recording");
-       let waveformCanvas = microphoneContainer;
-       if (waveformCanvas) waveformCanvas.style.display = "block";
+       if (waveform_options.show_recording_waveform) {
+           let waveformCanvas = microphoneContainer;
+           if (waveformCanvas) waveformCanvas.style.display = "block";
+       }
    });
 
    $: record?.on("record-end", async (blob) => {
@@ -124,14 +128,13 @@
    });
 
    const create_mic_waveform = (): void => {
-       const recorder = microphoneContainer;
-       if (recorder) recorder.innerHTML = "";
+       if (microphoneContainer) microphoneContainer.innerHTML = "";
        if (micWaveform !== undefined) micWaveform.destroy();
-       if (!recorder) return;
+       if (!microphoneContainer) return;
        micWaveform = WaveSurfer.create({
            ...waveform_settings,
            normalize: false,
-           container: recorder
+           container: microphoneContainer
        });
 
        record = micWaveform.registerPlugin(RecordPlugin.create());
@@ -200,7 +203,7 @@
    />
    <div bind:this={recordingContainer} data-testid="recording-waveform" />
 
-   {#if timing || recordedAudio}
+   {#if (timing || recordedAudio) && waveform_options.show_recording_waveform}
        <div class="timestamps">
            <time bind:this={timeRef} class="time">0:00</time>
            <div>
@@ -216,8 +219,14 @@
        </div>
    {/if}
 
-   {#if micWaveform && !recordedAudio}
-       <WaveformRecordControls bind:record {i18n} />
+   {#if microphoneContainer && !recordedAudio}
+       <WaveformRecordControls
+           bind:record
+           {i18n}
+           {timing}
+           show_recording_waveform={waveform_options.show_recording_waveform}
+           record_time={format_time(seconds)}
+       />
    {/if}
 
    {#if recordingWaveform && recordedAudio}
@@ -14,6 +14,10 @@
    let stopButton: HTMLButtonElement;
    let stopButtonPaused: HTMLButtonElement;
 
+   export let record_time: string;
+   export let show_recording_waveform: boolean | undefined;
+   export let timing = false;
+
    const dispatch = createEventDispatcher<{
        error: string;
    }>();
@@ -120,6 +124,9 @@
            class="resume-button"
            on:click={() => record.resumeRecording()}>{i18n("audio.resume")}</button
        >
+       {#if timing && !show_recording_waveform}
+           <time class="duration-button duration">{record_time}</time>
+       {/if}
    </div>
 
    <select
@@ -145,6 +152,7 @@
        border-radius: var(--radius-full);
        font-size: var(--text-md);
        border: 1px solid var(--neutral-400);
+       margin: var(--size-1) var(--size-1) 0 0;
    }
    .controls {
        display: flex;
@@ -156,7 +164,7 @@
 
    .controls select {
        text-overflow: ellipsis;
-       margin: var(--size-2) 0;
+       max-width: var(--size-40);
    }
 
    @media (max-width: 375px) {
@@ -168,7 +176,7 @@
    .wrapper {
        display: flex;
        align-items: center;
-       justify-content: center;
+       flex-wrap: wrap;
    }
 
    .record {
@@ -183,7 +191,7 @@
        border-radius: var(--radius-3xl);
        align-items: center;
        border: 1px solid var(--neutral-400);
-       margin-right: 5px;
+       margin: var(--size-1) var(--size-1) 0 0;
    }
 
    .stop-button-paused::before {
@@ -212,7 +220,7 @@
        border-radius: var(--radius-3xl);
        align-items: center;
        border: 1px solid var(--primary-600);
-       margin-right: 5px;
+       margin: var(--size-1) var(--size-1) 0 0;
    }
 
    .record-button::before {
@@ -265,6 +273,7 @@
        border: 1px solid var(--neutral-400);
        border-radius: var(--radius-3xl);
        padding: var(--spacing-md);
+       margin: var(--size-1) var(--size-1) 0 0;
    }
 
    .resume-button {
@@ -276,6 +285,19 @@
        padding: var(--spacing-xl);
        line-height: 1px;
        font-size: var(--text-md);
+       margin: var(--size-1) var(--size-1) 0 0;
    }
 
+   .duration {
+       display: flex;
+       height: var(--size-8);
+       width: var(--size-20);
+       border: 1px solid var(--neutral-400);
+       border-radius: var(--radius-3xl);
+       padding: var(--spacing-md);
+       align-items: center;
+       justify-content: center;
+       margin: var(--size-1) var(--size-1) 0 0;
+   }
+
    :global(::part(region)) {
@@ -4,4 +4,5 @@ export type WaveformOptions = {
    show_controls?: boolean;
    skip_length?: number;
    trim_region_color?: string;
+   show_recording_waveform?: boolean;
 };
@@ -3,6 +3,7 @@
    import type { I18nFormatter } from "@gradio/utils";
    import WaveSurfer from "wavesurfer.js";
    import RecordPlugin from "wavesurfer.js/dist/plugins/record.js";
+   import type { WaveformOptions } from "../shared/types";
 
    export let recording = false;
    export let paused_recording = false;
@@ -10,21 +11,26 @@
    export let record: () => void;
    export let i18n: I18nFormatter;
    export let waveform_settings: Record<string, any>;
+   export let waveform_options: WaveformOptions = {
+       show_recording_waveform: true
+   };
 
    let micWaveform: WaveSurfer;
    let waveformRecord: RecordPlugin;
+   let microphoneContainer: HTMLDivElement;
 
    onMount(() => {
        create_mic_waveform();
    });
 
    const create_mic_waveform = (): void => {
        if (micWaveform !== undefined) micWaveform.destroy();
 
+       if (!microphoneContainer) return;
        micWaveform = WaveSurfer.create({
            ...waveform_settings,
            height: 100,
-           container: "#microphone"
+           container: microphoneContainer
        });
 
        waveformRecord = micWaveform.registerPlugin(RecordPlugin.create());
@@ -32,12 +38,17 @@
 </script>
 
 <div class="mic-wrap">
-   <div id="microphone" style:display={recording ? "block" : "none"} />
+   {#if waveform_options.show_recording_waveform}
+       <div
+           bind:this={microphoneContainer}
+           style:display={recording ? "block" : "none"}
+       />
+   {/if}
    {#if recording}
        <button
            class={paused_recording ? "stop-button-paused" : "stop-button"}
            on:click={() => {
-               waveformRecord.stopMic();
+               waveformRecord?.stopMic();
                stop();
            }}
        >
|
||||
<button
|
||||
class="record-button"
|
||||
on:click={() => {
|
||||
waveformRecord.startMic();
|
||||
waveformRecord?.startMic();
|
||||
record();
|
||||
}}
|
||||
>
|
||||
@@ -129,4 +140,19 @@
        align-items: center;
        border: 1px solid var(--neutral-400);
    }
+
+   @keyframes scaling {
+       0% {
+           background-color: var(--primary-600);
+           scale: 1;
+       }
+       50% {
+           background-color: var(--primary-600);
+           scale: 1.2;
+       }
+       100% {
+           background-color: var(--primary-600);
+           scale: 1;
+       }
+   }
 </style>