Merge branch 'master' of github.com:gradio-app/gradio-UI

aliabd 2020-06-30 17:25:08 -07:00
commit f691e0fe29
41 changed files with 305 additions and 201 deletions

View File

@ -12,6 +12,7 @@ import time
import warnings
import json
import datetime
import os
# Where to find the static resources associated with each template.
# BASE_INPUT_INTERFACE_TEMPLATE_PATH = 'static/js/interfaces/input/{}.js'
@ -51,6 +52,12 @@ class AbstractInput(ABC):
"""
return inp
def process_example(self, example):
"""
Preprocess example for UI
"""
return example
@classmethod
def get_shortcut_implementations(cls):
"""
@ -58,13 +65,6 @@ class AbstractInput(ABC):
"""
return {}
@classmethod
def process_example(self, example):
"""
Preprocess example for UI
"""
return example
class Sketchpad(AbstractInput):
def __init__(self, cast_to="numpy", shape=(28, 28), invert_colors=True,
@ -80,6 +80,12 @@ class Sketchpad(AbstractInput):
self.sample_inputs = sample_inputs
super().__init__(label)
@classmethod
def get_shortcut_implementations(cls):
return {
"sketchpad": {},
}
def preprocess(self, inp):
"""
Default preprocessing method for the SketchPad is to convert the sketch to black and white and resize to 28x28
@ -99,6 +105,9 @@ class Sketchpad(AbstractInput):
array = array.astype(self.dtype)
return array
def process_example(self, example):
return preprocessing_utils.convert_file_to_base64(example)
class Webcam(AbstractInput):
def __init__(self, image_width=224, image_height=224, num_channels=3, label=None):
@ -296,6 +305,8 @@ class Image(AbstractInput):
self.num_channels)
return array
def process_example(self, example):
return preprocessing_utils.convert_file_to_base64(example)
class Microphone(AbstractInput):
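A minimal sketch (not part of the diff) of how the new process_example hook behaves: inputs that take raw values pass their example through unchanged, while file-based inputs such as Image convert the example path into a base64 data URI for the UI. It assumes this version of gradio is installed and that the cheetah image added later in this commit exists on disk.

import gradio as gr

txt = gr.inputs.Textbox()
img = gr.inputs.Image()

# Non-file inputs inherit the default pass-through from AbstractInput.
print(txt.process_example("some text"))
# File-based inputs return a data URI; the slice shows the "data:image/jpg;base64," prefix.
print(img.process_example("demo/images/cheetah1.jpg")[:22])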

View File

@ -15,6 +15,7 @@ import pkg_resources
import requests
import random
import time
import inspect
from IPython import get_ipython
LOCALHOST_IP = "0.0.0.0"
@ -84,7 +85,7 @@ class Interface:
self.examples = examples
def get_config_file(self):
return {
config = {
"input_interfaces": [
(iface.__class__.__name__.lower(), iface.get_template_context())
for iface in self.input_interfaces],
@ -99,6 +100,15 @@ class Interface:
"description": self.description,
"thumbnail": self.thumbnail
}
try:
param_names = inspect.getfullargspec(self.predict[0])[0]
for iface, param in zip(config["input_interfaces"], param_names):
if not iface[1]["label"]:
iface[1]["label"] = param.replace("_", " ")
except ValueError:
pass
return config
def process(self, raw_input):
processed_input = [input_interface.preprocess(
@ -295,7 +305,15 @@ class Interface:
config = self.get_config_file()
config["share_url"] = share_url
config["examples"] = self.examples
processed_examples = []
for example_set in self.examples:
processed_set = []
for iface, example in zip(self.input_interfaces, example_set):
processed_set.append(iface.process_example(example))
processed_examples.append(processed_set)
config["examples"] = processed_examples
networking.set_config(config, output_directory)
return httpd, path_to_local_server, share_url
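A standalone sketch of the label-inference logic this hunk adds to get_config_file(): the predict function's parameter names become default labels for any input interface created without an explicit label. The function and interface tuples below are illustrative; the try/except guard mirrors the hunk above.

import inspect

def predict(first_name, last_name):
    return first_name + " " + last_name

input_interfaces = [("textbox", {"label": None}), ("textbox", {"label": "Surname"})]

try:
    param_names = inspect.getfullargspec(predict)[0]
    for iface, param in zip(input_interfaces, param_names):
        if not iface[1]["label"]:
            iface[1]["label"] = param.replace("_", " ")
except ValueError:  # same guard as in the hunk above
    pass

print(input_interfaces)
# [('textbox', {'label': 'first name'}), ('textbox', {'label': 'Surname'})]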

View File

@ -89,7 +89,7 @@ class KeyValues(AbstractOutput):
class Textbox(AbstractOutput):
def __init__(self, lines=None, placeholder=None, label=None):
def __init__(self, lines=1, placeholder=None, label=None):
self.lines = lines
self.placeholder = placeholder
super().__init__(label)

View File

@ -17,6 +17,13 @@ def decode_base64_to_image(encoding):
return Image.open(BytesIO(base64.b64decode(image_encoded)))
def convert_file_to_base64(img):
with open(img, "rb") as image_file:
encoded_string = base64.b64encode(image_file.read())
base64_str = str(encoded_string, 'utf-8')
type = img.split(".")[-1]
return "data:image/" + type + ";base64," + base64_str
def encode_plot_to_base64(plt):
with BytesIO() as output_bytes:
plt.savefig(output_bytes, format="png")
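An illustrative round trip through the new helper: a file on disk becomes a self-contained data URI that the examples table can render directly. The path is an assumption (one of the demo images added later in this commit); note that the helper reuses the raw file extension, so a .jpg file is labelled image/jpg rather than image/jpeg.

from gradio import preprocessing_utils

uri = preprocessing_utils.convert_file_to_base64("demo/images/lion.jpg")
header, payload = uri.split(",", 1)
print(header)        # data:image/jpg;base64
print(len(payload))  # the base64 payload is roughly 4/3 of the file size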

View File

@ -80,3 +80,27 @@ button.secondary {
#featured_history img {
height: 60px;
}
#examples table {
border-collapse: collapse;
font-family: monospace;
padding: 8px;
background-color: whitesmoke;
border-right: solid 4px whitesmoke;
border-left: solid 4px whitesmoke;
}
#examples th {
padding: 8px;
text-align: left;
font-size: 18px;
}
#examples td {
padding: 8px;
cursor: pointer;
}
#examples tbody tr:nth-child(odd) {
background-color: white;
}
#examples tbody tr:hover {
background-color: lightgray;
}

View File

@ -10,5 +10,12 @@ const checkbox = {
},
clear: function() {
this.target.find("input").prop("checked", false);
},
load_example: function(data) {
if (data) {
this.target.find("input").prop("checked", true);
} else {
this.target.find("input").prop("checked", false);
}
}
}

View File

@ -6,8 +6,9 @@ const checkbox_group = {
html = "<div class='checkbox_group'>"
for ([index, choice] of opts.choices.entries()) {
html += `
<input id="${this.id}_${index}" type="checkbox" name="${this.id}" value="${index}">
<label for="${this.id}_${index}">${choice}</label>`;
<label for="${this.id}_${index}">${choice}
<input id="${this.id}_${index}" type="checkbox" name="${this.id}" value="${index}">
</label>`;
}
html += "</div>"
this.target.html(html);
@ -22,5 +23,17 @@ const checkbox_group = {
},
clear: function() {
this.target.find("input").prop("checked", false);
},
load_example: function(data) {
for (let [i, choice] of this.choices.entries()) {
let child = i + 1;
let checkbox = this.target.find("label:nth-child("+child+") input");
console.log(data, choice, child)
if (data.includes(choice)) {
checkbox.prop("checked", true);
} else {
checkbox.prop("checked", false);
}
}
}
}

View File

@ -18,5 +18,9 @@ const dropdown = {
},
clear: function() {
this.target.find("option").prop("selected", false);
},
load_example: function(data) {
let child = this.choices.indexOf(data) + 1;
this.target.find("option:nth-child(" + child + ")").prop("selected", true);
}
}

View File

@ -76,17 +76,6 @@ const image_input = {
io.set_image_data(io.tui_editor.toDataURL(), /*update_editor=*/false);
}
});
$(".tests").html(this.test_html);
$(".rotate_test").click(function () {
if (io.image_data) {
io.io_master.test("rotation", io.image_data);
}
})
$(".light_test").click(function () {
if (io.image_data) {
io.io_master.test("lighting", io.image_data);
}
})
},
submit: function() {
var io = this;
@ -147,5 +136,15 @@ const image_input = {
io.set_image_data(this.result, /*update_editor=*/true);
io.state = "IMAGE_LOADED"
}
},
load_example_preview: function(data) {
return "<img src="+data+" height=100>"
},
load_example: function(data) {
let io = this;
io.target.find(".upload_zone").hide();
io.target.find(".image_display").removeClass("hide");
io.set_image_data(data, /*update_editor=*/true);
io.state = "IMAGE_LOADED"
}
}

View File

@ -20,5 +20,9 @@ const radio = {
},
clear: function() {
this.target.find("input").prop("checked", false);
},
load_example: function(data) {
let child = this.choices.indexOf(data) + 1;
this.target.find("input:nth-child("+child+")").prop("checked", true);
}
}

View File

@ -37,10 +37,11 @@ const sketchpad_input = {
this.context.clearRect(0, 0, this.context.canvas.width, this.context.
canvas.height);
},
renderFeatured: function(data) {
return `<img src=${data}>`;
load_example_preview: function(data) {
return "<img src="+data+" height=100>"
},
loadFeatured: function(data) {
load_example: function(data) {
this.clear();
let ctx = this.context;
var img = new Image;
let dimension = this.target.find(".canvas_holder canvas").width();
@ -49,6 +50,5 @@ const sketchpad_input = {
ctx.drawImage(img,0,0,dimension,dimension);
};
img.src = data;
this.target.find(".saliency_holder").addClass("hide");
}
}

View File

@ -6,8 +6,9 @@ const slider = {
<span class="max"></span>:
<div class="value"></div>
</div>`,
init: function(opts) {this
init: function(opts) {
let io = this;
this.minimum = opts.minimum;
this.target.css("height", "auto");
this.target.find(".min").text(opts.minimum);
this.target.find(".max").text(opts.maximum);
@ -32,6 +33,10 @@ const slider = {
this.io_master.input(this.id, parseFloat(value));
},
clear: function() {
this.target.find("input").prop("checked", false);
this.target.find("input").val(this.minimum);
},
load_example: function(data) {
this.target.find("input").val(data);
this.target.find(".value").text(data);
}
}

View File

@ -33,11 +33,7 @@ const textbox_input = {
this.target.find(".input_text_saliency").hide();
this.target.find(".input_text").show();
},
renderFeatured: function(data) {
return data;
},
loadFeatured: function(data) {
this.clear();
this.target.find(".input_text").val(data);
load_example: function(data) {
this.target.find(".input_text").val(data);
}
}

View File

@ -81,7 +81,7 @@
<script src="../static/js/gradio.js"></script>
<script>
$.getJSON("static/config.json", function(config) {
gradio(config, function(data) {
io = gradio(config, function(data) {
return new Promise((resolve, reject) => {
$.ajax({type: "POST",
url: "/api/predict/",
@ -115,15 +115,24 @@
}
html += "</thead>";
html += "<tbody>";
for (let example of config["examples"]) {
html += "<tr>";
for (let col of example) {
for (let [i, example] of config["examples"].entries()) {
html += "<tr row="+i+">";
for (let [j, col] of example.entries()) {
if (io.input_interfaces[j].load_example_preview) {
col = io.input_interfaces[j].load_example_preview(col);
}
html += "<td>" + col + "</td>";
}
html += "</tr>";
}
html += "</tbody>";
$("#examples table").html(html);
$("#examples tr").click(function() {
let example_id = parseInt($(this).attr("row"));
for (let [i, value] of config["examples"][example_id].entries()) {
io.input_interfaces[i].load_example(value);
}
})
};
});
const copyToClipboard = str => {
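For reference, an illustrative sketch (written as Python literals) of what config["examples"] contains by the time this script reads static/config.json: cells for non-file inputs pass through process_example unchanged, while Image cells arrive as data URIs, which is why load_example_preview can drop them straight into an img tag. The first row is taken from the form demo further down; the image payload is abridged.

examples_form_demo = [
    [2, "cat", "park", ["ran", "swam"], True],      # plain values are stored as-is
]
examples_image_demo = [
    ["data:image/jpg;base64,/9j/4AAQSkZJRg..."],    # file paths were converted to data URIs
]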

View File

@ -1,24 +1,23 @@
import gradio as gr
from time import sleep
def answer_question(text1, text2):
sleep(2)
return text1[::-1], [
("Value 1", 12.3),
("Section", "DF3"),
("Confidence", 100),
]
def answer_question(quantity, animal, place, activity_list, morning):
return f"""The {quantity} {animal}s went to the {place} where they {" and ".join(activity_list)} until the {"morning" if morning else "night"}"""
gr.Interface(answer_question,
[
gr.inputs.Textbox(label="text 1", lines=4),
gr.inputs.Textbox(label="text 2", lines=4),
], [
gr.outputs.Textbox(label="out", lines=8),
"key_values"
], examples=[
["things1", "things2"],
["things10", "things20"],
gr.inputs.Slider(2, 20),
gr.inputs.Dropdown(["cat", "dog", "bird"]),
gr.inputs.Radio(["park", "zoo", "road"]),
gr.inputs.CheckboxGroup(["ran", "swam", "ate", "slept"]),
gr.inputs.Checkbox(label="Is it the morning?"),
],
gr.outputs.Textbox(label="out", lines=8),
examples=[
[2, "cat", "park", ["ran", "swam"], True],
[4, "dog", "zoo", ["ate", "swam"], False],
[10, "bird", "road", ["ran"], False],
[8, "cat", "zoo", ["ate"], True],
]
).launch()

View File

@ -14,7 +14,12 @@ def flip2(image):
return np.fliplr(image), time() - start
gr.Interface(flip,
gr.Interface(flip2,
"image",
["image", "label"
]).launch()
["image", "text"],
examples=[
["images/cheetah1.jpg"],
["images/cheetah2.jpg"],
["images/lion.jpg"],
]
).launch()

BIN demo/images/cheetah1.jpg (new file, 20 KiB; binary file not shown)

BIN demo/images/cheetah2.jpg (new file, 71 KiB; binary file not shown)

BIN demo/images/lion.jpg (new file, 18 KiB; binary file not shown)

BIN dist/gradio-0.9.3-py3.7.egg (new vendored file; binary file not shown)

BIN dist/gradio-0.9.4-py3.7.egg (new vendored file; binary file not shown)

View File

@ -54,7 +54,6 @@ gradio/static/js/gradio.js
gradio/static/js/utils.js
gradio/static/js/interfaces/input/checkbox.js
gradio/static/js/interfaces/input/checkbox_group.js
gradio/static/js/interfaces/input/csv.js
gradio/static/js/interfaces/input/dropdown.js
gradio/static/js/interfaces/input/image.js
gradio/static/js/interfaces/input/microphone.js
@ -82,7 +81,6 @@ gradio/static/js/vendor/tui-image-editor.js
gradio/static/js/vendor/wavesurfer.min.js
gradio/static/js/vendor/webcam.min.js
gradio/static/js/vendor/white-theme.js
gradio/templates/bulk_data.html
gradio/templates/index.html
test/test_inputs.py
test/test_interface.py

View File

@ -12,6 +12,7 @@ import time
import warnings
import json
import datetime
import os
# Where to find the static resources associated with each template.
# BASE_INPUT_INTERFACE_TEMPLATE_PATH = 'static/js/interfaces/input/{}.js'
@ -51,6 +52,12 @@ class AbstractInput(ABC):
"""
return inp
def process_example(self, example):
"""
Preprocess example for UI
"""
return example
@classmethod
def get_shortcut_implementations(cls):
"""
@ -58,13 +65,6 @@ class AbstractInput(ABC):
"""
return {}
@classmethod
def process_example(self, example):
"""
Preprocess example for UI
"""
return example
class Sketchpad(AbstractInput):
def __init__(self, cast_to="numpy", shape=(28, 28), invert_colors=True,
@ -80,6 +80,12 @@ class Sketchpad(AbstractInput):
self.sample_inputs = sample_inputs
super().__init__(label)
@classmethod
def get_shortcut_implementations(cls):
return {
"sketchpad": {},
}
def preprocess(self, inp):
"""
Default preprocessing method for the SketchPad is to convert the sketch to black and white and resize to 28x28
@ -99,6 +105,9 @@ class Sketchpad(AbstractInput):
array = array.astype(self.dtype)
return array
def process_example(self, example):
return preprocessing_utils.convert_file_to_base64(example)
class Webcam(AbstractInput):
def __init__(self, image_width=224, image_height=224, num_channels=3, label=None):
@ -296,6 +305,8 @@ class Image(AbstractInput):
self.num_channels)
return array
def process_example(self, example):
return preprocessing_utils.convert_file_to_base64(example)
class Microphone(AbstractInput):

View File

@ -15,6 +15,7 @@ import pkg_resources
import requests
import random
import time
import inspect
from IPython import get_ipython
LOCALHOST_IP = "0.0.0.0"
@ -84,7 +85,7 @@ class Interface:
self.examples = examples
def get_config_file(self):
return {
config = {
"input_interfaces": [
(iface.__class__.__name__.lower(), iface.get_template_context())
for iface in self.input_interfaces],
@ -99,6 +100,15 @@ class Interface:
"description": self.description,
"thumbnail": self.thumbnail
}
try:
param_names = inspect.getfullargspec(self.predict[0])[0]
for iface, param in zip(config["input_interfaces"], param_names):
if not iface[1]["label"]:
iface[1]["label"] = param.replace("_", " ")
except ValueError:
pass
return config
def process(self, raw_input):
processed_input = [input_interface.preprocess(
@ -299,7 +309,15 @@ class Interface:
config = self.get_config_file()
config["share_url"] = share_url
config["examples"] = self.examples
processed_examples = []
for example_set in self.examples:
processed_set = []
for iface, example in zip(self.input_interfaces, example_set):
processed_set.append(iface.process_example(example))
processed_examples.append(processed_set)
config["examples"] = processed_examples
networking.set_config(config, output_directory)
return httpd, path_to_local_server, share_url

View File

@ -89,7 +89,7 @@ class KeyValues(AbstractOutput):
class Textbox(AbstractOutput):
def __init__(self, lines=None, placeholder=None, label=None):
def __init__(self, lines=1, placeholder=None, label=None):
self.lines = lines
self.placeholder = placeholder
super().__init__(label)

View File

@ -17,6 +17,13 @@ def decode_base64_to_image(encoding):
return Image.open(BytesIO(base64.b64decode(image_encoded)))
def convert_file_to_base64(img):
with open(img, "rb") as image_file:
encoded_string = base64.b64encode(image_file.read())
base64_str = str(encoded_string, 'utf-8')
type = img.split(".")[-1]
return "data:image/" + type + ";base64," + base64_str
def encode_plot_to_base64(plt):
with BytesIO() as output_bytes:
plt.savefig(output_bytes, format="png")

View File

@ -88,3 +88,27 @@ button.secondary {
#featured_history img {
height: 60px;
}
#examples table {
border-collapse: collapse;
font-family: monospace;
padding: 8px;
background-color: whitesmoke;
border-right: solid 4px whitesmoke;
border-left: solid 4px whitesmoke;
}
#examples th {
padding: 8px;
text-align: left;
font-size: 18px;
}
#examples td {
padding: 8px;
cursor: pointer;
}
#examples tbody tr:nth-child(odd) {
background-color: white;
}
#examples tbody tr:hover {
background-color: lightgray;
}

View File

@ -10,5 +10,12 @@ const checkbox = {
},
clear: function() {
this.target.find("input").prop("checked", false);
},
load_example: function(data) {
if (data) {
this.target.find("input").prop("checked", true);
} else {
this.target.find("input").prop("checked", false);
}
}
}

View File

@ -6,8 +6,9 @@ const checkbox_group = {
html = "<div class='checkbox_group'>"
for ([index, choice] of opts.choices.entries()) {
html += `
<input id="${this.id}_${index}" type="checkbox" name="${this.id}" value="${index}">
<label for="${this.id}_${index}">${choice}</label>`;
<label for="${this.id}_${index}">${choice}
<input id="${this.id}_${index}" type="checkbox" name="${this.id}" value="${index}">
</label>`;
}
html += "</div>"
this.target.html(html);
@ -22,5 +23,17 @@ const checkbox_group = {
},
clear: function() {
this.target.find("input").prop("checked", false);
},
load_example: function(data) {
for (let [i, choice] of this.choices.entries()) {
let child = i + 1;
let checkbox = this.target.find("label:nth-child("+child+") input");
console.log(data, choice, child)
if (data.includes(choice)) {
checkbox.prop("checked", true);
} else {
checkbox.prop("checked", false);
}
}
}
}

View File

@ -1,59 +0,0 @@
// var MAX_PREVIEW_ROWS = 100
//
// $('body').on('click', ".input_csv.drop_mode", function (e) {
// $(this).parent().find(".hidden_upload").click();
// })
//
// $('body').on('drag dragstart dragend dragover dragenter dragleave drop', ".input_csv.drop_mode", function(e) {
// e.preventDefault();
// e.stopPropagation();
// })
//
// function loadTableFromFiles(files) {
// Papa.parse(files[0], {
// complete: function(results) {
// $(".input_csv").hide()
// $(".input_csv").removeClass("drop_mode")
// var data_array = results.data
// var table_html = ""
// for (var i = 0; i < data_array.length && i <= MAX_PREVIEW_ROWS; i++) {
// row = data_array[i]
// if (i == 0) {
// table_html += "<tr class='header'>"
// } else {
// table_html += "<tr>"
// }
// for (var c = 0; c < row.length; c++) {
// table_html += "<td>" + row[c] + "</td>"
// }
// table_html += "</tr>"
// }
// table_html += ""
// $(".csv_preview").html(table_html)
// $(".table_holder").show()
// }
// })
// }
//
// $(".input_csv").on('drop', function(e) {
// files = e.originalEvent.dataTransfer.files;
// loadTableFromFiles(files)
// });
//
// $(".hidden_upload").on("change", function() {
// var files = !!this.files ? this.files : []
// if (!files.length || !window.FileReader) {
// return
// }
// loadTableFromFiles(files)
// })
//
// $('body').on('click', '.clear', function(e) {
// $(".hidden_upload").prop("value", "")
// $(".input_csv").show()
// $(".input_csv").addClass("drop_mode")
// $(".table_holder").hide()
// })
// $('body').on('click', '.submit', function(e) {
// loadStart();
// })

View File

@ -18,5 +18,9 @@ const dropdown = {
},
clear: function() {
this.target.find("option").prop("selected", false);
},
load_example: function(data) {
let child = this.choices.indexOf(data) + 1;
this.target.find("option:nth-child(" + child + ")").prop("selected", true);
}
}

View File

@ -76,17 +76,6 @@ const image_input = {
io.set_image_data(io.tui_editor.toDataURL(), /*update_editor=*/false);
}
});
$(".tests").html(this.test_html);
$(".rotate_test").click(function () {
if (io.image_data) {
io.io_master.test("rotation", io.image_data);
}
})
$(".light_test").click(function () {
if (io.image_data) {
io.io_master.test("lighting", io.image_data);
}
})
},
submit: function() {
var io = this;
@ -147,5 +136,15 @@ const image_input = {
io.set_image_data(this.result, /*update_editor=*/true);
io.state = "IMAGE_LOADED"
}
},
load_example_preview: function(data) {
return "<img src="+data+" height=100>"
},
load_example: function(data) {
let io = this;
io.target.find(".upload_zone").hide();
io.target.find(".image_display").removeClass("hide");
io.set_image_data(data, /*update_editor=*/true);
io.state = "IMAGE_LOADED"
}
}

View File

@ -20,5 +20,9 @@ const radio = {
},
clear: function() {
this.target.find("input").prop("checked", false);
},
load_example: function(data) {
let child = this.choices.indexOf(data) + 1;
this.target.find("input:nth-child("+child+")").prop("checked", true);
}
}

View File

@ -37,10 +37,11 @@ const sketchpad_input = {
this.context.clearRect(0, 0, this.context.canvas.width, this.context.
canvas.height);
},
renderFeatured: function(data) {
return `<img src=${data}>`;
load_example_preview: function(data) {
return "<img src="+data+" height=100>"
},
loadFeatured: function(data) {
load_example: function(data) {
this.clear();
let ctx = this.context;
var img = new Image;
let dimension = this.target.find(".canvas_holder canvas").width();
@ -49,6 +50,5 @@ const sketchpad_input = {
ctx.drawImage(img,0,0,dimension,dimension);
};
img.src = data;
this.target.find(".saliency_holder").addClass("hide");
}
}

View File

@ -6,8 +6,9 @@ const slider = {
<span class="max"></span>:
<div class="value"></div>
</div>`,
init: function(opts) {this
init: function(opts) {
let io = this;
this.minimum = opts.minimum;
this.target.css("height", "auto");
this.target.find(".min").text(opts.minimum);
this.target.find(".max").text(opts.maximum);
@ -32,6 +33,10 @@ const slider = {
this.io_master.input(this.id, parseFloat(value));
},
clear: function() {
this.target.find("input").prop("checked", false);
this.target.find("input").val(this.minimum);
},
load_example: function(data) {
this.target.find("input").val(data);
this.target.find(".value").text(data);
}
}

View File

@ -33,11 +33,7 @@ const textbox_input = {
this.target.find(".input_text_saliency").hide();
this.target.find(".input_text").show();
},
renderFeatured: function(data) {
return data;
},
loadFeatured: function(data) {
this.clear();
this.target.find(".input_text").val(data);
load_example: function(data) {
this.target.find(".input_text").val(data);
}
}

View File

@ -1,33 +0,0 @@
<html lang="en">
<head>
<title>Gradio</title>
<link rel="stylesheet" href="../static/css/bulk_style.css">
</head>
<body>
<nav>
<a href="https://gradio.app"><img src="../static/img/logo_inline.png" /></a>
</nav>
<table id="bulk_rows">
<thead>
<th>Image</th>
<th>Label</th>
</thead>
</table>
<script src="../static/js/vendor/jquery.min.js"></script>
<script>
$.get("/static/flagged/data.txt", function(data) {
let lines = data.split("\n");
lines.forEach((line) => {
let row_data = JSON.parse(line);
let output = row_data["output"];
$("#bulk_rows").append(`
<tr class="bulk_row">
<td><img src="/static/flagged/${row_data["input"]}" /></td>
<td class="label">${output["label"] + (output["confidences"] ? ": " + Math.round(100 * output["confidences"][0]["confidence"]) + "%" : "")}</td>
</tr>
`)
})
});
</script>
</body>
</html>

View File

@ -81,7 +81,7 @@
<script src="../static/js/gradio.js"></script>
<script>
$.getJSON("static/config.json", function(config) {
gradio(config, function(data) {
io = gradio(config, function(data) {
return new Promise((resolve, reject) => {
$.ajax({type: "POST",
url: "/api/predict/",
@ -115,15 +115,24 @@
}
html += "</thead>";
html += "<tbody>";
for (let example of config["examples"]) {
html += "<tr>";
for (let col of example) {
for (let [i, example] of config["examples"].entries()) {
html += "<tr row="+i+">";
for (let [j, col] of example.entries()) {
if (io.input_interfaces[j].load_example_preview) {
col = io.input_interfaces[j].load_example_preview(col);
}
html += "<td>" + col + "</td>";
}
html += "</tr>";
}
html += "</tbody>";
$("#examples table").html(html);
$("#examples tr").click(function() {
let example_id = parseInt($(this).attr("row"));
for (let [i, value] of config["examples"][example_id].entries()) {
io.input_interfaces[i].load_example(value);
}
})
};
});
const copyToClipboard = str => {

View File

@ -11,7 +11,7 @@ PACKAGE_NAME = 'gradio'
class TestSketchpad(unittest.TestCase):
def test_path_exists(self):
inp = inputs.Sketchpad()
path = inputs.BASE_INPUT_INTERFACE_JS_PATH.format(inp.get_name())
path = inputs.BASE_INPUT_INTERFACE_JS_PATH.format(inp.__class__.__name__)
self.assertTrue(os.path.exists(os.path.join(PACKAGE_NAME, path)))
def test_preprocessing(self):
@ -23,7 +23,7 @@ class TestSketchpad(unittest.TestCase):
class TestWebcam(unittest.TestCase):
def test_path_exists(self):
inp = inputs.Webcam()
path = inputs.BASE_INPUT_INTERFACE_JS_PATH.format(inp.get_name())
path = inputs.BASE_INPUT_INTERFACE_JS_PATH.format(inp.__class__.__name__)
self.assertTrue(os.path.exists(os.path.join(PACKAGE_NAME, path)))
def test_preprocessing(self):
@ -35,7 +35,7 @@ class TestWebcam(unittest.TestCase):
class TestTextbox(unittest.TestCase):
def test_path_exists(self):
inp = inputs.Textbox()
path = inputs.BASE_INPUT_INTERFACE_JS_PATH.format(inp.get_name())
path = inputs.BASE_INPUT_INTERFACE_JS_PATH.format(inp.__class__.__name__)
self.assertTrue(os.path.exists(os.path.join(PACKAGE_NAME, path)))
def test_preprocessing(self):
@ -46,17 +46,17 @@ class TestTextbox(unittest.TestCase):
class TestImageUpload(unittest.TestCase):
def test_path_exists(self):
inp = inputs.ImageIn()
path = inputs.BASE_INPUT_INTERFACE_JS_PATH.format(inp.get_name())
inp = inputs.Image()
path = inputs.BASE_INPUT_INTERFACE_JS_PATH.format(inp.__class__.__name__)
self.assertTrue(os.path.exists(os.path.join(PACKAGE_NAME, path)))
def test_preprocessing(self):
inp = inputs.ImageIn()
inp = inputs.Image()
array = inp.preprocess(BASE64_IMG)
self.assertEqual(array.shape, (224, 224, 3))
def test_preprocessing(self):
inp = inputs.ImageIn()
inp = inputs.Image()
inp.image_height = 48
inp.image_width = 48
array = inp.preprocess(BASE64_IMG)
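A minimal, self-contained sketch of the renamed input class these tests now target (ImageIn has become Image). It builds a throwaway base64 image instead of relying on the test module's BASE64_IMG constant, and assumes Pillow is available alongside this version of gradio.

import base64
from io import BytesIO
from PIL import Image as PILImage
from gradio import inputs

buf = BytesIO()
PILImage.new("RGB", (64, 64), color="red").save(buf, format="PNG")
b64 = "data:image/png;base64," + base64.b64encode(buf.getvalue()).decode("utf-8")

arr = inputs.Image().preprocess(b64)   # formerly inputs.ImageIn()
print(arr.shape)                       # (224, 224, 3) with the defaults the tests assert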

View File

@ -13,7 +13,7 @@ class TestInterface(unittest.TestCase):
self.assertIsInstance(io.output_interfaces[0], gradio.outputs.Textbox)
def test_input_interface_is_instance(self):
inp = gradio.inputs.ImageIn()
inp = gradio.inputs.Image()
io = gr.Interface(inputs=inp, outputs='textBOX', fn=lambda x: x)
self.assertEqual(io.input_interfaces[0], inp)

View File

@ -11,7 +11,7 @@ BASE64_IMG = "data:image/jpeg;base64,/9j/4AAQSkZJRgABAQEASABIAAD/2wBDAAYEBQYFBAY
class TestLabel(unittest.TestCase):
def test_path_exists(self):
out = outputs.Label()
path = outputs.BASE_OUTPUT_INTERFACE_JS_PATH.format(out.get_name())
path = outputs.BASE_OUTPUT_INTERFACE_JS_PATH.format(out.__class__.__name__)
self.assertTrue(os.path.exists(os.path.join(PACKAGE_NAME, path)))
# def test_postprocessing_string(self):
@ -50,7 +50,7 @@ class TestLabel(unittest.TestCase):
class TestTextbox(unittest.TestCase):
def test_path_exists(self):
out = outputs.Textbox()
path = outputs.BASE_OUTPUT_INTERFACE_JS_PATH.format(out.get_name())
path = outputs.BASE_OUTPUT_INTERFACE_JS_PATH.format(out.__class__.__name__)
self.assertTrue(os.path.exists(os.path.join(PACKAGE_NAME, path)))
def test_postprocessing(self):
@ -63,7 +63,7 @@ class TestTextbox(unittest.TestCase):
class TestImage(unittest.TestCase):
def test_path_exists(self):
out = outputs.Image()
path = outputs.BASE_OUTPUT_INTERFACE_JS_PATH.format(out.get_name())
path = outputs.BASE_OUTPUT_INTERFACE_JS_PATH.format(out.__class__.__qualname__)
self.assertTrue(os.path.exists(os.path.join(PACKAGE_NAME, path)))
def test_postprocessing(self):