updated PyPi version

This commit is contained in:
Abubakar Abid 2020-08-10 22:23:48 -05:00
parent 64e78d1a6f
commit 1a261f13f2
20 changed files with 30 additions and 1868 deletions

View File

@@ -1,19 +0,0 @@
"""
This file is used to launch models on a hosted service, like `GradioHub`
"""
import tempfile
import traceback
import webbrowser
import gradio.inputs
import gradio.outputs
from gradio import networking, strings
from distutils.version import StrictVersion
import pkg_resources
import requests
import random
import time
def launch_from_config(path):
pass
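The stub above was removed without ever gaining a body. Purely as an illustration (not the author's implementation), a config-driven launcher could rebuild an Interface from the config.json template that this commit also deletes further below:

import json
import gradio

def launch_from_config(path):
    # Hypothetical sketch: parse the removed config.json template, i.e.
    # {"input_interface_type": ..., "output_interface_type": ..., "live": ...},
    # and rebuild an Interface from its string shortcuts.
    with open(path) as f:
        config = json.load(f)
    iface = gradio.Interface(fn=lambda x: x,  # placeholder; the real model fn is unknown
                             inputs=config["input_interface_type"],
                             outputs=config["output_interface_type"],
                             live=config.get("live", False))
    return iface.launch()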

File diff suppressed because it is too large

View File

@@ -21,7 +21,6 @@ import weakref
import analytics
import os
PKG_VERSION_URL = "https://gradio.app/api/pkg-version"
analytics.write_key = "uxIFddIEuuUcFLf9VgH2teTEtPlWdkNy"
analytics_url = 'https://api.gradio.app/'
@@ -48,7 +47,7 @@ class Interface:
def __init__(self, fn, inputs, outputs, verbose=False, examples=None,
live=False, show_input=True, show_output=True,
capture_session=False, title=None, description=None,
thumbnail=None, server_port=None, server_name=networking.LOCALHOST_NAME,
allow_screenshot=True, allow_flagging=True,
flagging_dir="flagged"):
"""
@@ -69,6 +68,7 @@
allow_flagging (bool): if False, users will not see a button to flag an input and output.
flagging_dir (str): what to name the dir where flagged data is stored.
"""
def get_input_instance(iface):
if isinstance(iface, str):
shortcut = InputComponent.get_all_shortcut_implementations()[iface]
@@ -90,6 +90,7 @@
"Output interface must be of type `str` or "
"`OutputComponent`"
)
if isinstance(inputs, list):
self.input_interfaces = [get_input_instance(i) for i in inputs]
else:
@@ -135,7 +136,7 @@
try:
import tensorflow as tf
self.session = tf.get_default_graph(), \
tf.keras.backend.get_session()
except (ImportError, AttributeError):
# If they are using TF >= 2.0 or don't have TF,
# just ignore this.
@@ -151,7 +152,7 @@
"_{}".format(index)):
index += 1
self.flagging_dir = self.flagging_dir + "/" + dir_name + \
"_{}".format(index)
"_{}".format(index)
try:
requests.post(analytics_url + 'gradio-initiated-analytics/',
@@ -188,8 +189,8 @@
iface[1]["label"] = ret_name
except ValueError:
pass
return config
def process(self, raw_input):
"""
@@ -208,7 +209,7 @@
durations = []
for predict_fn in self.predict:
start = time.time()
- if self.capture_session and not(self.session is None):
+ if self.capture_session and not (self.session is None):
graph, sess = self.session
with graph.as_default():
with sess.as_default():
@@ -238,10 +239,19 @@
return processed_output, durations
def close(self):
- if self.simple_server and not(self.simple_server.fileno() == -1):  # checks to see if server is running
+ if self.simple_server and not (self.simple_server.fileno() == -1):  # checks to see if server is running
print("Closing Gradio server on port {}...".format(self.server_port))
networking.close_server(self.simple_server)
+ def run_until_interrupted(self, thread, path_to_local_server):
+     try:
+         while 1:
+             pass
+     except (KeyboardInterrupt, OSError):
+         print("Keyboard interruption in main thread... closing server.")
+         thread.keep_running = False
+         networking.url_ok(path_to_local_server)
def launch(self, inline=None, inbrowser=None, share=False, debug=False):
"""
Parameters
@@ -258,11 +268,10 @@
path_to_local_server (str): Locally accessible link
share_url (str): Publicly accessible link (if share=True)
"""
output_directory = tempfile.mkdtemp()
# Set up a port to serve the directory containing the static files with interface.
- server_port, httpd = networking.start_simple_server(self, output_directory, self.server_name,
-                                                      server_port=self.server_port)
+ server_port, httpd, thread = networking.start_simple_server(
+     self, output_directory, self.server_name, server_port=self.server_port)
path_to_local_server = "http://{}:{}/".format(self.server_name, server_port)
networking.build_template(output_directory)
@@ -277,7 +286,7 @@
print("IMPORTANT: You are using gradio version {}, "
"however version {} "
"is available, please upgrade.".format(
current_pkg_version, latest_pkg_version))
print('--------')
except: # TODO(abidlabs): don't catch all exceptions
pass
@@ -370,6 +379,11 @@
data=data)
except requests.ConnectionError:
pass # do not push analytics if no network
+ is_in_interactive_mode = bool(getattr(sys, 'ps1', sys.flags.interactive))
+ if not is_in_interactive_mode:
+     self.run_until_interrupted(thread, path_to_local_server)
return httpd, path_to_local_server, share_url
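To make the new launch flow concrete, here is a minimal usage sketch against the 1.1.x signature shown above; the echo function and example values are illustrative, while the parameter names and the launch() return values come from the diff:

import gradio as gr

def echo(text):
    # Trivial stand-in model: returns its input unchanged.
    return text

iface = gr.Interface(fn=echo,
                     inputs="textbox",   # string shortcut, resolved by get_input_instance
                     outputs="textbox",  # string shortcut, resolved by get_output_instance
                     allow_flagging=True,
                     flagging_dir="flagged")

# launch() returns the server, the local URL, and the share URL
# (populated only when share=True). In a plain script, as opposed to a
# REPL, the new run_until_interrupted() then keeps the process alive
# until Ctrl+C.
httpd, local_url, share_url = iface.launch(share=False)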

View File

@@ -114,7 +114,8 @@ class Label(OutputComponent):
"""
Default rebuild method for label
"""
- return json.loads(data)
+ # return json.loads(data)
+ return data
class Image(OutputComponent):
'''

View File

@@ -1,155 +0,0 @@
from PIL import Image, ImageOps
from io import BytesIO
import base64
import tempfile
import scipy.io.wavfile
from scipy.fftpack import dct
import numpy as np
import skimage
#########################
# IMAGE PRE-PROCESSING
#########################
def decode_base64_to_image(encoding):
content = encoding.split(';')[1]
image_encoded = content.split(',')[1]
return Image.open(BytesIO(base64.b64decode(image_encoded)))
def encode_file_to_base64(f, type="image", ext=None):
with open(f, "rb") as file:
encoded_string = base64.b64encode(file.read())
base64_str = str(encoded_string, 'utf-8')
if ext is None:
ext = f.split(".")[-1]
return "data:" + type + "/" + ext + ";base64," + base64_str
def encode_plot_to_base64(plt):
with BytesIO() as output_bytes:
plt.savefig(output_bytes, format="png")
bytes_data = output_bytes.getvalue()
plt.close()
base64_str = str(base64.b64encode(bytes_data), 'utf-8')
return "data:image/png;base64," + base64_str
def encode_array_to_base64(image_array):
with BytesIO() as output_bytes:
PIL_image = Image.fromarray(skimage.img_as_ubyte(image_array))
PIL_image.save(output_bytes, 'PNG')
bytes_data = output_bytes.getvalue()
base64_str = str(base64.b64encode(bytes_data), 'utf-8')
return "data:image/png;base64," + base64_str
def resize_and_crop(img, size, crop_type='center'):
"""
Resize and crop an image to fit the specified size.
args:
size: `(width, height)` tuple.
crop_type: either 'top' or 'center'; depending on this value, the
image will be cropped keeping its top/left or its center to fit
the size.
raises:
ValueError: if an invalid `crop_type` is provided.
"""
if crop_type == "top":
center = (0, 0)
elif crop_type == "center":
center = (0.5, 0.5)
else:
raise ValueError
return ImageOps.fit(img, size, centering=center)
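A quick sanity check of resize_and_crop; the solid-gray input is a stand-in for a real photo:

from PIL import Image

img = Image.new("RGB", (640, 480), color="gray")
thumb = resize_and_crop(img, (100, 100), crop_type="center")
assert thumb.size == (100, 100)  # fitted and center-cropped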
##################
# OUTPUT
##################
def decode_base64_to_binary(encoding):
inp = encoding.split(';')[1].split(',')[1]
return base64.b64decode(inp)
def decode_base64_to_file(encoding):
file_obj = tempfile.NamedTemporaryFile()
file_obj.write(decode_base64_to_binary(encoding))
return file_obj
##################
# AUDIO FILES
##################
def generate_mfcc_features_from_audio_file(wav_filename,
pre_emphasis=0.95,
frame_size=0.025,
frame_stride=0.01,
NFFT=512,
nfilt=40,
num_ceps=12,
cep_lifter=22):
"""
Loads and preprocesses a .wav audio file into mfcc coefficients, the typical inputs to models.
Adapted from: https://haythamfayek.com/2016/04/21/speech-processing-for-machine-learning.html
:param wav_filename: string name of audio file to process.
:param pre_emphasis: a float factor, typically 0.95 or 0.97, which amplifies high frequencies.
:param frame_size: a float, the length in seconds of each time frame over which to take the FFT.
:param frame_stride: a float, the offset in seconds between consecutive time frames.
:param NFFT: the number of points in the short-time FFT for each time frame.
:param nfilt: the number of filters on the Mel scale to extract frequency bands.
:param num_ceps: the number of cepstral coefficients to retain.
:param cep_lifter: an int factor by which to de-emphasize higher-frequency coefficients.
:return: a numpy array of mfcc coefficients.
"""
sample_rate, signal = scipy.io.wavfile.read(wav_filename)
emphasized_signal = np.append(signal[0], signal[1:] - pre_emphasis * signal[:-1])
frame_length, frame_step = frame_size * sample_rate, frame_stride * sample_rate # Convert from seconds to samples
signal_length = len(emphasized_signal)
frame_length = int(round(frame_length))
frame_step = int(round(frame_step))
num_frames = int(np.ceil(float(np.abs(signal_length - frame_length)) / frame_step)) # Make sure that we have at least 1 frame
pad_signal_length = num_frames * frame_step + frame_length
z = np.zeros((pad_signal_length - signal_length))
pad_signal = np.append(emphasized_signal, z) # Pad Signal to make sure that all frames have equal number of samples without truncating any samples from the original signal
indices = np.tile(np.arange(0, frame_length), (num_frames, 1)) + np.tile(np.arange(0, num_frames * frame_step, frame_step), (frame_length, 1)).T
frames = pad_signal[indices.astype(np.int32, copy=False)]
frames *= np.hamming(frame_length)
mag_frames = np.absolute(np.fft.rfft(frames, NFFT)) # Magnitude of the FFT
pow_frames = ((1.0 / NFFT) * ((mag_frames) ** 2)) # Power Spectrum
low_freq_mel = 0
high_freq_mel = (2595 * np.log10(1 + (sample_rate / 2) / 700)) # Convert Hz to Mel
mel_points = np.linspace(low_freq_mel, high_freq_mel, nfilt + 2) # Equally spaced in Mel scale
hz_points = (700 * (10**(mel_points / 2595) - 1)) # Convert Mel to Hz
bin = np.floor((NFFT + 1) * hz_points / sample_rate)
fbank = np.zeros((nfilt, int(np.floor(NFFT / 2 + 1))))
for m in range(1, nfilt + 1):
f_m_minus = int(bin[m - 1]) # left
f_m = int(bin[m]) # center
f_m_plus = int(bin[m + 1]) # right
for k in range(f_m_minus, f_m):
fbank[m - 1, k] = (k - bin[m - 1]) / (bin[m] - bin[m - 1])
for k in range(f_m, f_m_plus):
fbank[m - 1, k] = (bin[m + 1] - k) / (bin[m + 1] - bin[m])
filter_banks = np.dot(pow_frames, fbank.T)
filter_banks = np.where(filter_banks == 0, np.finfo(float).eps, filter_banks) # Numerical Stability
filter_banks = 20 * np.log10(filter_banks) # dB
mfcc = dct(filter_banks, type=2, axis=1, norm='ortho')[:, 0: (num_ceps + 1)] # Keep filters 1-13 by default.
(nframes, ncoeff) = mfcc.shape
n = np.arange(ncoeff)
lift = 1 + (cep_lifter / 2) * np.sin(np.pi * n / cep_lifter)
mfcc *= lift
filter_banks -= (np.mean(filter_banks, axis=0) + 1e-8)
mfcc -= (np.mean(mfcc, axis=0) + 1e-8)
return mfcc[np.newaxis, :, :] # Create a batch dimension.
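An end-to-end sketch of the MFCC helper above, made self-contained with a synthetic tone; the file name and tone parameters are arbitrary:

import numpy as np
import scipy.io.wavfile

sample_rate = 16000
t = np.linspace(0, 1, sample_rate, endpoint=False)
tone = (0.5 * np.sin(2 * np.pi * 440 * t) * 32767).astype(np.int16)
scipy.io.wavfile.write("tone.wav", sample_rate, tone)

mfcc = generate_mfcc_features_from_audio_file("tone.wav")
# Shape is (1, num_frames, 13) with the defaults: a batch dimension,
# one row per 25 ms frame at a 10 ms stride, num_ceps + 1 coefficients.
print(mfcc.shape)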

View File

@@ -1,5 +0,0 @@
{
"input_interface_type": "{{input_interface_type}}",
"output_interface_type": "{{output_interface_type}}",
"live": {{live}}
}

View File

@@ -1,30 +0,0 @@
body {
font-family: 'Open Sans', sans-serif;
font-size: 12px;
margin: 0;
}
nav {
text-align: center;
padding: 16px 0 4px;
}
nav img {
margin-right: auto;
height: 32px;
}
#bulk_rows td {
text-align: center;
height: 60px;
}
#bulk_rows tr:nth-child(even) {
background: #DDD
}
#bulk_rows img {
text-align: center;
height: 100%;
}
#bulk_rows {
margin: 0 auto;
width: 70%;
border-spacing: 0;
border-collapse: collapse;
}

File diff suppressed because one or more lines are too long

View File

@@ -1,26 +0,0 @@
.table_holder {
max-width: 100%;
max-height: 100%;
overflow: scroll;
display: none;
}
.csv_preview {
background-color: white;
max-width: 100%;
max-height: 100%;
font-size: 12px;
font-family: monospace;
}
.csv_preview tr {
border-bottom: solid 1px black;
}
.csv_preview tr.header td {
background-color: #e67e22;
font-weight: bold;
}
.csv_preview td {
padding: 2px 4px;
}
.csv_preview td:nth-child(even) {
background-color: whitesmoke;
}

View File

@@ -1,68 +0,0 @@
.hide {
display: none !important;
}
.image_display {
height: 100%;
}
.view_holders {
flex-grow: 1;
height: calc(100% - 36px);
background-color: #CCCCCC;
position: relative;
}
.image_preview_holder, .saliency_holder {
width: 100%;
height: 100%;
display: flex;
justify-content: center;
align-items: center;
}
.saliency_holder {
position: absolute;
top: 0;
}
.saliency {
display: flex;
flex-direction: column;
border: none;
opacity: 1;
}
.saliency > div {
display: flex;
flex-grow: 1;
}
.saliency > div > div {
flex-grow: 1;
background-color: #EEA45D;
}
.image_preview {
max-width: 100%;
max-height: 100%;
}
.hidden_upload {
display: none;
}
.image_editor_overlay {
display: flex;
justify-content: center;
align-items: center;
}
.image_editor_holder {
height: 85%;
width: 85%;
}
.image_editor {
background-color: black;
}
#tie-btn-reset, #tie-btn-delete, #tie-btn-delete-all {
display: none !important;
}
.tui-image-editor-icpartition {
background-color: transparent !important;
}
.tui_close {
border-radius: 0 !important;
border: none !important;
margin-left: 10px !important;
font-family: 'Open Sans', sans-serif !important;
}

View File

@@ -1,73 +0,0 @@
var io_master_template = {
gather: function() {
this.clear();
for (let iface of this.input_interfaces) {
iface.submit();
}
},
clear: function() {
this.last_input = new Array(this.input_interfaces.length);
this.input_count = 0;
},
input: function(interface_id, data) {
this.last_input[interface_id] = data;
this.input_count += 1;
if (this.input_count == this.input_interfaces.length) {
this.submit();
}
},
submit: function() {
let io = this;
if (!this.config.live) {
this.target.find(".loading").removeClass("invisible");
this.target.find(".loading_in_progress").show();
this.target.find(".loading_failed").hide();
this.target.find(".output_interface").addClass("invisible");
this.target.find(".output_interfaces .panel_header").addClass("invisible");
}
this.fn(this.last_input).then((output) => {
io.output(output);
}).catch((error) => {
console.error(error);
this.target.find(".loading_in_progress").hide();
this.target.find(".loading_failed").show();
})
},
output: function(data) {
this.last_output = data["data"];
for (let i = 0; i < this.output_interfaces.length; i++) {
this.output_interfaces[i].output(data["data"][i]);
}
if (data["durations"]) {
let ratio = this.output_interfaces.length / data["durations"].length;
for (let i = 0; i < this.output_interfaces.length; i = i + ratio) {
this.output_interfaces[i].target.parent().find(`.loading_time[interface="${i + ratio - 1}"]`).text("Latency: " + ((data["durations"][i / ratio])).toFixed(2) + "s");
}
}
if (this.config.live) {
this.gather();
} else {
this.target.find(".loading").addClass("invisible");
this.target.find(".output_interface").removeClass("invisible");
this.target.find(".output_interfaces .panel_header").removeClass("invisible");
}
},
flag: function(message) {
var post_data = {
'data': {
'input_data' : toStringIfObject(this.last_input) ,
'output_data' : toStringIfObject(this.last_output),
'message' : message
}
}
$.ajax({type: "POST",
url: "/api/flag/",
data: JSON.stringify(post_data),
success: function(output){
console.log("Flagging successful")
},
});
}
};
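The flag() handler above fixes the wire format for flagged examples. A hedged Python equivalent of the same request (the localhost URL and example strings are assumptions; only the route and payload shape come from the code):

import json
import requests

payload = {
    "data": {
        "input_data": "example input",    # toStringIfObject(this.last_input)
        "output_data": "example output",  # toStringIfObject(this.last_output)
        "message": "flagged from a script",
    }
}
requests.post("http://localhost:7860/api/flag/", data=json.dumps(payload))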

View File

@@ -1,20 +0,0 @@
history_count = 0;
entry_history = [];
function add_history(entry) {
$("#featured_table").append(`
<div entry=${history_count}>${io_master.input_interface.renderFeatured(entry)}</div>
`);
entry_history.push(entry);
history_count++;
}
function load_history(data) {
data.forEach(add_history)
}
$('body').on('click', "#featured_table div", function() {
let entry = entry_history[$(this).attr("entry")];
io_master.input_interface.loadFeatured(entry);
io_master.output_interface.clear();
})

View File

@@ -1,119 +0,0 @@
input_to_object_map = {
"csv" : {},
"imagein" : image_input,
"sketchpad" : sketchpad_input,
"textbox" : textbox_input,
"webcam" : webcam,
"microphone" : microphone,
"radio" : radio,
"checkbox" : checkbox,
"checkboxgroup" : checkbox_group,
"slider" : slider,
"dropdown" : dropdown,
}
output_to_object_map = {
"csv" : {},
"image" : image_output,
"label" : label_output,
"textbox" : textbox_output
}
id_to_interface_map = {}
function set_interface_id(interface, id) {
interface.id = id;
id_to_interface_map[id] = interface;
}
var config;
$.getJSON("static/config.json", function(data) {
config = data;
_id = 0;
let input_interfaces = [];
let output_interfaces = [];
for (let i = 0; i < config["input_interfaces"].length; i++) {
input_interface_data = config["input_interfaces"][i];
input_interface = Object.create(input_to_object_map[input_interface_data[0]]);
if (input_interface_data[1]["label"]) {
$(".input_interfaces").append(`
<div class="panel_header">${input_interface_data[1]["label"]}</strong>
`);
}
$(".input_interfaces").append(`
<div class="input_interface interface" interface_id=${_id}>
${input_interface.html}
</div>
`);
input_interface.target = $(`.input_interface[interface_id=${_id}]`);
set_interface_id(input_interface, _id);
input_interface.init(input_interface_data[1]);
input_interfaces.push(input_interface);
input_interface.io_master = io_master;
_id++;
}
for (let i = 0; i < config["output_interfaces"].length; i++) {
if (i != 0 && i % (config["output_interfaces"].length / config.function_count) == 0) {
$(".output_interfaces").append("<hr>");
}
output_interface_data = config["output_interfaces"][i];
output_interface = Object.create(output_to_object_map[
output_interface_data[0]]);
if (output_interface_data[1]["label"]) {
$(".output_interfaces").append(`
<div class="panel_header">${output_interface_data[1]["label"]}</strong>
`);
}
$(".output_interfaces").append(`
<div class="output_interface interface" interface_id=${_id}>
${output_interface.html}
</div>
`);
output_interface.target = $(`.output_interface[interface_id=${_id}]`);
set_interface_id(output_interface, _id);
output_interface.init(output_interface_data[1]);
output_interfaces.push(output_interface);
output_interface.io_master = io_master;
_id++;
}
io_master.input_interfaces = input_interfaces;
io_master.output_interfaces = output_interfaces;
$(".clear").click(function() {
for (let input_interface of input_interfaces) {
input_interface.clear();
}
for (let output_interface of output_interfaces) {
output_interface.clear();
}
$(".flag").removeClass("flagged");
$(".flag_message").empty();
$("#loading").addClass("invisible");
$(".output_interface").removeClass("invisible");
io_master.last_input = null;
io_master.last_output = null;
})
if (config["share_url"] != "None") {
$("#share_row").css('display', 'flex');
}
load_history(config["sample_inputs"] || []);
if (!config["sample_inputs"]) {
$("#featured_history").hide();
}
if (config.live) {
io_master.gather();
} else {
$(".submit").show();
$(".submit").click(function() {
io_master.gather();
$(".flag").removeClass("flagged");
})
}
if (!config.show_input) {
$(".input_panel").hide();
}
});
$('body').on('click', '.flag', function(e) {
if (io_master.last_output) {
$(".flag").addClass("flagged");
io_master.flag($(".flag_message").val());
}
})

View File

@@ -1,41 +0,0 @@
$("#share").click(function() {
$("#share").hide()
$("#share_form").css('display', 'flex')
})
$("#send_link").click(function(evt) {
let name = $("#share_name").val()
let email = $("#share_email").val()
if (name && email) {
$("#send_link").attr('disabled', true);
$.ajax({
"url" : "https://gradio.app/api/send-email/",
"type": "POST",
"crossDomain": true,
"data": {
"url": config["share_url"],
"name": name,
"email": email
},
"success": function() {
$("#share_message").text("Shared successfully.");
$("#share_more").text("Share more");
},
"error": function() {
$("#share_message").text("Failed to share.");
$("#share_more").text("Try again");
},
"complete": function() {
$("#share_form").hide();
$("#share_complete").show();
$("#send_link").attr('disabled', false);
}
})
}
})
$("#share_more").click(function (evt) {
$("#share_email").val("");
$("#share_form").show();
$("#share_complete").hide();
})

View File

@@ -6266,9 +6266,6 @@
var source = container.bounds;
var destination = box;
source = {top: 0, left: 0, width: container.intrinsicWidth, height: container.intrinsicHeight}
console.log(image)
console.log(container)
console.log(box)
var newWidth = 30;
var newHeight = 30;
var newX = destination.left;
@@ -6283,9 +6280,6 @@
newHeight = destination.height;
newX = destination.left + (destination.width - newWidth) / 2;
}
console.log(destination.left, destination.top, destination.width, destination.height);
console.log(newX, newY, newWidth, newHeight);
console.log("---");
this.ctx.drawImage(image, 0, 0, container.intrinsicWidth, container.intrinsicHeight, newX, newY, newWidth, newHeight);
this.ctx.restore();
}

View File

@@ -1,238 +0,0 @@
/*!
* jQuery UI Touch Punch 1.0.7 as modified by RWAP Software
* based on original touchpunch v0.2.3 which has not been updated since 2014
*
* Updates by RWAP Software to take account of various suggested changes on the original code issues
*
* Original: https://github.com/furf/jquery-ui-touch-punch
* Copyright 2011-2014, Dave Furfero
* Dual licensed under the MIT or GPL Version 2 licenses.
*
* Fork: https://github.com/RWAP/jquery-ui-touch-punch
*
* Depends:
* jquery.ui.widget.js
* jquery.ui.mouse.js
*/
(function( factory ) {
if ( typeof define === "function" && define.amd ) {
// AMD. Register as an anonymous module.
define([ "jquery", "jquery.ui" ], factory );
} else {
// Browser globals
factory( jQuery );
}
}(function ($) {
// Detect touch support
$.support.touch = ( 'ontouchstart' in document
|| 'ontouchstart' in window
|| window.TouchEvent
|| (window.DocumentTouch && document instanceof DocumentTouch)
|| navigator.maxTouchPoints > 0
|| navigator.msMaxTouchPoints > 0
);
// Ignore browsers without touch or mouse support
if (!$.support.touch || !$.ui.mouse) {
return;
}
var mouseProto = $.ui.mouse.prototype,
_mouseInit = mouseProto._mouseInit,
_mouseDestroy = mouseProto._mouseDestroy,
touchHandled;
/**
* Get the x,y position of a touch event
* @param {Object} event A touch event
*/
function getTouchCoords (event) {
return {
x: event.originalEvent.changedTouches[0].pageX,
y: event.originalEvent.changedTouches[0].pageY
};
}
/**
* Simulate a mouse event based on a corresponding touch event
* @param {Object} event A touch event
* @param {String} simulatedType The corresponding mouse event
*/
function simulateMouseEvent (event, simulatedType) {
// Ignore multi-touch events
if (event.originalEvent.touches.length > 1) {
return;
}
// Prevent "Ignored attempt to cancel a touchmove event with cancelable=false" errors
if (event.cancelable) {
event.preventDefault();
}
var touch = event.originalEvent.changedTouches[0],
simulatedEvent = document.createEvent('MouseEvents');
// Initialize the simulated mouse event using the touch event's coordinates
simulatedEvent.initMouseEvent(
simulatedType, // type
true, // bubbles
true, // cancelable
window, // view
1, // detail
touch.screenX, // screenX
touch.screenY, // screenY
touch.clientX, // clientX
touch.clientY, // clientY
false, // ctrlKey
false, // altKey
false, // shiftKey
false, // metaKey
0, // button
null // relatedTarget
);
// Dispatch the simulated event to the target element
event.target.dispatchEvent(simulatedEvent);
}
/**
* Handle the jQuery UI widget's touchstart events
* @param {Object} event The widget element's touchstart event
*/
mouseProto._touchStart = function (event) {
var self = this;
// Interaction time
this._startedMove = event.timeStamp;
// Track movement to determine if interaction was a click
self._startPos = getTouchCoords(event);
// Ignore the event if another widget is already being handled
if (touchHandled || !self._mouseCapture(event.originalEvent.changedTouches[0])) {
return;
}
// Set the flag to prevent other widgets from inheriting the touch event
touchHandled = true;
// Track movement to determine if interaction was a click
self._touchMoved = false;
// Simulate the mouseover event
simulateMouseEvent(event, 'mouseover');
// Simulate the mousemove event
simulateMouseEvent(event, 'mousemove');
// Simulate the mousedown event
simulateMouseEvent(event, 'mousedown');
};
/**
* Handle the jQuery UI widget's touchmove events
* @param {Object} event The document's touchmove event
*/
mouseProto._touchMove = function (event) {
// Ignore event if not handled
if (!touchHandled) {
return;
}
// Interaction was moved
this._touchMoved = true;
// Simulate the mousemove event
simulateMouseEvent(event, 'mousemove');
};
/**
* Handle the jQuery UI widget's touchend events
* @param {Object} event The document's touchend event
*/
mouseProto._touchEnd = function (event) {
// Ignore event if not handled
if (!touchHandled) {
return;
}
// Simulate the mouseup event
simulateMouseEvent(event, 'mouseup');
// Simulate the mouseout event
simulateMouseEvent(event, 'mouseout');
// If the touch interaction did not move, it should trigger a click
// Check for this in two ways - length of time of simulation and distance moved
// Allow for Apple Stylus to be used also
var timeMoving = event.timeStamp - this._startedMove;
if (!this._touchMoved || timeMoving < 500) {
// Simulate the click event
simulateMouseEvent(event, 'click');
} else {
var endPos = getTouchCoords(event);
if ((Math.abs(endPos.x - this._startPos.x) < 10) && (Math.abs(endPos.y - this._startPos.y) < 10)) {
// If the touch interaction did not move, it should trigger a click
if (!this._touchMoved || event.originalEvent.changedTouches[0].touchType === 'stylus') {
// Simulate the click event
simulateMouseEvent(event, 'click');
}
}
}
// Unset the flag to determine the touch movement stopped
this._touchMoved = false;
// Unset the flag to allow other widgets to inherit the touch event
touchHandled = false;
};
/**
* A duck punch of the $.ui.mouse _mouseInit method to support touch events.
* This method extends the widget with bound touch event handlers that
* translate touch events to mouse events and pass them to the widget's
* original mouse event handling methods.
*/
mouseProto._mouseInit = function () {
var self = this;
// Delegate the touch handlers to the widget's element
self.element.on({
touchstart: $.proxy(self, '_touchStart'),
touchmove: $.proxy(self, '_touchMove'),
touchend: $.proxy(self, '_touchEnd')
});
// Call the original $.ui.mouse init method
_mouseInit.call(self);
};
/**
* Remove the touch event handlers
*/
mouseProto._mouseDestroy = function () {
var self = this;
// Delegate the touch handlers to the widget's element
self.element.off({
touchstart: $.proxy(self, '_touchStart'),
touchmove: $.proxy(self, '_touchMove'),
touchend: $.proxy(self, '_touchEnd')
});
// Call the original $.ui.mouse destroy method
_mouseDestroy.call(self);
};
}));

View File

@@ -1,33 +0,0 @@
<html lang="en">
<head>
<title>Gradio</title>
<link rel="stylesheet" href="../static/css/bulk_style.css">
</head>
<body>
<nav>
<a href="https://gradio.app"><img src="../static/img/logo_inline.png" /></a>
</nav>
<table id="bulk_rows">
<thead>
<th>Image</th>
<th>Label</th>
</thead>
</table>
<script src="../static/js/vendor/jquery.min.js"></script>
<script>
$.get("/static/flagged/data.txt", function(data) {
let lines = data.split("\n");
lines.forEach((line) => {
let row_data = JSON.parse(line);
let output = row_data["output"];
$("#bulk_rows").append(`
<tr class="bulk_row">
<td><img src="/static/flagged/${row_data["input"]}" /></td>
<td class="label">${output["label"] + (output["confidences"] ? ": " + Math.round(100 * output["confidences"][0]["confidence"]) + "%" : "")}</td>
</tr>
`)
})
});
</script>
</body>
</html>

File diff suppressed because one or more lines are too long

View File

@@ -1,6 +1,6 @@
Metadata-Version: 1.0
Name: gradio
- Version: 1.1.3
+ Version: 1.1.4
Summary: Python library for easily interacting with trained machine learning models
Home-page: https://github.com/gradio-app/gradio-UI
Author: Abubakar Abid

View File

@@ -5,7 +5,7 @@ except ImportError:
setup(
name='gradio',
- version='1.1.3',
+ version='1.1.4',
include_package_data=True,
description='Python library for easily interacting with trained machine learning models',
author='Abubakar Abid',