Commit bef5ca265f (parent 505deb8620): check all demo test
@@ -146,7 +146,7 @@ class CheckboxGroup(InputComponent):
    Input type: Union[List[str], List[int]]
    """

-   def __init__(self, choices, type="choices", label=None):
+   def __init__(self, choices, type="value", label=None):
        '''
        Parameters:
        choices (List[str]): list of options to select from.
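With the default changed to type="value", the wrapped function receives the list of selected choice strings. A minimal sketch, assuming a hypothetical count_selected function:

import gradio as gr

def count_selected(selected):
    # type="value" passes the selected choice strings as a list
    return "You picked: " + ", ".join(selected)

gr.Interface(count_selected,
             gr.inputs.CheckboxGroup(["cat", "dog", "bird"]),
             "text").launch()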
@@ -241,20 +241,18 @@ class Image(InputComponent):
    Input type: Union[numpy.array, PIL.Image, str]
    """

-   def __init__(self, shape=None, image_mode='RGB', source="upload", tools=["brush", "crop", "rotate", "undo", "filter"], type="numpy", label=None):
+   def __init__(self, shape=None, image_mode='RGB', source="upload", type="numpy", label=None):
        '''
        Parameters:
        shape (Tuple[int, int]): shape to crop and resize image to; if None, matches input image size.
        image_mode (str): "RGB" if color, or "L" if black and white.
        source (str): Source of image. "upload" creates a box where user can drop an image file, "webcam" allows user to take snapshot from their webcam, "canvas" defaults to a white image that can be edited and drawn upon with tools.
-       tools (List[str]): Tools available to user to edit images. "brush" allows user to draw on image, "crop" allows user to select portion of image, "rotate" allows user to rotate or flip image, "undo" allows user to revert changes, "filter" allows user to apply filters on image.
        type (str): Type of value to be returned by component. "numpy" returns a numpy array with shape (width, height, 3), "pil" returns a PIL image object, "file" returns a temporary file object whose path can be retrieved by file_obj.name.
        label (str): component name in interface.
        '''
        self.shape = shape
        self.image_mode = image_mode
        self.source = source
-       self.tools = tools
        self.type = type
        super().__init__(label)
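The new signature drops the tools argument and keeps shape, image_mode, source and type. A short sketch of the component in use, mirroring the webcam demo updated later in this commit (the flip function is a stand-in):

import gradio as gr
import numpy as np

def flip(img):
    # type="numpy" (the default) hands the function a numpy array
    return np.fliplr(img)

gr.Interface(flip,
             gr.inputs.Image(shape=(100, 100), image_mode="L", source="webcam"),
             "image").launch()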
@@ -263,15 +261,13 @@ class Image(InputComponent):
        return {
            "image": {},
            "webcam": {"source": "webcam"},
-           "sketchpad": {"image_mode": "L", "source": "canvas", "tools": ["brush"]},
-           "paint": {"source": "canvas", "tools": ["brush", "undo"]},
+           "sketchpad": {"image_mode": "L", "source": "canvas"},
        }

    def get_template_context(self):
        return {
            "image_mode": self.image_mode,
            "source": self.source,
-           "tools": self.tools,
            **super().get_template_context()
        }
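Per the shortcut table above, the string "sketchpad" now expands to an Image configured as a grayscale canvas (the "paint" shortcut is gone). A hedged illustration, with recognize_digit as a hypothetical classifier:

import gradio as gr

def recognize_digit(img):
    # hypothetical stub; returns label -> confidence
    return {"5": 0.8, "3": 0.2}

# these two interfaces are equivalent under the shortcut table above
gr.Interface(recognize_digit, "sketchpad", "label")
gr.Interface(recognize_digit, gr.inputs.Image(image_mode="L", source="canvas"), "label")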
@@ -292,7 +288,6 @@ class Image(InputComponent):
            im.save(file_obj.name)
            return file_obj

    def process_example(self, example):
        if os.path.exists(example):
            return processing_utils.encode_file_to_base64(example)
@@ -312,7 +307,7 @@ class Image(InputComponent):

class Audio(InputComponent):
    """
-   Component accepts audio input files. Provides numpy array of shape `(samples, 2)` as an argument to the wrapped function.
+   Component accepts audio input files.
    Input type: Union[Tuple[int, numpy.array], str, numpy.array]
    """
@@ -327,6 +322,12 @@ class Audio(InputComponent):
        self.type = type
        super().__init__(label)

+   def get_template_context(self):
+       return {
+           "source": self.source,
+           **super().get_template_context()
+       }

    @classmethod
    def get_shortcut_implementations(cls):
        return {
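The added get_template_context exposes the Audio component's source to the front end, which the reworked audio_input JS below switches on ("microphone" vs "upload"). A rough sketch using the "microphone" shortcut from the updated demo; the tuple unpacking assumes the Tuple[int, numpy.array] input form listed above:

import gradio as gr

def audio_shape(audio):
    sample_rate, data = audio   # assumes the (sample_rate, samples) tuple form
    return str(data.shape)

gr.Interface(audio_shape, "microphone", "text").launch()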
@@ -434,7 +435,9 @@ class Dataframe(InputComponent):
        else:
            raise ValueError("Unknown type: " + self.type + ". Please choose from: 'pandas', 'numpy', 'array'.")

#######################
# DEPRECATED COMPONENTS
#######################

class Sketchpad(InputComponent):
    """
@@ -14,6 +14,7 @@ from numbers import Number
import warnings
import tempfile
import scipy
import os

class OutputComponent(Component):
    """
@@ -314,14 +315,13 @@ class HTML(OutputComponent):

class File(OutputComponent):
    '''
-   Used for file output. Expects a string path to a file if `return_path` is True.
-   Output type: Union[bytes, str]
+   Used for file output.
+   Output type: Union[file-like, str]
    '''

-   def __init__(self, type="file", label=None):
+   def __init__(self, label=None):
        '''
        Parameters:
-       type (str): Type of value to be passed to component. "file" expects a file path, "str" expects a string to be returned as a file, "binary" expects a bytes object to be returned as a file.
        label (str): component name in interface.
        '''
        super().__init__(label)
@@ -333,6 +333,13 @@ class File(OutputComponent):
            "file": {},
        }

+   def postprocess(self, y):
+       return {
+           "name": os.path.basename(y),
+           "size": os.path.getsize(y),
+           "data": processing_utils.encode_file_to_base64(y, header=False)
+       }


class Dataframe(OutputComponent):
    """
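With the new postprocess, a wrapped function only has to return a path on disk; the component then ships the name, size and base64 payload shown above to the browser. A minimal sketch in the spirit of the upload demo changed later in this commit (passthrough is a stand-in name):

import gradio as gr

def passthrough(file):
    # return the uploaded temp file's path; File.postprocess handles the rest
    return file.name

gr.Interface(passthrough, "file", "file").launch()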
build/lib/gradio/processing_utils.py (new file, 157 lines)
@@ -0,0 +1,157 @@
from PIL import Image, ImageOps
from io import BytesIO
import base64
import tempfile
import scipy.io.wavfile
from scipy.fftpack import dct
import numpy as np
import skimage


#########################
# IMAGE PRE-PROCESSING
#########################
def decode_base64_to_image(encoding):
    content = encoding.split(';')[1]
    image_encoded = content.split(',')[1]
    return Image.open(BytesIO(base64.b64decode(image_encoded)))


def encode_file_to_base64(f, type="image", ext=None, header=True):
    with open(f, "rb") as file:
        encoded_string = base64.b64encode(file.read())
        base64_str = str(encoded_string, 'utf-8')
        if not header:
            return base64_str
        if ext is None:
            ext = f.split(".")[-1]
        return "data:" + type + "/" + ext + ";base64," + base64_str
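A small round trip through the two helpers above, assuming a local file named cat.png:

data_url = encode_file_to_base64("cat.png")               # "data:image/png;base64,..."
payload = encode_file_to_base64("cat.png", header=False)  # same base64, no data-URL prefix
img = decode_base64_to_image(data_url)                    # back to a PIL.Image
print(img.size)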
def encode_plot_to_base64(plt):
    with BytesIO() as output_bytes:
        plt.savefig(output_bytes, format="png")
        bytes_data = output_bytes.getvalue()
    plt.close()
    base64_str = str(base64.b64encode(bytes_data), 'utf-8')
    return "data:image/png;base64," + base64_str


def encode_array_to_base64(image_array):
    with BytesIO() as output_bytes:
        PIL_image = Image.fromarray(skimage.img_as_ubyte(image_array))
        PIL_image.save(output_bytes, 'PNG')
        bytes_data = output_bytes.getvalue()
    base64_str = str(base64.b64encode(bytes_data), 'utf-8')
    return "data:image/png;base64," + base64_str
def resize_and_crop(img, size, crop_type='center'):
    """
    Resize and crop an image to fit the specified size.
    args:
        size: `(width, height)` tuple.
        crop_type: can be 'top' or 'center'; depending on this value, the
            image is cropped from the top/left or around the center to fit
            the size.
    raises:
        ValueError: if an invalid `crop_type` is provided.
    """
    if crop_type == "top":
        center = (0, 0)
    elif crop_type == "center":
        center = (0.5, 0.5)
    else:
        raise ValueError
    return ImageOps.fit(img, size, centering=center)

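Illustrative use of resize_and_crop, again assuming a local cat.png:

from PIL import Image

im = Image.open("cat.png")
thumb = resize_and_crop(im, (100, 100))                    # crop_type defaults to 'center'
banner = resize_and_crop(im, (300, 100), crop_type="top")  # crop from the top/left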
##################
# OUTPUT
##################

def decode_base64_to_binary(encoding):
    inp = encoding.split(';')[1].split(',')[1]
    return base64.b64decode(inp)


def decode_base64_to_file(encoding):
    file_obj = tempfile.NamedTemporaryFile()
    file_obj.write(decode_base64_to_binary(encoding))
    return file_obj
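decode_base64_to_file is the inverse path used for uploads: the front end sends a data URL and the backend wraps the decoded bytes in a named temporary file. A sketch, assuming data_url holds such a base64 data URL:

file_obj = decode_base64_to_file(data_url)
file_obj.flush()             # flush before handing file_obj.name to other code
print(file_obj.name)         # path of the temporary file holding the decoded bytes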
##################
# AUDIO FILES
##################

def generate_mfcc_features_from_audio_file(wav_filename,
                                           pre_emphasis=0.95,
                                           frame_size=0.025,
                                           frame_stride=0.01,
                                           NFFT=512,
                                           nfilt=40,
                                           num_ceps=12,
                                           cep_lifter=22):
    """
    Loads and preprocesses a .wav audio file into mfcc coefficients, the typical inputs to models.
    Adapted from: https://haythamfayek.com/2016/04/21/speech-processing-for-machine-learning.html
    :param wav_filename: string name of audio file to process.
    :param pre_emphasis: a float factor, typically 0.95 or 0.97, which amplifies high frequencies.
    :param frame_size: a float that is the length, in seconds, of the time frame over which to take the fft.
    :param frame_stride: a float that is the offset, in seconds, between consecutive time frames.
    :param NFFT: the number of points in the short-time fft for each time frame.
    :param nfilt: the number of filters on the Mel-scale to extract frequency bands.
    :param num_ceps: the number of cepstral coefficients to retain.
    :param cep_lifter: an int factor that de-emphasizes the higher-frequency coefficients.
    :return: a numpy array of mfcc coefficients.
    """
    sample_rate, signal = scipy.io.wavfile.read(wav_filename)
    emphasized_signal = np.append(signal[0], signal[1:] - pre_emphasis * signal[:-1])

    frame_length, frame_step = frame_size * sample_rate, frame_stride * sample_rate  # Convert from seconds to samples
    signal_length = len(emphasized_signal)
    frame_length = int(round(frame_length))
    frame_step = int(round(frame_step))
    num_frames = int(np.ceil(float(np.abs(signal_length - frame_length)) / frame_step))  # Make sure that we have at least 1 frame

    pad_signal_length = num_frames * frame_step + frame_length
    z = np.zeros((pad_signal_length - signal_length))
    pad_signal = np.append(emphasized_signal, z)  # Pad Signal to make sure that all frames have equal number of samples without truncating any samples from the original signal

    indices = np.tile(np.arange(0, frame_length), (num_frames, 1)) + np.tile(np.arange(0, num_frames * frame_step, frame_step), (frame_length, 1)).T
    frames = pad_signal[indices.astype(np.int32, copy=False)]

    frames *= np.hamming(frame_length)
    mag_frames = np.absolute(np.fft.rfft(frames, NFFT))  # Magnitude of the FFT
    pow_frames = ((1.0 / NFFT) * ((mag_frames) ** 2))  # Power Spectrum

    low_freq_mel = 0
    high_freq_mel = (2595 * np.log10(1 + (sample_rate / 2) / 700))  # Convert Hz to Mel
    mel_points = np.linspace(low_freq_mel, high_freq_mel, nfilt + 2)  # Equally spaced in Mel scale
    hz_points = (700 * (10**(mel_points / 2595) - 1))  # Convert Mel to Hz
    bin = np.floor((NFFT + 1) * hz_points / sample_rate)

    fbank = np.zeros((nfilt, int(np.floor(NFFT / 2 + 1))))
    for m in range(1, nfilt + 1):
        f_m_minus = int(bin[m - 1])   # left
        f_m = int(bin[m])             # center
        f_m_plus = int(bin[m + 1])    # right

        for k in range(f_m_minus, f_m):
            fbank[m - 1, k] = (k - bin[m - 1]) / (bin[m] - bin[m - 1])
        for k in range(f_m, f_m_plus):
            fbank[m - 1, k] = (bin[m + 1] - k) / (bin[m + 1] - bin[m])
    filter_banks = np.dot(pow_frames, fbank.T)
    filter_banks = np.where(filter_banks == 0, np.finfo(float).eps, filter_banks)  # Numerical Stability
    filter_banks = 20 * np.log10(filter_banks)  # dB

    mfcc = dct(filter_banks, type=2, axis=1, norm='ortho')[:, 0: (num_ceps + 1)]  # Keep filters 1-13 by default.
    (nframes, ncoeff) = mfcc.shape
    n = np.arange(ncoeff)
    lift = 1 + (cep_lifter / 2) * np.sin(np.pi * n / cep_lifter)
    mfcc *= lift

    filter_banks -= (np.mean(filter_banks, axis=0) + 1e-8)
    mfcc -= (np.mean(mfcc, axis=0) + 1e-8)
    return mfcc[np.newaxis, :, :]  # Create a batch dimension.
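A hedged usage example for the MFCC helper, assuming speech.wav is a mono 16-bit WAV file:

mfcc = generate_mfcc_features_from_audio_file("speech.wav")
# shape is (1, num_frames, num_ceps + 1): a batch dimension, one row per
# 25 ms frame at a 10 ms stride, and 13 coefficients with the defaults
print(mfcc.shape)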
@ -27,10 +27,19 @@
|
||||
flex-grow: 1;
|
||||
}
|
||||
.interface {
|
||||
height: 360px;
|
||||
display: flex;
|
||||
flex-flow: column;
|
||||
}
|
||||
.interface_box {
|
||||
height: 360px;
|
||||
}
|
||||
.interface_mini_box {
|
||||
height: 180px;
|
||||
}
|
||||
.interface_max_box {
|
||||
overflow: auto;
|
||||
max-height: 360px;
|
||||
}
|
||||
.interface:not(*:last-child) {
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
@ -37,10 +37,10 @@
|
||||
.canvas_holder canvas {
|
||||
background-color: white;
|
||||
}
|
||||
.canvas_holder, .saliency_holder {
|
||||
.canvas_holder {
|
||||
text-align: center;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
height: calc(100% - 36px);
|
||||
}
|
||||
.saliency_holder {
|
||||
position: absolute;
|
||||
|
@ -1,15 +0,0 @@
|
||||
.output_text {
|
||||
width: 100%;
|
||||
font-size: 18px;
|
||||
outline: none;
|
||||
background-color: white;
|
||||
border: solid 1px lightgray;
|
||||
border-radius: 2px;
|
||||
box-sizing: border-box;
|
||||
padding: 4px;
|
||||
min-height: 30px;
|
||||
font-family: monospace;
|
||||
white-space: pre-wrap; /* CSS3 */
|
||||
white-space: -moz-pre-wrap; /* Firefox */
|
||||
word-wrap: break-word; /* IE */
|
||||
}
|
@ -40,6 +40,7 @@
|
||||
.output_class {
|
||||
font-weight: bold;
|
||||
font-size: 36px;
|
||||
padding: 32px 16px;;
|
||||
flex-grow: 1;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
|
@ -1,27 +1,40 @@
|
||||
const audio_input = {
|
||||
html: `
|
||||
<div class="upload_zone">
|
||||
<img class="not_recording" src="/static/img/mic.png" />
|
||||
<div class="recording hidden volume_display">
|
||||
<div class="volume volume_left">
|
||||
<div class="volume_bar"></div>
|
||||
<div class="interface_box">
|
||||
<div class="file_zone hidden">
|
||||
<div class="upload_zone drop_zone">
|
||||
<div class="input_caption">Drop Audio Here<br>- or -<br>Click to Upload</div>
|
||||
</div>
|
||||
<img src="/static/img/mic_recording.png" />
|
||||
<div class="volume volume_right">
|
||||
<div class="volume_bar"></div>
|
||||
<div class="file_display hide">
|
||||
<div class="file_name"></div>
|
||||
<div class="file_size"></div>
|
||||
</div>
|
||||
<input class="hidden_upload" type="file" accept="audio/*" />
|
||||
</div>
|
||||
<div class="upload_zone mic_zone hidden">
|
||||
<img class="not_recording" src="/static/img/mic.png" />
|
||||
<div class="recording hidden volume_display">
|
||||
<div class="volume volume_left">
|
||||
<div class="volume_bar"></div>
|
||||
</div>
|
||||
<img src="/static/img/mic_recording.png" />
|
||||
<div class="volume volume_right">
|
||||
<div class="volume_bar"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="not_recording input_caption">Click to Record from Microphone</div>
|
||||
<div class="recording hidden input_caption">Click to Stop Recording</div>
|
||||
</div>
|
||||
<div class="player hidden">
|
||||
<div class="waveform"></div>
|
||||
<button class="playpause primary">Play / Pause</button>
|
||||
</div>
|
||||
<div class="not_recording input_caption">Click to Record from Microphone</div>
|
||||
<div class="recording hidden input_caption">Click to Stop Recording</div>
|
||||
</div>
|
||||
<div class="player hidden">
|
||||
<div class="waveform"></div>
|
||||
<button class="playpause primary">Play / Pause</button>
|
||||
</div>
|
||||
`,
|
||||
state: "NO_AUDIO",
|
||||
init: function(opts) {
|
||||
var io = this;
|
||||
this.source = opts.source;
|
||||
this.wavesurfer = WaveSurfer.create({
|
||||
container: io.target.find('.waveform')[0],
|
||||
waveColor: '#888888',
|
||||
@ -29,54 +42,94 @@ const audio_input = {
|
||||
barWidth: 3,
|
||||
hideScrollbar: true
|
||||
});
|
||||
this.target.find(".upload_zone").click(function() {
|
||||
if (io.state == "NO_AUDIO") {
|
||||
if (!has_audio_loaded) {
|
||||
loadAudio();
|
||||
io.mic = new p5.AudioIn();
|
||||
}
|
||||
io.recorder = new p5.SoundRecorder();
|
||||
io.soundFile = new p5.SoundFile();
|
||||
io.recorder.setInput(io.mic);
|
||||
io.target.find(".recording").removeClass("hidden");
|
||||
io.target.find(".not_recording").hide();
|
||||
io.state = "RECORDING";
|
||||
io.mic.start();
|
||||
io.recorder.record(io.soundFile);
|
||||
|
||||
io.interval_id = window.setInterval(function () {
|
||||
var volume = Math.floor(100 * io.mic.getLevel());
|
||||
io.target.find(".volume_bar").width(`${(volume > 0 ? 10 : 0) + Math.round(2 * Math.sqrt(10 * volume))}px`)
|
||||
}, 100)
|
||||
}
|
||||
});
|
||||
this.target.find(".upload_zone").mousedown(function() {
|
||||
if (io.state == "RECORDING" || io.state == "STOP_RECORDING") {
|
||||
io.target.find(".upload_zone").hide();
|
||||
io.recorder.stop();
|
||||
var blob = io.soundFile.getBlob();
|
||||
var reader = new window.FileReader();
|
||||
reader.readAsDataURL(blob);
|
||||
reader.onloadend = function() {
|
||||
console.log(reader.result)
|
||||
io.audio_data = reader.result;
|
||||
io.target.find(".player").removeClass("hidden");
|
||||
io.wavesurfer.load(io.audio_data);
|
||||
if (io.state == "STOP_RECORDING") {
|
||||
io.state = "RECORDED";
|
||||
io.submit();
|
||||
if (this.source == "microphone") {
|
||||
this.target.find(".mic_zone").removeClass("hidden");
|
||||
this.target.find(".mic_zone").click(function() {
|
||||
if (io.state == "NO_AUDIO") {
|
||||
if (!has_audio_loaded) {
|
||||
loadAudio();
|
||||
io.mic = new p5.AudioIn();
|
||||
}
|
||||
io.state = "RECORDED";
|
||||
io.recorder = new p5.SoundRecorder();
|
||||
io.soundFile = new p5.SoundFile();
|
||||
io.recorder.setInput(io.mic);
|
||||
io.target.find(".recording").removeClass("hidden");
|
||||
io.target.find(".not_recording").hide();
|
||||
io.state = "RECORDING";
|
||||
io.mic.start();
|
||||
io.recorder.record(io.soundFile);
|
||||
|
||||
io.interval_id = window.setInterval(function () {
|
||||
var volume = Math.floor(100 * io.mic.getLevel());
|
||||
io.target.find(".volume_bar").width(`${(volume > 0 ? 10 : 0) + Math.round(2 * Math.sqrt(10 * volume))}px`)
|
||||
}, 100)
|
||||
}
|
||||
if (io.interval_id) {
|
||||
window.clearInterval(io.interval_id);
|
||||
});
|
||||
|
||||
this.target.find(".mic_zone").mousedown(function() {
|
||||
if (io.state == "RECORDING" || io.state == "STOP_RECORDING") {
|
||||
io.target.find(".upload_zone").hide();
|
||||
io.recorder.stop();
|
||||
var blob = io.soundFile.getBlob();
|
||||
var reader = new window.FileReader();
|
||||
reader.readAsDataURL(blob);
|
||||
reader.onloadend = function() {
|
||||
io.load_preview_from_audio(reader.result);
|
||||
}
|
||||
if (io.interval_id) {
|
||||
window.clearInterval(io.interval_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
} else if (this.source == "upload") {
|
||||
this.target.find(".file_zone").removeClass("hidden");
|
||||
this.target.find(".upload_zone").click(function (e) {
|
||||
io.target.find(".hidden_upload").click();
|
||||
});
|
||||
this.target.on('drag dragstart dragend dragover dragenter dragleave drop',
|
||||
".drop_zone", function(e) {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
})
|
||||
this.target.on('drop', '.drop_zone', function(e) {
|
||||
files = e.originalEvent.dataTransfer.files;
|
||||
io.load_preview_from_files(files)
|
||||
});
|
||||
this.target.find('.hidden_upload').on('change', function (e) {
|
||||
if (this.files) {
|
||||
io.load_preview_from_files(this.files);
|
||||
}
|
||||
})
|
||||
}
|
||||
this.target.find(".playpause").click(function () {
|
||||
io.wavesurfer.playPause();
|
||||
})
|
||||
},
|
||||
load_preview_from_audio: function(audio) {
|
||||
var io = this;
|
||||
io.audio_data = audio;
|
||||
io.target.find(".player").removeClass("hidden");
|
||||
io.wavesurfer.load(io.audio_data);
|
||||
if (io.state == "STOP_RECORDING") {
|
||||
io.state = "RECORDED";
|
||||
io.submit();
|
||||
}
|
||||
io.state = "RECORDED";
|
||||
},
|
||||
load_preview_from_files: function(files) {
|
||||
if (!files.length || !window.FileReader) {
|
||||
return
|
||||
}
|
||||
var ReaderObj = new FileReader()
|
||||
ReaderObj.readAsDataURL(files[0])
|
||||
ReaderObj.io = this;
|
||||
this.state = "AUDIO_LOADING"
|
||||
ReaderObj.onloadend = function() {
|
||||
let io = this.io;
|
||||
io.target.find(".upload_zone").hide();
|
||||
io.load_preview_from_audio(this.result);
|
||||
}
|
||||
},
|
||||
submit: function() {
|
||||
if (this.state == "RECORDED") {
|
||||
this.io_master.input(this.id, this.audio_data);
|
||||
@ -92,6 +145,7 @@ const audio_input = {
|
||||
this.target.find(".recording").addClass("hidden");
|
||||
this.target.find(".player").addClass("hidden");
|
||||
this.target.find(".upload_zone").show();
|
||||
this.target.find(".hidden_upload").prop("value", "")
|
||||
if (this.wavesurfer) {
|
||||
this.wavesurfer.stop();
|
||||
}
|
||||
|
@ -3,7 +3,6 @@ const checkbox = {
|
||||
<label><input class="checkbox" type="checkbox"> </label>
|
||||
</div>`,
|
||||
init: function(opts) {
|
||||
this.target.css("height", "auto");
|
||||
this.target.find("input").checkboxradio();
|
||||
},
|
||||
submit: function() {
|
||||
|
@ -1,7 +1,6 @@
|
||||
const checkbox_group = {
|
||||
html: ``,
|
||||
init: function(opts) {
|
||||
this.target.css("height", "auto");
|
||||
this.choices = opts.choices;
|
||||
html = "<div class='checkbox_group'>"
|
||||
for ([index, choice] of opts.choices.entries()) {
|
||||
|
@ -1,6 +1,8 @@
|
||||
const dataframe_input = {
|
||||
html: `
|
||||
<div class="dataframe">
|
||||
<div class="interface_max_box">
|
||||
<div class="dataframe">
|
||||
</div>
|
||||
</div>
|
||||
`,
|
||||
init: function(opts) {
|
||||
|
@ -1,7 +1,6 @@
|
||||
const dropdown = {
|
||||
html: ``,
|
||||
init: function(opts) {
|
||||
this.target.css("height", "auto");
|
||||
this.choices = opts.choices;
|
||||
html = "<select class='dropdown'>"
|
||||
for ([index, choice] of opts.choices.entries()) {
|
||||
|
@ -1,13 +1,16 @@
|
||||
const file_input = {
|
||||
html: `
|
||||
<div class="upload_zone drop_zone">
|
||||
<div class="input_caption">Drop File Here<br>- or -<br>Click to Upload</div>
|
||||
<div class="interface_mini_box">
|
||||
<div class="upload_zone drop_zone">
|
||||
<div class="input_caption">Drop File Here<br>- or -<br>Click to Upload</div>
|
||||
</div>
|
||||
<div class="file_display hide">
|
||||
<div class="file_name"></div>
|
||||
<div class="file_size"></div>
|
||||
</div>
|
||||
<input class="hidden_upload" type="file" />
|
||||
</div>
|
||||
<div class="file_display hide">
|
||||
<div class="file_name"></div>
|
||||
<div class="file_size"></div>
|
||||
</div>
|
||||
<input class="hidden_upload" type="file" />`
|
||||
`
|
||||
,
|
||||
init: function(opts) {
|
||||
var io = this;
|
||||
@ -46,15 +49,7 @@ const file_input = {
|
||||
io.target.find(".upload_zone").hide();
|
||||
io.target.find(".file_display").removeClass("hide");
|
||||
io.target.find(".file_name").text(files[0].name);
|
||||
let bytes = files[0].size;
|
||||
let units = ["B", "KB", "MB", "GB", "PB"];
|
||||
let i = 0;
|
||||
while (bytes > 1024) {
|
||||
bytes /= 1024;
|
||||
i++;
|
||||
}
|
||||
let unit = units[i];
|
||||
io.target.find(".file_size").text(bytes.toFixed(1) + " " + unit);
|
||||
io.target.find(".file_size").text(prettyBytes(files[0].size));
|
||||
io.file_data = this.result;
|
||||
}
|
||||
},
|
||||
|
@ -1,22 +1,39 @@
|
||||
const image_input = {
|
||||
html: `
|
||||
<div class="upload_zone drop_zone">
|
||||
<div class="input_caption">Drop Image Here<br>- or -<br>Click to Upload</div>
|
||||
</div>
|
||||
<div class="image_display hide">
|
||||
<div class="edit_holder">
|
||||
<button class="edit_image interface_button primary">Edit</button>
|
||||
<div class="interface_box">
|
||||
<div class="upload_zone drop_zone hide">
|
||||
<div class="input_caption">Drop Image Here<br>- or -<br>Click to Upload</div>
|
||||
</div>
|
||||
<div class="view_holders">
|
||||
<div class="image_preview_holder">
|
||||
<img class="image_preview" />
|
||||
<div class="webcam upload_zone hide">
|
||||
<div class="webcam_box">
|
||||
</div>
|
||||
<div class="saliency_holder hide">
|
||||
<canvas class="saliency"></canvas>
|
||||
<span>Click to Snap!</span>
|
||||
</div>
|
||||
<div class="sketchpad hide">
|
||||
<div class="sketch_tools">
|
||||
<div id="brush_1" size="8" class="brush"></div>
|
||||
<div id="brush_2" size="16" class="brush selected"></div>
|
||||
<div id="brush_3" size="24" class="brush"></div>
|
||||
</div>
|
||||
<div class="view_holders">
|
||||
<div class="canvas_holder">
|
||||
<canvas class="sketch"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="image_display hide">
|
||||
<div class="edit_holder">
|
||||
<button class="edit_image interface_button primary">Edit</button>
|
||||
</div>
|
||||
<div class="view_holders">
|
||||
<div class="image_preview_holder">
|
||||
<img class="image_preview" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<input class="hidden_upload" type="file" accept="image/x-png,image/gif,image/jpeg" />
|
||||
</div>
|
||||
<input class="hidden_upload" type="file" accept="image/x-png,image/gif,image/jpeg" />`
|
||||
`
|
||||
,
|
||||
overlay_html: `
|
||||
<div class="overlay interface_extension image_editor_overlay hide" interface_id="{0}">
|
||||
@ -27,25 +44,70 @@ const image_input = {
|
||||
`,
|
||||
init: function(opts) {
|
||||
var io = this;
|
||||
this.source = opts.source;
|
||||
$('body').append(this.overlay_html.format(this.id));
|
||||
this.overlay_target = $(`.overlay[interface_id=${this.id}]`);
|
||||
this.target.find(".upload_zone").click(function (e) {
|
||||
io.target.find(".hidden_upload").click();
|
||||
});
|
||||
this.target.on('drag dragstart dragend dragover dragenter dragleave drop',
|
||||
".drop_zone", function(e) {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
})
|
||||
this.target.on('drop', '.drop_zone', function(e) {
|
||||
files = e.originalEvent.dataTransfer.files;
|
||||
io.load_preview_from_files(files)
|
||||
});
|
||||
this.target.find('.hidden_upload').on('change', function (e) {
|
||||
if (this.files) {
|
||||
io.load_preview_from_files(this.files);
|
||||
}
|
||||
})
|
||||
if (this.source == "upload") {
|
||||
io.target.find(".drop_zone").removeClass("hide");
|
||||
this.target.find(".drop_zone").click(function (e) {
|
||||
io.target.find(".hidden_upload").click();
|
||||
});
|
||||
this.target.on('drag dragstart dragend dragover dragenter dragleave drop',
|
||||
".drop_zone", function(e) {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
})
|
||||
this.target.on('drop', '.drop_zone', function(e) {
|
||||
files = e.originalEvent.dataTransfer.files;
|
||||
io.load_preview_from_files(files)
|
||||
});
|
||||
this.target.find('.hidden_upload').on('change', function (e) {
|
||||
if (this.files) {
|
||||
io.load_preview_from_files(this.files);
|
||||
}
|
||||
})
|
||||
} else if (this.source == "webcam") {
|
||||
io.target.find(".webcam").removeClass("hide");
|
||||
let w = this.target.find(".webcam_box").width();
|
||||
let h = this.target.find(".webcam_box").height();
|
||||
let RATIO = 4/3;
|
||||
let dim = Math.min(h, w / RATIO);
|
||||
Webcam.set({
|
||||
image_format: 'jpeg',
|
||||
width: dim * RATIO,
|
||||
height: dim,
|
||||
dest_width: dim * RATIO,
|
||||
dest_height: dim,
|
||||
})
|
||||
Webcam.attach(this.target.find(".webcam_box")[0]);
|
||||
io.target.find(".webcam").click(function() {
|
||||
Webcam.snap(function(image_data) {
|
||||
io.target.find(".webcam").hide();
|
||||
io.target.find(".image_display").removeClass("hide");
|
||||
io.set_image_data(image_data, /*update_editor=*/true);
|
||||
io.state = "IMAGE_LOADED";
|
||||
});
|
||||
})
|
||||
} else if (this.source == "canvas") {
|
||||
io.target.find(".sketchpad").removeClass("hide");
|
||||
var dimension = Math.min(this.target.find(".canvas_holder").width(),
|
||||
this.target.find(".canvas_holder").height()) - 2 // dimension - border
|
||||
var id = this.id;
|
||||
this.sketchpad = new Sketchpad({
|
||||
element: '.interface[interface_id=' + id + '] .sketch',
|
||||
width: dimension,
|
||||
height: dimension
|
||||
});
|
||||
this.sketchpad.penSize = this.target.find(".brush.selected").attr("size");
|
||||
this.canvas = this.target.find('.canvas_holder canvas')[0];
|
||||
this.context = this.canvas.getContext("2d");
|
||||
this.target.find(".brush").click(function (e) {
|
||||
io.target.find(".brush").removeClass("selected");
|
||||
$(this).addClass("selected");
|
||||
io.sketchpad.penSize = $(this).attr("size");
|
||||
})
|
||||
this.clear();
|
||||
}
|
||||
this.target.find('.edit_image').click(function (e) {
|
||||
io.overlay_target.removeClass("hide");
|
||||
})
|
||||
@ -69,42 +131,54 @@ const image_input = {
|
||||
this.overlay_target.find('.tui_close').click(function (e) {
|
||||
io.overlay_target.addClass("hide");
|
||||
if ($(e.target).hasClass('tui_save')) {
|
||||
// if (io.tui_editor.ui.submenu == "crop") {
|
||||
// io.tui_editor._cropAction().crop());
|
||||
// }
|
||||
io.set_image_data(io.tui_editor.toDataURL(), /*update_editor=*/false);
|
||||
}
|
||||
});
|
||||
},
|
||||
submit: function() {
|
||||
var io = this;
|
||||
if (this.state == "IMAGE_LOADED") {
|
||||
if (this.source == "canvas") {
|
||||
var dataURL = this.canvas.toDataURL("image/png");
|
||||
this.io_master.input(this.id, dataURL);
|
||||
} else if (this.state == "IMAGE_LOADED") {
|
||||
io.io_master.input(io.id, this.image_data);
|
||||
} else if (this.source == "webcam") {
|
||||
Webcam.snap(function(image_data) {
|
||||
io.target.find(".webcam").hide();
|
||||
io.target.find(".image_display").removeClass("hide");
|
||||
io.set_image_data(image_data, /*update_editor=*/true);
|
||||
io.state = "IMAGE_LOADED";
|
||||
io.io_master.input(io.id, image_data);
|
||||
});
|
||||
}
|
||||
},
|
||||
clear: function() {
|
||||
this.target.find(".upload_zone").show();
|
||||
this.target.find(".image_preview").attr('src', '');
|
||||
this.target.find(".image_display").addClass("hide");
|
||||
this.target.find(".hidden_upload").prop("value", "")
|
||||
this.state = "NO_IMAGE";
|
||||
this.image_data = null;
|
||||
if (this.source == "canvas") {
|
||||
this.context.fillStyle = "#FFFFFF";
|
||||
this.context.fillRect(0, 0, this.context.canvas.width, this.context.
|
||||
canvas.height);
|
||||
} else {
|
||||
this.target.find(".upload_zone").show();
|
||||
this.target.find(".image_preview").attr('src', '');
|
||||
this.target.find(".image_display").addClass("hide");
|
||||
this.target.find(".hidden_upload").prop("value", "")
|
||||
this.state = "NO_IMAGE";
|
||||
this.image_data = null;
|
||||
}
|
||||
},
|
||||
state: "NO_IMAGE",
|
||||
image_data: null,
|
||||
set_image_data: function(data, update_editor) {
|
||||
set_image_data: function(image_data, update_editor) {
|
||||
let io = this;
|
||||
resizeImage.call(this, data, 600, 600, function(image_data) {
|
||||
io.image_data = image_data
|
||||
io.target.find(".image_preview").attr('src', image_data);
|
||||
if (update_editor) {
|
||||
io.tui_editor.loadImageFromURL(io.image_data, 'input').then(function (sizeValue) {
|
||||
io.tui_editor.clearUndoStack();
|
||||
io.tui_editor.ui.activeMenuEvent();
|
||||
io.tui_editor.ui.resizeEditor({ imageSize: sizeValue });
|
||||
});
|
||||
}
|
||||
})
|
||||
io.image_data = image_data
|
||||
io.target.find(".image_preview").attr('src', image_data);
|
||||
if (update_editor) {
|
||||
io.tui_editor.loadImageFromURL(io.image_data, 'input').then(function (sizeValue) {
|
||||
io.tui_editor.clearUndoStack();
|
||||
io.tui_editor.ui.activeMenuEvent();
|
||||
io.tui_editor.ui.resizeEditor({ imageSize: sizeValue });
|
||||
});
|
||||
}
|
||||
},
|
||||
load_preview_from_files: function(files) {
|
||||
if (!files.length || !window.FileReader || !/^image/.test(files[0].type)) {
|
||||
@ -127,9 +201,21 @@ const image_input = {
|
||||
},
|
||||
load_example: function(data) {
|
||||
let io = this;
|
||||
io.target.find(".upload_zone").hide();
|
||||
io.target.find(".image_display").removeClass("hide");
|
||||
io.set_image_data(data, /*update_editor=*/true);
|
||||
io.state = "IMAGE_LOADED"
|
||||
if (this.source == "canvas") {
|
||||
this.clear();
|
||||
let ctx = this.context;
|
||||
var img = new Image;
|
||||
let dimension = this.target.find(".canvas_holder canvas").width();
|
||||
img.onload = function(){
|
||||
ctx.clearRect(0,0,dimension,dimension);
|
||||
ctx.drawImage(img,0,0,dimension,dimension);
|
||||
};
|
||||
img.src = data;
|
||||
} else {
|
||||
io.target.find(".upload_zone").hide();
|
||||
io.target.find(".image_display").removeClass("hide");
|
||||
io.set_image_data(data, /*update_editor=*/true);
|
||||
io.state = "IMAGE_LOADED";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,22 +1,24 @@
|
||||
const microphone = {
|
||||
html: `
|
||||
<div class="upload_zone">
|
||||
<img class="not_recording" src="/static/img/mic.png" />
|
||||
<div class="recording hidden volume_display">
|
||||
<div class="volume volume_left">
|
||||
<div class="volume_bar"></div>
|
||||
</div>
|
||||
<img src="/static/img/mic_recording.png" />
|
||||
<div class="volume volume_right">
|
||||
<div class="volume_bar"></div>
|
||||
<div class="interface_box">
|
||||
<div class="upload_zone">
|
||||
<img class="not_recording" src="/static/img/mic.png" />
|
||||
<div class="recording hidden volume_display">
|
||||
<div class="volume volume_left">
|
||||
<div class="volume_bar"></div>
|
||||
</div>
|
||||
<img src="/static/img/mic_recording.png" />
|
||||
<div class="volume volume_right">
|
||||
<div class="volume_bar"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="not_recording input_caption">Click to Record from Microphone</div>
|
||||
<div class="recording hidden input_caption">Click to Stop Recording</div>
|
||||
</div>
|
||||
<div class="player hidden">
|
||||
<div class="waveform"></div>
|
||||
<button class="playpause primary">Play / Pause</button>
|
||||
</div>
|
||||
<div class="not_recording input_caption">Click to Record from Microphone</div>
|
||||
<div class="recording hidden input_caption">Click to Stop Recording</div>
|
||||
</div>
|
||||
<div class="player hidden">
|
||||
<div class="waveform"></div>
|
||||
<button class="playpause primary">Play / Pause</button>
|
||||
</div>
|
||||
`,
|
||||
state: "NO_AUDIO",
|
||||
|
@ -1,7 +1,6 @@
|
||||
const radio = {
|
||||
html: ``,
|
||||
init: function(opts) {
|
||||
this.target.css("height", "auto");
|
||||
this.choices = opts.choices;
|
||||
html = "<div class='radio_group'>"
|
||||
for ([index, choice] of opts.choices.entries()) {
|
||||
|
@ -1,15 +1,18 @@
|
||||
const sketchpad_input = {
|
||||
html: `
|
||||
<div class="sketch_tools">
|
||||
<div id="brush_1" size="8" class="brush"></div>
|
||||
<div id="brush_2" size="16" class="brush selected"></div>
|
||||
<div id="brush_3" size="24" class="brush"></div>
|
||||
</div>
|
||||
<div class="view_holders">
|
||||
<div class="canvas_holder">
|
||||
<canvas class="sketch"></canvas>
|
||||
<div class="interface_box">
|
||||
<div class="sketch_tools">
|
||||
<div id="brush_1" size="8" class="brush"></div>
|
||||
<div id="brush_2" size="16" class="brush selected"></div>
|
||||
<div id="brush_3" size="24" class="brush"></div>
|
||||
</div>
|
||||
</div>`,
|
||||
<div class="view_holders">
|
||||
<div class="canvas_holder">
|
||||
<canvas class="sketch"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
`,
|
||||
init: function() {
|
||||
var io = this;
|
||||
var dimension = Math.min(this.target.find(".canvas_holder").width(),
|
||||
|
@ -8,7 +8,6 @@ const slider = {
|
||||
init: function(opts) {
|
||||
let io = this;
|
||||
this.minimum = opts.minimum;
|
||||
this.target.css("height", "auto");
|
||||
var handle = this.target.find(".ui-slider-handle");
|
||||
this.slider = this.target.find(".slider").slider({
|
||||
create: function() {
|
||||
|
@ -1,12 +1,9 @@
|
||||
const textbox_input = {
|
||||
html: `<textarea class="input_text"></textarea>
|
||||
<div class='input_text_saliency'></div>`,
|
||||
disabled_html: `<textarea class="input_text" disabled></textarea>
|
||||
<div class='input_text_saliency'></div>`,
|
||||
init: function(opts) {
|
||||
if (opts.lines) {
|
||||
this.target.find(".input_text").attr("rows", opts.lines).css("height", "auto");
|
||||
this.target.css("height", "auto");
|
||||
}
|
||||
if (opts.placeholder) {
|
||||
this.target.find(".input_text").attr("placeholder", opts.placeholder)
|
||||
|
@ -1,10 +1,11 @@
|
||||
const webcam = {
|
||||
html: `
|
||||
<div class="webcam_box"></div>
|
||||
<div class="interface_box">
|
||||
<div class="webcam_box"></div>
|
||||
</div>
|
||||
`,
|
||||
init: function(opts) {
|
||||
var io = this;
|
||||
// this.target.find(".webcam_box").width(this.target.find(".webcam_box").width);
|
||||
let w = this.target.find(".webcam_box").width();
|
||||
let h = this.target.find(".webcam_box").height();
|
||||
let RATIO = 4/3;
|
||||
|
@ -1,9 +1,11 @@
|
||||
const audio_output = {
|
||||
html: `
|
||||
<div class="player hidden">
|
||||
<div class="waveform"></div>
|
||||
<button class="playpause primary">Play / Pause</button>
|
||||
</div>
|
||||
<div class="interface_box">
|
||||
<div class="player hidden">
|
||||
<div class="waveform"></div>
|
||||
<button class="playpause primary">Play / Pause</button>
|
||||
</div>
|
||||
</div
|
||||
`,
|
||||
state: "NO_AUDIO",
|
||||
init: function(opts) {
|
||||
|
@ -1,6 +1,8 @@
|
||||
const dataframe_output = {
|
||||
html: `
|
||||
<div class="dataframe"></div>
|
||||
<div class="interface_max_box">
|
||||
<div class="dataframe"></div>
|
||||
</div>
|
||||
`,
|
||||
init: function(opts) {
|
||||
},
|
||||
|
@ -1,17 +1,28 @@
|
||||
const file_output = {
|
||||
html: `
|
||||
<div class="highlight_legend"></div>
|
||||
<div class="output_text"></div>
|
||||
<a class="interface_mini_box">
|
||||
<div class="file_display file_download">
|
||||
<div class="file_name"></div>
|
||||
<div class="file_size"></div>
|
||||
</div>
|
||||
</div>
|
||||
`,
|
||||
init: function(opts) {
|
||||
this.target.css("height", "auto");
|
||||
},
|
||||
output: function(data) {
|
||||
this.target.find(".output_text").text(data);
|
||||
this.target.find(".file_name").text(data.name);
|
||||
this.target.find(".file_size").text(prettyBytes(data.size));
|
||||
this.target.find(".interface_mini_box")
|
||||
.attr("href", "data:;base64," + data.data)
|
||||
.attr("download", data.name);
|
||||
},
|
||||
submit: function() {
|
||||
},
|
||||
clear: function() {
|
||||
this.target.find(".output_text").empty();
|
||||
this.target.find(".file_name").empty();
|
||||
this.target.find(".file_size").empty();
|
||||
this.target.find(".interface_mini_box")
|
||||
.removeAttr("href")
|
||||
.removeAttr("download");
|
||||
}
|
||||
}
|
||||
|
@ -11,7 +11,6 @@ const highlighted_text = {
|
||||
<div class="output_text"></div>
|
||||
`,
|
||||
init: function(opts) {
|
||||
this.target.css("height", "auto");
|
||||
this.color_map = {};
|
||||
if (opts.color_map) {
|
||||
this.generate_category_legend(opts.color_map);
|
||||
@ -19,7 +18,6 @@ const highlighted_text = {
|
||||
},
|
||||
new_category_index: 0,
|
||||
generate_category_legend: function(map) {
|
||||
console.log(map)
|
||||
let default_colors = ["pink", "lightblue", "gold", "plum", "lightskyblue", "greenyellow", "khaki", "cyan", "moccasin", "lightgray"]
|
||||
for (let category in map) {
|
||||
if (category in this.color_map) {
|
||||
|
@ -1,7 +1,6 @@
|
||||
const html_output = {
|
||||
html: ``,
|
||||
init: function(opts) {
|
||||
this.target.css("height", "auto");
|
||||
},
|
||||
output: function(data) {
|
||||
this.target.html(data);
|
||||
|
@ -1,8 +1,10 @@
|
||||
const image_output = {
|
||||
html: `
|
||||
<div class="interface_box">
|
||||
<div class="output_image_holder">
|
||||
<img class="output_image" />
|
||||
</div>
|
||||
</div>
|
||||
`,
|
||||
init: function(opts) {},
|
||||
output: function(data) {
|
||||
|
@ -2,7 +2,6 @@ const json_output = {
|
||||
html: `
|
||||
`,
|
||||
init: function(opts) {
|
||||
this.target.css("height", "auto");
|
||||
},
|
||||
output: function(data) {
|
||||
this.clear();
|
||||
|
@ -1,7 +1,6 @@
|
||||
const textbox_output = {
|
||||
html: `<div class="output_text"></div>`,
|
||||
init: function(opts) {
|
||||
this.target.css("height", "auto");
|
||||
},
|
||||
output: function(data) {
|
||||
this.target.find(".output_text").text(data);
|
||||
|
@ -78,3 +78,14 @@ function interpolate(val, rgb1, rgb2) {
|
||||
function colorToString(rgb) {
|
||||
return "rgb(" + rgb[0] + ", " + rgb[1] + ", " + rgb[2] + ")";
|
||||
}
|
||||
|
||||
function prettyBytes(bytes) {
|
||||
let units = ["B", "KB", "MB", "GB", "PB"];
|
||||
let i = 0;
|
||||
while (bytes > 1024) {
|
||||
bytes /= 1024;
|
||||
i++;
|
||||
}
|
||||
let unit = units[i];
|
||||
return bytes.toFixed(1) + " " + unit;
|
||||
}
|
@ -35,6 +35,7 @@
|
||||
<link type="text/css" href="../static/css/vendor/jquery-ui.css" rel="stylesheet">
|
||||
<link type="text/css" href="../static/css/vendor/jexcel.min.css" rel="stylesheet">
|
||||
<link type="text/css" href="../static/css/vendor/jsuites.min.css" rel="stylesheet">
|
||||
<link type="text/css" href="../static/css/vendor/jsonTree.css" rel="stylesheet">
|
||||
<link href="https://fonts.googleapis.com/css?family=Open+Sans" rel="stylesheet">
|
||||
|
||||
<link rel="stylesheet" href="../static/css/style.css">
|
||||
|
@@ -3,7 +3,6 @@ import random

def upload(file):
    print(file.name)
    with file:
        return file.name
    return "/mnt/c/Users/aliab/projects/gradio/gradio/static/js/interfaces/output/file.js"

-gr.Interface(upload, "file", "text").launch()
+gr.Interface(upload, "file", "file").launch()
|
@ -16,7 +16,7 @@ def flip10(i1, i2, i3, i4, i5):
|
||||
return i1 + i2
|
||||
|
||||
gr.Interface(flip2,
|
||||
["image"],
|
||||
["webcam"],
|
||||
["image"],
|
||||
examples=[
|
||||
["images/cheetah1.jpg"],
|
||||
|
@ -16,12 +16,12 @@ def answer_question(text, audio):
|
||||
{"name": "Aziz", "age": 18},
|
||||
{"name": "Fozan", "age": None},
|
||||
]
|
||||
}, "<div style='background-color: pink; padding: 2px;'>" + str(audio[1].shape) + "</div>", ""
|
||||
}, "<div style='background-color: pink; padding: 2px;'>" + str(audio[1].shape) + "</div>", audio
|
||||
|
||||
gr.Interface(answer_question,
|
||||
[
|
||||
gr.inputs.Dropdown(["cat", "dog", "bird"]),
|
||||
gr.inputs.Microphone(),
|
||||
"microphone",
|
||||
],
|
||||
[
|
||||
gr.outputs.HighlightedText(color_map={"good": "lightgreen", "bad": "pink"}),
|
||||
|
@@ -4,4 +4,4 @@ import numpy as np
def snap(image):
    return image

-gr.Interface(snap, gr.inputs.Webcam(shape=(50,100)), "image").launch()
+gr.Interface(snap, gr.inputs.Image(shape=(100,100), image_mode="L", source="webcam"), "image").launch()
|
@ -146,7 +146,7 @@ class CheckboxGroup(InputComponent):
|
||||
Input type: Union[List[str], List[int]]
|
||||
"""
|
||||
|
||||
def __init__(self, choices, type="choices", label=None):
|
||||
def __init__(self, choices, type="value", label=None):
|
||||
'''
|
||||
Parameters:
|
||||
choices (List[str]): list of options to select from.
|
||||
@ -268,7 +268,6 @@ class Image(InputComponent):
|
||||
return {
|
||||
"image_mode": self.image_mode,
|
||||
"source": self.source,
|
||||
"tools": self.tools,
|
||||
**super().get_template_context()
|
||||
}
|
||||
|
||||
@ -323,6 +322,12 @@ class Audio(InputComponent):
|
||||
self.type = type
|
||||
super().__init__(label)
|
||||
|
||||
def get_template_context(self):
|
||||
return {
|
||||
"source": self.source,
|
||||
**super().get_template_context()
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def get_shortcut_implementations(cls):
|
||||
return {
|
||||
|
@ -14,6 +14,7 @@ from numbers import Number
|
||||
import warnings
|
||||
import tempfile
|
||||
import scipy
|
||||
import os
|
||||
|
||||
class OutputComponent(Component):
|
||||
"""
|
||||
@ -318,10 +319,9 @@ class File(OutputComponent):
|
||||
Output type: Union[file-like, str]
|
||||
'''
|
||||
|
||||
def __init__(self, type="file", label=None):
|
||||
def __init__(self, label=None):
|
||||
'''
|
||||
Parameters:
|
||||
type (str): Type of value to be passed to component. "file" expects a file-like object, "filepath" expects a string to an output file.
|
||||
label (str): component name in interface.
|
||||
'''
|
||||
super().__init__(label)
|
||||
@ -331,15 +331,14 @@ class File(OutputComponent):
|
||||
def get_shortcut_implementations(cls):
|
||||
return {
|
||||
"file": {},
|
||||
"filepath": {type: "filepath"},
|
||||
}
|
||||
|
||||
def postprocess(self, y):
|
||||
if self.type == "file":
|
||||
|
||||
elif self.type == "filepath":
|
||||
else:
|
||||
raise ValueError("Unknown type: " + self.type + ". Please choose from: 'file', 'filepath'.")
|
||||
return {
|
||||
"name": os.path.basename(y),
|
||||
"size": os.path.getsize(y),
|
||||
"data": processing_utils.encode_file_to_base64(y, header=False)
|
||||
}
|
||||
|
||||
|
||||
class Dataframe(OutputComponent):
|
||||
|
@@ -17,10 +17,12 @@ def decode_base64_to_image(encoding):
    return Image.open(BytesIO(base64.b64decode(image_encoded)))


-def encode_file_to_base64(f, type="image", ext=None):
+def encode_file_to_base64(f, type="image", ext=None, header=True):
    with open(f, "rb") as file:
        encoded_string = base64.b64encode(file.read())
        base64_str = str(encoded_string, 'utf-8')
+       if not header:
+           return base64_str
        if ext is None:
            ext = f.split(".")[-1]
        return "data:" + type + "/" + ext + ";base64," + base64_str
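The new header flag lets callers get the raw base64 payload without the data-URL prefix, which is what File.postprocess relies on. A tiny illustration with a hypothetical report.pdf:

with_header = encode_file_to_base64("report.pdf", type="application", ext="pdf")
payload_only = encode_file_to_base64("report.pdf", header=False)
assert with_header.endswith(payload_only)   # same payload, only the prefix differs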
|
@ -37,6 +37,7 @@
|
||||
height: 180px;
|
||||
}
|
||||
.interface_max_box {
|
||||
overflow: auto;
|
||||
max-height: 360px;
|
||||
}
|
||||
.interface:not(*:last-child) {
|
||||
|
@ -37,10 +37,10 @@
|
||||
.canvas_holder canvas {
|
||||
background-color: white;
|
||||
}
|
||||
.canvas_holder, .saliency_holder {
|
||||
.canvas_holder {
|
||||
text-align: center;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
height: calc(100% - 36px);
|
||||
}
|
||||
.saliency_holder {
|
||||
position: absolute;
|
||||
|
@ -1,15 +0,0 @@
|
||||
.output_text {
|
||||
width: 100%;
|
||||
font-size: 18px;
|
||||
outline: none;
|
||||
background-color: white;
|
||||
border: solid 1px lightgray;
|
||||
border-radius: 2px;
|
||||
box-sizing: border-box;
|
||||
padding: 4px;
|
||||
min-height: 30px;
|
||||
font-family: monospace;
|
||||
white-space: pre-wrap; /* CSS3 */
|
||||
white-space: -moz-pre-wrap; /* Firefox */
|
||||
word-wrap: break-word; /* IE */
|
||||
}
|
@ -40,6 +40,7 @@
|
||||
.output_class {
|
||||
font-weight: bold;
|
||||
font-size: 36px;
|
||||
padding: 32px 16px;;
|
||||
flex-grow: 1;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
|
@ -1,7 +1,7 @@
|
||||
const audio_input = {
|
||||
html: `
|
||||
<div class="interface_box">
|
||||
<div class="file_zone">
|
||||
<div class="file_zone hidden">
|
||||
<div class="upload_zone drop_zone">
|
||||
<div class="input_caption">Drop Audio Here<br>- or -<br>Click to Upload</div>
|
||||
</div>
|
||||
@ -9,9 +9,9 @@ const audio_input = {
|
||||
<div class="file_name"></div>
|
||||
<div class="file_size"></div>
|
||||
</div>
|
||||
<input class="hidden_upload" type="file" />
|
||||
<input class="hidden_upload" type="file" accept="audio/*" />
|
||||
</div>
|
||||
<div class="upload_zone mic_zone">
|
||||
<div class="upload_zone mic_zone hidden">
|
||||
<img class="not_recording" src="/static/img/mic.png" />
|
||||
<div class="recording hidden volume_display">
|
||||
<div class="volume volume_left">
|
||||
@ -43,7 +43,7 @@ const audio_input = {
|
||||
hideScrollbar: true
|
||||
});
|
||||
if (this.source == "microphone") {
|
||||
this.target.find(".file_zone").hide();
|
||||
this.target.find(".mic_zone").removeClass("hidden");
|
||||
this.target.find(".mic_zone").click(function() {
|
||||
if (io.state == "NO_AUDIO") {
|
||||
if (!has_audio_loaded) {
|
||||
@ -74,29 +74,62 @@ const audio_input = {
|
||||
var reader = new window.FileReader();
|
||||
reader.readAsDataURL(blob);
|
||||
reader.onloadend = function() {
|
||||
console.log(reader.result)
|
||||
io.audio_data = reader.result;
|
||||
io.target.find(".player").removeClass("hidden");
|
||||
io.wavesurfer.load(io.audio_data);
|
||||
if (io.state == "STOP_RECORDING") {
|
||||
io.state = "RECORDED";
|
||||
io.submit();
|
||||
}
|
||||
io.state = "RECORDED";
|
||||
io.load_preview_from_audio(reader.result);
|
||||
}
|
||||
if (io.interval_id) {
|
||||
window.clearInterval(io.interval_id);
|
||||
}
|
||||
}
|
||||
})
|
||||
} else if (self.type == "upload") {
|
||||
this.target.find(".mic_zone").hide();
|
||||
|
||||
} else if (this.source == "upload") {
|
||||
this.target.find(".file_zone").removeClass("hidden");
|
||||
this.target.find(".upload_zone").click(function (e) {
|
||||
io.target.find(".hidden_upload").click();
|
||||
});
|
||||
this.target.on('drag dragstart dragend dragover dragenter dragleave drop',
|
||||
".drop_zone", function(e) {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
})
|
||||
this.target.on('drop', '.drop_zone', function(e) {
|
||||
files = e.originalEvent.dataTransfer.files;
|
||||
io.load_preview_from_files(files)
|
||||
});
|
||||
this.target.find('.hidden_upload').on('change', function (e) {
|
||||
if (this.files) {
|
||||
io.load_preview_from_files(this.files);
|
||||
}
|
||||
})
|
||||
}
|
||||
this.target.find(".playpause").click(function () {
|
||||
io.wavesurfer.playPause();
|
||||
})
|
||||
},
|
||||
load_preview_from_audio: function(audio) {
|
||||
var io = this;
|
||||
io.audio_data = audio;
|
||||
io.target.find(".player").removeClass("hidden");
|
||||
io.wavesurfer.load(io.audio_data);
|
||||
if (io.state == "STOP_RECORDING") {
|
||||
io.state = "RECORDED";
|
||||
io.submit();
|
||||
}
|
||||
io.state = "RECORDED";
|
||||
},
|
||||
load_preview_from_files: function(files) {
|
||||
if (!files.length || !window.FileReader) {
|
||||
return
|
||||
}
|
||||
var ReaderObj = new FileReader()
|
||||
ReaderObj.readAsDataURL(files[0])
|
||||
ReaderObj.io = this;
|
||||
this.state = "AUDIO_LOADING"
|
||||
ReaderObj.onloadend = function() {
|
||||
let io = this.io;
|
||||
io.target.find(".upload_zone").hide();
|
||||
io.load_preview_from_audio(this.result);
|
||||
}
|
||||
},
|
||||
submit: function() {
|
||||
if (this.state == "RECORDED") {
|
||||
this.io_master.input(this.id, this.audio_data);
|
||||
@ -112,6 +145,7 @@ const audio_input = {
|
||||
this.target.find(".recording").addClass("hidden");
|
||||
this.target.find(".player").addClass("hidden");
|
||||
this.target.find(".upload_zone").show();
|
||||
this.target.find(".hidden_upload").prop("value", "")
|
||||
if (this.wavesurfer) {
|
||||
this.wavesurfer.stop();
|
||||
}
|
||||
|
@ -49,15 +49,7 @@ const file_input = {
|
||||
io.target.find(".upload_zone").hide();
|
||||
io.target.find(".file_display").removeClass("hide");
|
||||
io.target.find(".file_name").text(files[0].name);
|
||||
let bytes = files[0].size;
|
||||
let units = ["B", "KB", "MB", "GB", "PB"];
|
||||
let i = 0;
|
||||
while (bytes > 1024) {
|
||||
bytes /= 1024;
|
||||
i++;
|
||||
}
|
||||
let unit = units[i];
|
||||
io.target.find(".file_size").text(bytes.toFixed(1) + " " + unit);
|
||||
io.target.find(".file_size").text(prettyBytes(files[0].size));
|
||||
io.file_data = this.result;
|
||||
}
|
||||
},
|
||||
|
@ -1,9 +1,26 @@
|
||||
const image_input = {
|
||||
html: `
|
||||
<div class="interface_box">
|
||||
<div class="upload_zone drop_zone">
|
||||
<div class="upload_zone drop_zone hide">
|
||||
<div class="input_caption">Drop Image Here<br>- or -<br>Click to Upload</div>
|
||||
</div>
|
||||
<div class="webcam upload_zone hide">
|
||||
<div class="webcam_box">
|
||||
</div>
|
||||
<span>Click to Snap!</span>
|
||||
</div>
|
||||
<div class="sketchpad hide">
|
||||
<div class="sketch_tools">
|
||||
<div id="brush_1" size="8" class="brush"></div>
|
||||
<div id="brush_2" size="16" class="brush selected"></div>
|
||||
<div id="brush_3" size="24" class="brush"></div>
|
||||
</div>
|
||||
<div class="view_holders">
|
||||
<div class="canvas_holder">
|
||||
<canvas class="sketch"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="image_display hide">
|
||||
<div class="edit_holder">
|
||||
<button class="edit_image interface_button primary">Edit</button>
|
||||
@ -12,9 +29,6 @@ const image_input = {
|
||||
<div class="image_preview_holder">
|
||||
<img class="image_preview" />
|
||||
</div>
|
||||
<div class="saliency_holder hide">
|
||||
<canvas class="saliency"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<input class="hidden_upload" type="file" accept="image/x-png,image/gif,image/jpeg" />
|
||||
@ -30,25 +44,70 @@ const image_input = {
|
||||
`,
|
||||
init: function(opts) {
|
||||
var io = this;
|
||||
this.source = opts.source;
|
||||
$('body').append(this.overlay_html.format(this.id));
|
||||
this.overlay_target = $(`.overlay[interface_id=${this.id}]`);
|
||||
this.target.find(".upload_zone").click(function (e) {
|
||||
io.target.find(".hidden_upload").click();
|
||||
});
|
||||
this.target.on('drag dragstart dragend dragover dragenter dragleave drop',
|
||||
".drop_zone", function(e) {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
})
|
||||
this.target.on('drop', '.drop_zone', function(e) {
|
||||
files = e.originalEvent.dataTransfer.files;
|
||||
io.load_preview_from_files(files)
|
||||
});
|
||||
this.target.find('.hidden_upload').on('change', function (e) {
|
||||
if (this.files) {
|
||||
io.load_preview_from_files(this.files);
|
||||
}
|
||||
})
|
||||
if (this.source == "upload") {
|
||||
io.target.find(".drop_zone").removeClass("hide");
|
||||
this.target.find(".drop_zone").click(function (e) {
|
||||
io.target.find(".hidden_upload").click();
|
||||
});
|
||||
this.target.on('drag dragstart dragend dragover dragenter dragleave drop',
|
||||
".drop_zone", function(e) {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
})
|
||||
this.target.on('drop', '.drop_zone', function(e) {
|
||||
files = e.originalEvent.dataTransfer.files;
|
||||
io.load_preview_from_files(files)
|
||||
});
|
||||
this.target.find('.hidden_upload').on('change', function (e) {
|
||||
if (this.files) {
|
||||
io.load_preview_from_files(this.files);
|
||||
}
|
||||
})
|
||||
} else if (this.source == "webcam") {
|
||||
io.target.find(".webcam").removeClass("hide");
|
||||
let w = this.target.find(".webcam_box").width();
|
||||
let h = this.target.find(".webcam_box").height();
|
||||
let RATIO = 4/3;
|
||||
let dim = Math.min(h, w / RATIO);
|
||||
Webcam.set({
|
||||
image_format: 'jpeg',
|
||||
width: dim * RATIO,
|
||||
height: dim,
|
||||
dest_width: dim * RATIO,
|
||||
dest_height: dim,
|
||||
})
|
||||
Webcam.attach(this.target.find(".webcam_box")[0]);
|
||||
io.target.find(".webcam").click(function() {
|
||||
Webcam.snap(function(image_data) {
|
||||
io.target.find(".webcam").hide();
|
||||
io.target.find(".image_display").removeClass("hide");
|
||||
io.set_image_data(image_data, /*update_editor=*/true);
|
||||
io.state = "IMAGE_LOADED";
|
||||
});
|
||||
})
|
||||
} else if (this.source == "canvas") {
|
||||
io.target.find(".sketchpad").removeClass("hide");
|
||||
var dimension = Math.min(this.target.find(".canvas_holder").width(),
|
||||
this.target.find(".canvas_holder").height()) - 2 // dimension - border
|
||||
var id = this.id;
|
||||
this.sketchpad = new Sketchpad({
|
||||
element: '.interface[interface_id=' + id + '] .sketch',
|
||||
width: dimension,
|
||||
height: dimension
|
||||
});
|
||||
this.sketchpad.penSize = this.target.find(".brush.selected").attr("size");
|
||||
this.canvas = this.target.find('.canvas_holder canvas')[0];
|
||||
this.context = this.canvas.getContext("2d");
|
||||
this.target.find(".brush").click(function (e) {
|
||||
io.target.find(".brush").removeClass("selected");
|
||||
$(this).addClass("selected");
|
||||
io.sketchpad.penSize = $(this).attr("size");
|
||||
})
|
||||
this.clear();
|
||||
}
|
||||
this.target.find('.edit_image').click(function (e) {
|
||||
io.overlay_target.removeClass("hide");
|
||||
})
|
||||
@ -72,42 +131,54 @@ const image_input = {
|
||||
this.overlay_target.find('.tui_close').click(function (e) {
|
||||
io.overlay_target.addClass("hide");
|
||||
if ($(e.target).hasClass('tui_save')) {
|
||||
// if (io.tui_editor.ui.submenu == "crop") {
|
||||
// io.tui_editor._cropAction().crop());
|
||||
// }
|
||||
io.set_image_data(io.tui_editor.toDataURL(), /*update_editor=*/false);
|
||||
}
|
||||
});
|
||||
},
|
||||
submit: function() {
|
||||
var io = this;
|
||||
if (this.state == "IMAGE_LOADED") {
|
||||
if (this.source == "canvas") {
|
||||
var dataURL = this.canvas.toDataURL("image/png");
|
||||
this.io_master.input(this.id, dataURL);
|
||||
} else if (this.state == "IMAGE_LOADED") {
|
||||
io.io_master.input(io.id, this.image_data);
|
||||
} else if (this.source == "webcam") {
|
||||
Webcam.snap(function(image_data) {
|
||||
io.target.find(".webcam").hide();
|
||||
io.target.find(".image_display").removeClass("hide");
|
||||
io.set_image_data(image_data, /*update_editor=*/true);
|
||||
io.state = "IMAGE_LOADED";
|
||||
io.io_master.input(io.id, image_data);
|
||||
});
|
||||
}
|
||||
},
|
||||
clear: function() {
|
||||
this.target.find(".upload_zone").show();
|
||||
this.target.find(".image_preview").attr('src', '');
|
||||
this.target.find(".image_display").addClass("hide");
|
||||
this.target.find(".hidden_upload").prop("value", "")
|
||||
this.state = "NO_IMAGE";
|
||||
this.image_data = null;
|
||||
if (this.source == "canvas") {
|
||||
this.context.fillStyle = "#FFFFFF";
|
||||
this.context.fillRect(0, 0, this.context.canvas.width, this.context.
|
||||
canvas.height);
|
||||
} else {
|
||||
this.target.find(".upload_zone").show();
|
||||
this.target.find(".image_preview").attr('src', '');
|
||||
this.target.find(".image_display").addClass("hide");
|
||||
this.target.find(".hidden_upload").prop("value", "")
|
||||
this.state = "NO_IMAGE";
|
||||
this.image_data = null;
|
||||
}
|
||||
},
|
||||
state: "NO_IMAGE",
|
||||
image_data: null,
|
||||
set_image_data: function(data, update_editor) {
|
||||
set_image_data: function(image_data, update_editor) {
|
||||
let io = this;
|
||||
resizeImage.call(this, data, 600, 600, function(image_data) {
|
||||
io.image_data = image_data
|
||||
io.target.find(".image_preview").attr('src', image_data);
|
||||
if (update_editor) {
|
||||
io.tui_editor.loadImageFromURL(io.image_data, 'input').then(function (sizeValue) {
|
||||
io.tui_editor.clearUndoStack();
|
||||
io.tui_editor.ui.activeMenuEvent();
|
||||
io.tui_editor.ui.resizeEditor({ imageSize: sizeValue });
|
||||
});
|
||||
}
|
||||
})
|
||||
io.image_data = image_data
|
||||
io.target.find(".image_preview").attr('src', image_data);
|
||||
if (update_editor) {
|
||||
io.tui_editor.loadImageFromURL(io.image_data, 'input').then(function (sizeValue) {
|
||||
io.tui_editor.clearUndoStack();
|
||||
io.tui_editor.ui.activeMenuEvent();
|
||||
io.tui_editor.ui.resizeEditor({ imageSize: sizeValue });
|
||||
});
|
||||
}
|
||||
},
|
||||
load_preview_from_files: function(files) {
|
||||
if (!files.length || !window.FileReader || !/^image/.test(files[0].type)) {
|
||||
@ -130,9 +201,21 @@ const image_input = {
|
||||
},
|
||||
load_example: function(data) {
|
||||
let io = this;
|
||||
io.target.find(".upload_zone").hide();
|
||||
io.target.find(".image_display").removeClass("hide");
|
||||
io.set_image_data(data, /*update_editor=*/true);
|
||||
io.state = "IMAGE_LOADED"
|
||||
if (this.source == "canvas") {
|
||||
this.clear();
|
||||
let ctx = this.context;
|
||||
var img = new Image;
|
||||
let dimension = this.target.find(".canvas_holder canvas").width();
|
||||
img.onload = function(){
|
||||
ctx.clearRect(0,0,dimension,dimension);
|
||||
ctx.drawImage(img,0,0,dimension,dimension);
|
||||
};
|
||||
img.src = data;
|
||||
} else {
|
||||
io.target.find(".upload_zone").hide();
|
||||
io.target.find(".image_display").removeClass("hide");
|
||||
io.set_image_data(data, /*update_editor=*/true);
|
||||
io.state = "IMAGE_LOADED";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -6,7 +6,6 @@ const webcam = {
|
||||
`,
|
||||
init: function(opts) {
|
||||
var io = this;
|
||||
// this.target.find(".webcam_box").width(this.target.find(".webcam_box").width);
|
||||
let w = this.target.find(".webcam_box").width();
|
||||
let h = this.target.find(".webcam_box").height();
|
||||
let RATIO = 4/3;
|
||||
|
@ -1,7 +1,7 @@
|
||||
const file_output = {
|
||||
html: `
|
||||
<div class="interface_mini_box">
|
||||
<div class="file_display hide">
|
||||
<a class="interface_mini_box">
|
||||
<div class="file_display file_download">
|
||||
<div class="file_name"></div>
|
||||
<div class="file_size"></div>
|
||||
</div>
|
||||
@ -10,11 +10,19 @@ const file_output = {
|
||||
init: function(opts) {
|
||||
},
|
||||
output: function(data) {
|
||||
this.target.find(".output_text").text(data);
|
||||
this.target.find(".file_name").text(data.name);
|
||||
this.target.find(".file_size").text(prettyBytes(data.size));
|
||||
this.target.find(".interface_mini_box")
|
||||
.attr("href", "data:;base64," + data.data)
|
||||
.attr("download", data.name);
|
||||
},
|
||||
submit: function() {
|
||||
},
|
||||
clear: function() {
|
||||
this.target.find(".output_text").empty();
|
||||
this.target.find(".file_name").empty();
|
||||
this.target.find(".file_size").empty();
|
||||
this.target.find(".interface_mini_box")
|
||||
.removeAttr("href")
|
||||
.removeAttr("download");
|
||||
}
|
||||
}
|
||||
|
@ -18,7 +18,6 @@ const highlighted_text = {
|
||||
},
|
||||
new_category_index: 0,
|
||||
generate_category_legend: function(map) {
|
||||
console.log(map)
|
||||
let default_colors = ["pink", "lightblue", "gold", "plum", "lightskyblue", "greenyellow", "khaki", "cyan", "moccasin", "lightgray"]
|
||||
for (let category in map) {
|
||||
if (category in this.color_map) {
|
||||
|
@ -1,11 +1,9 @@
|
||||
const label_output = {
|
||||
html: `
|
||||
<div class="interface_box">
|
||||
<div class="output_class"></div>
|
||||
<div class="confidence_intervals">
|
||||
<div class="labels"></div>
|
||||
<div class="confidences"></div>
|
||||
</div>
|
||||
<div class="output_class"></div>
|
||||
<div class="confidence_intervals">
|
||||
<div class="labels"></div>
|
||||
<div class="confidences"></div>
|
||||
</div>
|
||||
`,
|
||||
init: function(opts) {},
|
||||
|
@ -78,3 +78,14 @@ function interpolate(val, rgb1, rgb2) {
|
||||
function colorToString(rgb) {
|
||||
return "rgb(" + rgb[0] + ", " + rgb[1] + ", " + rgb[2] + ")";
|
||||
}
|
||||
|
||||
function prettyBytes(bytes) {
|
||||
let units = ["B", "KB", "MB", "GB", "PB"];
|
||||
let i = 0;
|
||||
while (bytes > 1024) {
|
||||
bytes /= 1024;
|
||||
i++;
|
||||
}
|
||||
let unit = units[i];
|
||||
return bytes.toFixed(1) + " " + unit;
|
||||
}
|
@ -35,6 +35,7 @@
|
||||
<link type="text/css" href="../static/css/vendor/jquery-ui.css" rel="stylesheet">
|
||||
<link type="text/css" href="../static/css/vendor/jexcel.min.css" rel="stylesheet">
|
||||
<link type="text/css" href="../static/css/vendor/jsuites.min.css" rel="stylesheet">
|
||||
<link type="text/css" href="../static/css/vendor/jsonTree.css" rel="stylesheet">
|
||||
<link href="https://fonts.googleapis.com/css?family=Open+Sans" rel="stylesheet">
|
||||
|
||||
<link rel="stylesheet" href="../static/css/style.css">
|
||||
|