Ali Abid 2019-03-06 10:42:37 -08:00
commit b756313843
4 changed files with 57 additions and 79 deletions

View File

@@ -73,13 +73,13 @@
"output_type": "stream",
"text": [
"Epoch 1/1\n",
"3/3 [==============================] - 8s 3s/step - loss: 2.0548 - acc: 0.3412\n"
"3/3 [==============================] - 6s 2s/step - loss: 2.1068 - acc: 0.3067\n"
]
},
{
"data": {
"text/plain": [
"<tensorflow.python.keras.callbacks.History at 0x1fca491be48>"
"<tensorflow.python.keras.callbacks.History at 0x28f0f75c550>"
]
},
"execution_count": 5,
@@ -119,10 +119,30 @@
"output_type": "stream",
"text": [
"NOTE: Gradio is in beta stage, please report all bugs to: a12d@stanford.edu\n",
"Model available locally at: http://localhost:7860/interface.html\n",
"Model is running locally at: http://localhost:7860/interface.html\n",
"To create a public link, set `share=True` in the argument to `launch()`\n"
]
},
{
"data": {
"text/html": [
"\n",
" <iframe\n",
" width=\"1000\"\n",
" height=\"500\"\n",
" src=\"http://localhost:7860/interface.html\"\n",
" frameborder=\"0\"\n",
" allowfullscreen\n",
" ></iframe>\n",
" "
],
"text/plain": [
"<IPython.lib.display.IFrame at 0x28f091997f0>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
@@ -137,78 +157,15 @@
"name": "stderr",
"output_type": "stream",
"text": [
"127.0.0.1 - - [05/Mar/2019 23:13:12] \"GET /interface.html HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:13:12] \"GET /static/js/all-io.js HTTP/1.1\" 200 -\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'label': 2, 'confidences': [{'label': 2, 'confidence': 1.0}, {'label': 0, 'confidence': 0.0}, {'label': 0, 'confidence': 0.0}]}\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"127.0.0.1 - - [05/Mar/2019 23:14:47] \"GET /interface.html HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:14:47] \"GET /static/css/style.css HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:14:47] \"GET /static/css/gradio.css HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:14:47] \"GET /static/js/utils.js HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:14:47] \"GET /static/js/all-io.js HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:14:47] \"GET /static/js/image-upload-input.js HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:14:47] \"GET /static/img/logo_inline.png HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:14:47] \"GET /static/js/class-output.js HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:14:48] code 404, message File not found\n",
"127.0.0.1 - - [05/Mar/2019 23:14:48] \"GET /favicon.ico HTTP/1.1\" 404 -\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'label': 2, 'confidences': [{'label': 2, 'confidence': 1.0}, {'label': 0, 'confidence': 0.0}, {'label': 0, 'confidence': 0.0}]}\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"127.0.0.1 - - [05/Mar/2019 23:15:29] \"GET /interface.html HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:15:29] \"GET /static/js/all-io.js HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:15:32] \"GET /interface.html HTTP/1.1\" 200 -\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'label': 2, 'confidences': [{'label': 2, 'confidence': 1.0}, {'label': 0, 'confidence': 0.0}, {'label': 0, 'confidence': 0.0}]}\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"127.0.0.1 - - [05/Mar/2019 23:18:47] \"GET /interface.html HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:18:47] \"GET /static/css/gradio.css HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:18:47] \"GET /static/js/utils.js HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:18:47] \"GET /static/js/all-io.js HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:18:47] \"GET /static/js/image-upload-input.js HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [05/Mar/2019 23:18:48] \"GET /static/js/class-output.js HTTP/1.1\" 200 -\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'label': 2, 'confidences': [{'label': 2, 'confidence': 1.0}, {'label': 0, 'confidence': 0.0}, {'label': 0, 'confidence': 0.0}]}\n"
"127.0.0.1 - - [06/Mar/2019 10:19:46] \"GET /interface.html HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [06/Mar/2019 10:19:46] \"GET /interface.html HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [06/Mar/2019 10:19:46] \"GET /static/js/all-io.js HTTP/1.1\" 200 -\n",
"127.0.0.1 - - [06/Mar/2019 10:19:46] \"GET /static/js/all-io.js HTTP/1.1\" 200 -\n"
]
}
],
"source": [
"iface.launch(share=False)"
"iface.launch(browser=True, share=False)"
]
}
],
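
The updated cell calls `iface.launch(browser=True, share=False)`, and the new `display_data` output above shows the interface rendered inline as an IFrame pointing at http://localhost:7860/interface.html. A minimal usage sketch of the launch variants after this commit, assuming `iface` is the Interface object built in the earlier notebook cells:

    # In a notebook, inline defaults to True and browser to False:
    iface.launch()

    # Also open the local URL in a browser tab alongside the inline IFrame:
    iface.launch(browser=True, share=False)

    # Additionally generate a public link via ngrok:
    iface.launch(share=True)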

View File

@@ -11,7 +11,6 @@ import gradio.inputs
import gradio.outputs
from gradio import networking
import tempfile
from IPython.display import IFrame
nest_asyncio.apply()
@@ -109,6 +108,8 @@ class Interface:
await websocket.send(str(processed_output))
except websockets.exceptions.ConnectionClosed:
pass
except Exception as e:
print(e)
def predict(self, preprocessed_input):
"""
@@ -124,7 +125,7 @@ class Interface:
else:
ValueError('model_type must be one of: {}'.format(self.VALID_MODEL_TYPES))
def launch(self, share=False, new_tab=False):
def launch(self, inline=None, browser=None, share=False):
"""
Standard method shared by interfaces that creates the interface and sets up a websocket to communicate with it.
:param share: boolean. If True, then a share link is generated using ngrok and displayed to the user.
@@ -143,7 +144,7 @@
networking.set_socket_port_in_js(output_directory, websocket_port) # sets the websocket port in the JS file.
if self.verbose:
print("NOTE: Gradio is in beta stage, please report all bugs to: a12d@stanford.edu")
print("Model available locally at: {}".format(path_to_server + networking.TEMPLATE_TEMP))
print("Model is running locally at: {}".format(path_to_server + networking.TEMPLATE_TEMP))
if share:
site_ngrok_url = networking.setup_ngrok(server_port, websocket_port, output_directory)
@@ -162,9 +163,23 @@
except RuntimeError: # Runtime errors are thrown in jupyter notebooks because of async.
pass
if new_tab:
if inline is None:
try: # Check if running interactively using ipython.
_ = get_ipython()
inline = True
if browser is None:
browser = False
except NameError:
inline = False
if browser is None:
browser = True
else:
if browser is None:
browser = False
if browser:
webbrowser.open(path_to_server + networking.TEMPLATE_TEMP) # Open a browser tab with the interface.
else:
print("Interface displayed inline, to launch the interface in a new tab, set `new_tab=True` in the argument to `launch()`")
if inline:
from IPython.display import IFrame
display(IFrame(path_to_server + networking.TEMPLATE_TEMP, width=1000, height=500))
return path_to_server + networking.TEMPLATE_TEMP, site_ngrok_url
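
The new `launch(inline=None, browser=None, share=False)` signature replaces the old `new_tab` flag with two independent switches whose defaults depend on the environment: with no arguments, an IPython/Jupyter session gets an inline IFrame and no browser tab, a plain Python session gets a browser tab and no inline display, and passing `inline` explicitly leaves `browser` off unless it is requested. A standalone paraphrase of that defaulting logic, for illustration only (the helper name is hypothetical; the real method also starts the local server and websocket):

    def resolve_display_flags(inline=None, browser=None, running_in_ipython=False):
        # Illustrative helper, not part of gradio: mirrors the try/except
        # get_ipython() branch added to Interface.launch() above.
        if inline is None:
            if running_in_ipython:
                inline = True               # show the IFrame in the notebook
                if browser is None:
                    browser = False         # don't also open a browser tab
            else:
                inline = False
                if browser is None:
                    browser = True          # outside a notebook, open a tab
        else:
            if browser is None:
                browser = False             # inline was set explicitly
        return inline, browser

    # resolve_display_flags(running_in_ipython=True)  -> (True, False)
    # resolve_display_flags(running_in_ipython=False) -> (False, True)
    # resolve_display_flags(inline=True)              -> (True, False)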

View File

@@ -73,7 +73,6 @@ class Label(AbstractOutput):
response[Label.LABEL_KEY] = prediction
else:
raise ValueError("Unable to post-process model prediction.")
print(response)
return json.dumps(response)
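
With the debug `print(response)` removed, `Label` now only returns the post-processed prediction as a JSON string, which the interface sends over the websocket. A small sketch of that payload for the classifier in the notebook above, based on the `{'label': 2, 'confidences': [...]}` lines it used to print (the exact wire format beyond those keys is an assumption here):

    import json

    # Illustrative values; field names follow the dict the notebook printed.
    response = json.dumps({
        "label": 2,
        "confidences": [
            {"label": 2, "confidence": 1.0},
            {"label": 0, "confidence": 0.0},
            {"label": 0, "confidence": 0.0},
        ],
    })

    parsed = json.loads(response)
    print(parsed["label"])  # -> 2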

View File

@@ -33,13 +33,20 @@
margin-left: 1px;
}
.submit, .clear {
background-color: #F6F6F6 !important;
padding: 8px !important;
box-sizing: border-box;
width: calc(50% - 8px);
text-transform: uppercase;
font-weight: bold;
}
.clear {
background-color: #F6F6F6 !important;
}
.submit {
background-color: #EEA45D !important;
color: white !important;
}
.submit {
margin-right: 8px;
}