fixed multiple ngrok issue

This commit is contained in:
Abubakar Abid 2019-04-10 11:30:01 -07:00
parent da1622e215
commit 9b8adaa1fd
6 changed files with 87 additions and 38 deletions

View File

@@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -12,7 +12,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -48,13 +48,13 @@
"output_type": "stream",
"text": [
"Epoch 1/1\n",
"60000/60000 [==============================] - 25s 417us/step - loss: 0.2210 - acc: 0.9351\n"
"60000/60000 [==============================] - 24s 407us/step - loss: 0.2171 - acc: 0.9355\n"
]
},
{
"data": {
"text/plain": [
"<tensorflow.python.keras.callbacks.History at 0x22d334d8b00>"
"<tensorflow.python.keras.callbacks.History at 0x23bf7b1ae10>"
]
},
"execution_count": 4,
@@ -86,7 +86,7 @@
"text": [
"No validation samples for this interface... skipping validation.\n",
"NOTE: Gradio is in beta stage, please report all bugs to: contact.gradio@gmail.com\n",
"Model is running locally at: http://localhost:7861/\n",
"Model is running locally at: http://localhost:7860/\n",
"To create a public link, set `share=True` in the argument to `launch()`\n"
]
},
@@ -97,14 +97,14 @@
" <iframe\n",
" width=\"1000\"\n",
" height=\"500\"\n",
" src=\"http://localhost:7861/\"\n",
" src=\"http://localhost:7860/\"\n",
" frameborder=\"0\"\n",
" allowfullscreen\n",
" ></iframe>\n",
" "
],
"text/plain": [
"<IPython.lib.display.IFrame at 0x22d2cf1e710>"
"<IPython.lib.display.IFrame at 0x23bf7900160>"
]
},
"metadata": {},
@@ -113,14 +113,33 @@
{
"data": {
"text/plain": [
"(<gradio.networking.serve_files_in_background.<locals>.HTTPServer at 0x22d348a7240>,\n",
" 'http://localhost:7861/',\n",
"(<gradio.networking.serve_files_in_background.<locals>.HTTPServer at 0x23bf9174b70>,\n",
" 'http://localhost:7860/',\n",
" None)"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"Error in connection handler\n",
"Traceback (most recent call last):\n",
" File \"C:\\Users\\islam\\Anaconda3\\envs\\tensorflow\\lib\\site-packages\\websockets\\server.py\", line 169, in handler\n",
" yield from self.ws_handler(self, path)\n",
" File \"C:\\Users\\islam\\Repos\\gradio\\gradio\\interface.py\", line 114, in communicate\n",
" msg = json.loads(await websocket.recv())\n",
" File \"C:\\Users\\islam\\Anaconda3\\envs\\tensorflow\\lib\\json\\__init__.py\", line 354, in loads\n",
" return _default_decoder.decode(s)\n",
" File \"C:\\Users\\islam\\Anaconda3\\envs\\tensorflow\\lib\\json\\decoder.py\", line 339, in decode\n",
" obj, end = self.raw_decode(s, idx=_w(s, 0).end())\n",
" File \"C:\\Users\\islam\\Anaconda3\\envs\\tensorflow\\lib\\json\\decoder.py\", line 357, in raw_decode\n",
" raise JSONDecodeError(\"Expecting value\", s, err.value) from None\n",
"json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)\n"
]
}
],
"source": [

View File

@@ -77,7 +77,7 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
@@ -92,7 +92,7 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 4,
"metadata": {
"scrolled": false
},
@@ -103,7 +103,7 @@
"text": [
"NOTE: Gradio is in beta stage, please report all bugs to: contact.gradio@gmail.com\n",
"Model is running locally at: http://localhost:7860/\n",
"Model available publicly for 8 hours at: https://2140c179.gradio.app/\n"
"Model available publicly for 8 hours at: https://8b05f76e.gradio.app/\n"
]
},
{
@@ -120,7 +120,7 @@
" "
],
"text/plain": [
"<IPython.lib.display.IFrame at 0x1dc825539e8>"
"<IPython.lib.display.IFrame at 0x1167b1d59e8>"
]
},
"metadata": {},
@@ -128,7 +128,7 @@
}
],
"source": [
"io.launch(inline=True, inbrowser=True, share=True, validate=False);"
"io.launch(inline=True, inbrowser=False, share=True, validate=False);"
]
}
],

View File

@@ -9,7 +9,7 @@ import nest_asyncio
import webbrowser
import gradio.inputs
import gradio.outputs
from gradio import networking
from gradio import networking, strings
import tempfile
import threading
import traceback
@@ -71,6 +71,7 @@ class Interface:
self.status = self.STATUS_TYPES['OFF']
self.validate_flag = False
self.simple_server = None
self.ngrok_api_ports = None
@staticmethod
def _infer_model_type(model):
@@ -240,25 +241,28 @@ class Interface:
pass
if self.verbose:
print("NOTE: Gradio is in beta stage, please report all bugs to: contact.gradio@gmail.com")
print(strings.en["BETA_MESSAGE"])
if not is_colab:
print(f"Model is running locally at: {path_to_local_server}")
print(strings.en["RUNNING_LOCALLY"].format(path_to_local_server))
if share:
try:
path_to_ngrok_server = networking.setup_ngrok(server_port, websocket_port, output_directory)
path_to_ngrok_server, ngrok_api_ports = networking.setup_ngrok(
server_port, websocket_port, output_directory, self.ngrok_api_ports)
self.ngrok_api_ports = ngrok_api_ports
except RuntimeError:
path_to_ngrok_server = None
if self.verbose:
print("Unable to create public link for interface, please check internet connection.")
print(strings.en["NGROK_NO_INTERNET"])
else:
if is_colab: # For a colab notebook, create a public link even if share is False.
path_to_ngrok_server = networking.setup_ngrok(server_port, websocket_port, output_directory)
path_to_ngrok_server, ngrok_api_ports = networking.setup_ngrok(
server_port, websocket_port, output_directory, self.ngrok_api_ports)
self.ngrok_api_ports = ngrok_api_ports
if self.verbose:
print(f"Cannot display local interface on google colab, public link created.")
print(strings.en["COLAB_NO_LOCAL"])
else: # If it's not a colab notebook and share=False, print a message telling them about the share option.
if self.verbose:
print("To create a public link, set `share=True` in the argument to `launch()`")
print(strings.en["PUBLIC_SHARE_TRUE"])
path_to_ngrok_server = None
if path_to_ngrok_server is not None:
@@ -266,7 +270,7 @@
subdomain = url.hostname.split('.')[0]
path_to_ngrok_interface_page = SHARE_LINK_FORMAT.format(subdomain)
if self.verbose:
print(f"Model available publicly for 8 hours at: {path_to_ngrok_interface_page}")
print(strings.en["MODEL_PUBLICLY_AVAILABLE_URL"].format(path_to_ngrok_interface_page))
# Keep the server running in the background.
asyncio.get_event_loop().run_until_complete(start_server)
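A minimal sketch of the relaunch flow this hunk introduces: the Interface remembers the ngrok inspection-API ports from its previous launch and hands them back to setup_ngrok, which can then tear down the stale tunnels before opening new ones. FakeInterface and launch_share below are illustrative names only, not part of the repo.

class FakeInterface:
    def __init__(self):
        self.ngrok_api_ports = None  # no tunnels have been created yet

    def launch_share(self, networking, server_port, websocket_port, output_directory):
        # Pass the ports from the previous launch so setup_ngrok can kill the
        # stale ngrok processes, then remember the new ports for next time.
        public_url, ports = networking.setup_ngrok(
            server_port, websocket_port, output_directory, self.ngrok_api_ports)
        self.ngrok_api_ports = ports
        return public_url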

View File

@@ -20,12 +20,14 @@ import pkg_resources
from bs4 import BeautifulSoup
from distutils import dir_util
from gradio import inputs, outputs
import time
import json
from urllib.parse import urlparse
INITIAL_PORT_VALUE = 7860 # The http server will try to open on port 7860. If not available, 7861, 7862, etc.
TRY_NUM_PORTS = 100 # Number of ports to try before giving up and throwing an exception.
LOCALHOST_NAME = 'localhost'
NGROK_TUNNELS_API_URL = "http://localhost:4040/api/tunnels" # TODO(this should be captured from output)
NGROK_TUNNELS_API_URL2 = "http://localhost:4041/api/tunnels" # TODO(this should be captured from output)
NGROK_TUNNEL_API_URL = "http://{}/api/tunnels"
BASE_TEMPLATE = pkg_resources.resource_filename('gradio', 'templates/base_template.html')
@@ -223,31 +225,46 @@ def download_ngrok():
os.chmod('ngrok', st.st_mode | stat.S_IEXEC)
def create_ngrok_tunnel(local_port, api_url):
def create_ngrok_tunnel(local_port, log_file):
if not(os.path.isfile('ngrok.exe') or os.path.isfile('ngrok')):
download_ngrok()
if sys.platform == 'win32':
subprocess.Popen(['ngrok', 'http', str(local_port)])
subprocess.Popen(['ngrok', 'http', str(local_port), '--log', log_file, '--log-format', 'json'])
else:
subprocess.Popen(['./ngrok', 'http', str(local_port)])
subprocess.Popen(['./ngrok', 'http', str(local_port), '--log', log_file, '--log-format', 'json'])
time.sleep(1.5) # Let ngrok write to the log file TODO(abidlabs): a better way to do this.
session = requests.Session()
retry = Retry(connect=3, backoff_factor=0.5)
adapter = HTTPAdapter(max_retries=retry)
session.mount('http://', adapter)
session.mount('https://', adapter)
r = session.get(api_url)
api_url = None
with open(log_file) as f:
for line in f:
log = json.loads(line)
if log["msg"] == "starting web service":
api_url = log["addr"]
api_port = urlparse(api_url).port
break
if api_url is None:
raise RuntimeError("Tunnel information not available in log file")
r = session.get(NGROK_TUNNEL_API_URL.format(api_url))
for tunnel in r.json()['tunnels']:
if '{}:'.format(LOCALHOST_NAME) + str(local_port) in tunnel['config']['addr']:
return tunnel['public_url']
if '{}:'.format(LOCALHOST_NAME) + str(local_port) in tunnel['config']['addr'] and tunnel['proto'] == 'https':
return tunnel['public_url'], api_port
raise RuntimeError("Not able to retrieve ngrok public URL")
def setup_ngrok(server_port, websocket_port, output_directory):
kill_processes([4040, 4041]) #TODO(abidlabs): better way to do this
site_ngrok_url = create_ngrok_tunnel(server_port, NGROK_TUNNELS_API_URL)
socket_ngrok_url = create_ngrok_tunnel(websocket_port, NGROK_TUNNELS_API_URL2)
def setup_ngrok(server_port, websocket_port, output_directory, existing_ports):
if not(existing_ports is None):
kill_processes(existing_ports)
site_ngrok_url, port1 = create_ngrok_tunnel(server_port, os.path.join(output_directory, 'ngrok1.log'))
socket_ngrok_url, port2 = create_ngrok_tunnel(websocket_port, os.path.join(output_directory, 'ngrok2.log'))
set_ngrok_url_in_js(output_directory, socket_ngrok_url)
return site_ngrok_url
return site_ngrok_url, [port1, port2]
def kill_processes(process_ids): #TODO(abidlabs): remove this, we shouldn't need to kill
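A self-contained sketch of the log-parsing step used above: ngrok is started with --log <file> --log-format json, and the log is scanned for the "starting web service" entry to learn which local port the inspection API ended up on. The polling loop and the way the port is pulled out of addr are simplifications of the committed code, and the shape of ngrok's log entries is assumed from this diff.

import json
import subprocess
import sys
import time

def start_tunnel_and_find_api_port(local_port, log_file, timeout=5.0):
    """Start an ngrok tunnel and return the port of its local web/API service,
    read from the JSON log it writes. Sketch only; error handling is minimal."""
    binary = 'ngrok' if sys.platform == 'win32' else './ngrok'
    subprocess.Popen([binary, 'http', str(local_port),
                      '--log', log_file, '--log-format', 'json'])
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            with open(log_file) as f:
                for line in f:
                    entry = json.loads(line)
                    if entry.get("msg") == "starting web service":
                        # addr is expected to look like "127.0.0.1:4040";
                        # splitting on the last ':' avoids depending on a scheme.
                        return int(entry["addr"].rsplit(":", 1)[-1])
        except (FileNotFoundError, json.JSONDecodeError):
            pass  # log not written (or only partially written) yet
        time.sleep(0.2)
    raise RuntimeError("Tunnel information not available in log file")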

View File

@@ -22,7 +22,7 @@ function notifyError(error) {
try {
var origin = window.location.origin;
if (origin.includes("ngrok")){
if (origin.includes("ngrok") || origin.includes("gradio.app")){ //TODO(abidlabs): better way to distinguish localhost?
var ws = new WebSocket(NGROK_URL)
} else {
var ws = new WebSocket("ws://127.0.0.1:" + SOCKET_PORT + "/")

gradio/strings.py (new file, 9 additions)
View File

@@ -0,0 +1,9 @@
en = {
"BETA_MESSAGE": "NOTE: Gradio is in beta stage, please report all bugs to: contact.gradio@gmail.com",
"RUNNING_LOCALLY": "Model is running locally at: {}",
"NGROK_NO_INTERNET": "Unable to create public link for interface, please check internet connection or try "
"restarting python interpreter.",
"COLAB_NO_LOCAL": "Cannot display local interface on google colab, public link created.",
"PUBLIC_SHARE_TRUE": "To create a public link, set `share=True` in the argument to `launch()`.",
"MODEL_PUBLICLY_AVAILABLE_URL": "Model available publicly for 8 hours at: {}"
}
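For reference, this is how the interface.py changes above consume the new strings module; the URL below is just a placeholder.

from gradio import strings

print(strings.en["BETA_MESSAGE"])
print(strings.en["RUNNING_LOCALLY"].format("http://localhost:7860/"))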