Mirror of https://github.com/gradio-app/gradio.git

Commit fa94ff1eca: Merge branch 'master' of https://github.com/abidlabs/gradio
@@ -9,9 +9,18 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 47,
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"The autoreload extension is already loaded. To reload it, use:\n",
" %reload_ext autoreload\n"
]
}
],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
@@ -23,33 +32,73 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 48,
"metadata": {
"scrolled": false
},
"outputs": [],
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Exception ignored in: <function BaseSession._Callable.__del__ at 0x000002BC6788E378>\n",
"Traceback (most recent call last):\n",
" File \"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\client\\session.py\", line 1455, in __del__\n",
" self._session._session, self._handle, status)\n",
" File \"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\framework\\errors_impl.py\", line 528, in __exit__\n",
" c_api.TF_GetCode(self.status.status))\n",
"tensorflow.python.framework.errors_impl.InvalidArgumentError: No such callable handle: 0\n",
"Exception ignored in: <function BaseSession._Callable.__del__ at 0x000002BC6788E378>\n",
"Traceback (most recent call last):\n",
" File \"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\client\\session.py\", line 1455, in __del__\n",
" self._session._session, self._handle, status)\n",
" File \"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\framework\\errors_impl.py\", line 528, in __exit__\n",
" c_api.TF_GetCode(self.status.status))\n",
"tensorflow.python.framework.errors_impl.InvalidArgumentError: No such callable handle: 0\n",
"Exception ignored in: <function BaseSession._Callable.__del__ at 0x000002BC6788E378>\n",
"Traceback (most recent call last):\n",
" File \"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\client\\session.py\", line 1455, in __del__\n",
" self._session._session, self._handle, status)\n",
" File \"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\framework\\errors_impl.py\", line 528, in __exit__\n",
" c_api.TF_GetCode(self.status.status))\n",
"tensorflow.python.framework.errors_impl.InvalidArgumentError: No such callable handle: 0\n"
]
}
],
"source": [
"model = tf.keras.applications.MobileNet()"
]
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 82,
"metadata": {},
"outputs": [],
"source": [
"inp = gradio.inputs.ImageUpload()\n",
"out = gradio.outputs.Label(label_names='imagenet1000', max_label_length=8)\n",
"\n",
"io = gradio.Interface(inputs=inp, \n",
" outputs=out,\n",
" model=model, \n",
" model_type='keras')"
"import numpy as np\n",
"def saliency(model,img):\n",
" return img[0,:, :, 0]"
]
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 83,
"metadata": {},
"outputs": [],
"source": [
"inp = gradio.inputs.ImageUpload()\n",
"out = gradio.outputs.Label(label_names='imagenet1000', max_label_length=20)\n",
"\n",
"io = gradio.Interface(inputs=inp, \n",
" outputs=out,\n",
" model=model, \n",
" model_type='keras',\n",
" saliency=saliency)"
]
},
{
"cell_type": "code",
"execution_count": 84,
"metadata": {
"scrolled": false
},
@@ -59,8 +108,8 @@
"output_type": "stream",
"text": [
"NOTE: Gradio is in beta stage, please report all bugs to: gradio.app@gmail.com\n",
"Model is running locally at: http://localhost:7863/\n",
"Model available publicly at: https://48771.gradio.app\n"
"Model is running locally at: http://localhost:7870/\n",
"Model available publicly at: https://23329.gradio.app\n"
]
},
{
@@ -70,14 +119,14 @@
" <iframe\n",
" width=\"1000\"\n",
" height=\"500\"\n",
" src=\"http://localhost:7863/\"\n",
" src=\"http://localhost:7870/\"\n",
" frameborder=\"0\"\n",
" allowfullscreen\n",
" ></iframe>\n",
" "
],
"text/plain": [
"<IPython.lib.display.IFrame at 0x1ea2ab7f470>"
"<IPython.lib.display.IFrame at 0x2bc0334e3c8>"
]
},
"metadata": {},
@@ -87,13 +136,20 @@
"source": [
"io.launch(inline=True, share=True, validate=False);"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3.6 (tensorflow)",
"display_name": "Python 3",
"language": "python",
"name": "tensorflow"
"name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -105,7 +161,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
"version": "3.7.1"
}
},
"nbformat": 4,
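
The notebook hunks above wire a saliency hook into the MobileNet demo: a saliency(model, img) function is defined next to the Interface, and the Interface is now constructed with saliency=saliency. A minimal consolidated sketch of what these cells appear to do, assuming the beta gradio API used in this commit (gradio.inputs.ImageUpload, gradio.outputs.Label, the model/model_type keywords) and that gradio and tensorflow are imported in earlier cells not shown in the diff:

    import numpy as np
    import tensorflow as tf
    import gradio

    model = tf.keras.applications.MobileNet()

    # Per the docstring added in interface.py later in this commit, a saliency
    # function receives the model and the processed input and returns a 2-d array.
    # The notebook's placeholder simply returns one channel of the first image.
    def saliency(model, img):
        return img[0, :, :, 0]

    inp = gradio.inputs.ImageUpload()
    out = gradio.outputs.Label(label_names='imagenet1000', max_label_length=20)

    io = gradio.Interface(inputs=inp, outputs=out,
                          model=model, model_type='keras',
                          saliency=saliency)
    io.launch(inline=True, share=True, validate=False)
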
@@ -36,18 +36,21 @@
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:From C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\framework\\op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Colocations handled automatically by placer.\n",
"WARNING:tensorflow:From C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\keras\\backend\\tensorflow_backend.py:3445: calling dropout (from tensorflow.python.ops.nn_ops) with keep_prob is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Please use `rate` instead of `keep_prob`. Rate should be set to `rate = 1 - keep_prob`.\n",
"WARNING:tensorflow:From C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\ops\\math_ops.py:3066: to_int32 (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Use tf.cast instead.\n"
"ename": "OSError",
"evalue": "Unable to open file (unable to open file: name = '.models/emotion.h5', errno = 2, error message = 'No such file or directory', flags = 0, o_flags = 0)",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mOSError\u001b[0m Traceback (most recent call last)",
"\u001b[1;32m<ipython-input-2-b0a69f92c228>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[0mmodel\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mload_model\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'.models/emotion.h5'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras\\engine\\saving.py\u001b[0m in \u001b[0;36mload_model\u001b[1;34m(filepath, custom_objects, compile)\u001b[0m\n\u001b[0;32m 415\u001b[0m \u001b[0mmodel\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 416\u001b[0m \u001b[0mopened_new_file\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfilepath\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mh5py\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mGroup\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 417\u001b[1;33m \u001b[0mf\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mh5dict\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfilepath\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m'r'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 418\u001b[0m \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 419\u001b[0m \u001b[0mmodel\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0m_deserialize_model\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mf\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mcustom_objects\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mcompile\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras\\utils\\io_utils.py\u001b[0m in \u001b[0;36m__init__\u001b[1;34m(self, path, mode)\u001b[0m\n\u001b[0;32m 184\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_is_file\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;32mFalse\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 185\u001b[0m \u001b[1;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mpath\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mstr\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 186\u001b[1;33m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mdata\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mh5py\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mFile\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mpath\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mmode\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mmode\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 187\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_is_file\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;32mTrue\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 188\u001b[0m \u001b[1;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mpath\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mdict\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\h5py\\_hl\\files.py\u001b[0m in \u001b[0;36m__init__\u001b[1;34m(self, name, mode, driver, libver, userblock_size, swmr, **kwds)\u001b[0m\n\u001b[0;32m 310\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0mphil\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 311\u001b[0m \u001b[0mfapl\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mmake_fapl\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdriver\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mlibver\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 312\u001b[1;33m \u001b[0mfid\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mmake_fid\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mname\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mmode\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0muserblock_size\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfapl\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mswmr\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mswmr\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 313\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 314\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mswmr_support\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\h5py\\_hl\\files.py\u001b[0m in \u001b[0;36mmake_fid\u001b[1;34m(name, mode, userblock_size, fapl, fcpl, swmr)\u001b[0m\n\u001b[0;32m 140\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mswmr\u001b[0m \u001b[1;32mand\u001b[0m \u001b[0mswmr_support\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 141\u001b[0m \u001b[0mflags\u001b[0m \u001b[1;33m|=\u001b[0m \u001b[0mh5f\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mACC_SWMR_READ\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 142\u001b[1;33m \u001b[0mfid\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mh5f\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mopen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mname\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mflags\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfapl\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mfapl\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 143\u001b[0m \u001b[1;32melif\u001b[0m \u001b[0mmode\u001b[0m \u001b[1;33m==\u001b[0m \u001b[1;34m'r+'\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 144\u001b[0m \u001b[0mfid\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mh5f\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mopen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mname\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mh5f\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mACC_RDWR\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfapl\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mfapl\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;32mh5py\\_objects.pyx\u001b[0m in \u001b[0;36mh5py._objects.with_phil.wrapper\u001b[1;34m()\u001b[0m\n",
"\u001b[1;32mh5py\\_objects.pyx\u001b[0m in \u001b[0;36mh5py._objects.with_phil.wrapper\u001b[1;34m()\u001b[0m\n",
"\u001b[1;32mh5py\\h5f.pyx\u001b[0m in \u001b[0;36mh5py.h5f.open\u001b[1;34m()\u001b[0m\n",
"\u001b[1;31mOSError\u001b[0m: Unable to open file (unable to open file: name = '.models/emotion.h5', errno = 2, error message = 'No such file or directory', flags = 0, o_flags = 0)"
]
}
],
@@ -57,7 +60,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -68,46 +71,9 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Validating samples: 1/6 [=.....]\r\n",
"----------\n",
"Validation failed, likely due to incompatible pre-processing and model input. See below:\n",
"\n",
"Traceback (most recent call last):\n",
" File \"C:\\Users\\ALI\\Desktop\\gradiome\\gradio\\interface.py\", line 162, in validate\n",
" prediction = self.predict(processed_input)\n",
" File \"C:\\Users\\ALI\\Desktop\\gradiome\\gradio\\interface.py\", line 131, in predict\n",
" return self.model_obj.predict(preprocessed_input)\n",
" File \"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\keras\\engine\\training.py\", line 1149, in predict\n",
" x, _, _ = self._standardize_user_data(x)\n",
" File \"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\keras\\engine\\training.py\", line 751, in _standardize_user_data\n",
" exception_prefix='input')\n",
" File \"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\keras\\engine\\training_utils.py\", line 138, in standardize_input_data\n",
" str(data_shape))\n",
"ValueError: Error when checking input: expected conv2d_1_input to have shape (48, 48, 1) but got array with shape (224, 224, 3)\n",
"\n"
]
},
{
"ename": "RuntimeError",
"evalue": "Validation did not pass",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mRuntimeError\u001b[0m Traceback (most recent call last)",
"\u001b[1;32m<ipython-input-4-617bd3537cc5>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[0miface\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mgradio\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mInterface\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;34m'imageupload'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0moutputs\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;34m'label'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mmodel\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mmodel\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mmodel_type\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;34m'keras'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 2\u001b[1;33m \u001b[0miface\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mlaunch\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mshare\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;32mTrue\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[1;32m~\\Desktop\\gradiome\\gradio\\interface.py\u001b[0m in \u001b[0;36mlaunch\u001b[1;34m(self, inline, inbrowser, share, validate)\u001b[0m\n\u001b[0;32m 192\u001b[0m \"\"\"\n\u001b[0;32m 193\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mvalidate\u001b[0m \u001b[1;32mand\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mvalidate_flag\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 194\u001b[1;33m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mvalidate\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 195\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 196\u001b[0m \u001b[1;31m# If an existing interface is running with this instance, close it.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;32m~\\Desktop\\gradiome\\gradio\\interface.py\u001b[0m in \u001b[0;36mvalidate\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 181\u001b[0m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"\\n\\nValidation passed successfully!\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 182\u001b[0m \u001b[1;32mreturn\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 183\u001b[1;33m \u001b[1;32mraise\u001b[0m \u001b[0mRuntimeError\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"Validation did not pass\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 184\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 185\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0mlaunch\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0minline\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;32mNone\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0minbrowser\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;32mNone\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mshare\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;32mFalse\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mvalidate\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;32mTrue\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;31mRuntimeError\u001b[0m: Validation did not pass"
]
}
],
"outputs": [],
"source": [
"iface = gradio.Interface(inputs='imageupload', outputs='label', model=model, model_type='keras')\n",
"iface.launch(share=True)"
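
The failing cells above show two separate problems: load_model('.models/emotion.h5') cannot find the weights file, and launch() then aborts because its default validation step feeds sample images of shape (224, 224, 3) into a model that expects (48, 48, 1), raising RuntimeError("Validation did not pass"). A hedged sketch of the escape hatch used by the other notebooks in this commit (the validate keyword is visible in the launch() signature shown in the traceback); the proper fix would be preprocessing that resizes the input to the shape the emotion model expects:

    from keras.models import load_model
    import gradio

    # Assumes the weights file actually exists at this path, unlike the cell above.
    model = load_model('.models/emotion.h5')

    iface = gradio.Interface(inputs='imageupload', outputs='label',
                             model=model, model_type='keras')
    # Skip the sample-input validation that fails above; mirrors the
    # io.launch(..., validate=False) call used in the MobileNet notebook.
    iface.launch(share=True, validate=False)
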
@@ -9,18 +9,9 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"The autoreload extension is already loaded. To reload it, use:\n",
" %reload_ext autoreload\n"
]
}
],
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
@@ -32,16 +23,29 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": null,
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:From C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\ops\\resource_variable_ops.py:435: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Colocations handled automatically by placer.\n",
"WARNING:tensorflow:From C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\keras\\layers\\core.py:143: calling dropout (from tensorflow.python.ops.nn_ops) with keep_prob is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Please use `rate` instead of `keep_prob`. Rate should be set to `rate = 1 - keep_prob`.\n"
]
}
],
"source": [
"model = tf.keras.applications.MobileNet()"
]
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -56,52 +60,28 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": null,
"metadata": {
"scrolled": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Closing existing server...\n",
"NOTE: Gradio is in beta stage, please report all bugs to: gradio.app@gmail.com\n",
"Model is running locally at: http://localhost:7863/\n",
"Model available publicly at: https://25024.gradio.app -- may take up to a minute to setup.\n"
]
},
{
"data": {
"text/html": [
"\n",
" <iframe\n",
" width=\"1000\"\n",
" height=\"500\"\n",
" src=\"http://localhost:7863/\"\n",
" frameborder=\"0\"\n",
" allowfullscreen\n",
" ></iframe>\n",
" "
],
"text/plain": [
"<IPython.lib.display.IFrame at 0x266c18d8940>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"outputs": [],
"source": [
"io.launch(inline=True, validate=False);"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3.6 (tensorflow)",
"display_name": "Python 3",
"language": "python",
"name": "tensorflow"
"name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -113,7 +93,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
"version": "3.7.1"
}
},
"nbformat": 4,
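
The "Closing existing server..." line in the output above matches the comment visible in interface.py's launch() traceback ("If an existing interface is running with this instance, close it"): re-running the launch cell on the same Interface object appears to shut down the previous local server before starting a new one, so the cell can be re-executed without freeing the port by hand.

    # Re-executing this cell reuses the same Interface built earlier in the
    # notebook; the old local server is closed before a new one is started.
    io.launch(inline=True, validate=False)
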
@@ -119,7 +119,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
@@ -129,7 +129,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
@@ -139,9 +139,52 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 8,
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"No validation samples for this interface... skipping validation.\n",
"NOTE: Gradio is in beta stage, please report all bugs to: gradio.app@gmail.com\n",
"Model is running locally at: http://localhost:7862/\n",
"Model available publicly at: https://52991.gradio.app\n"
]
},
{
"data": {
"text/html": [
"\n",
" <iframe\n",
" width=\"1000\"\n",
" height=\"500\"\n",
" src=\"http://localhost:7862/\"\n",
" frameborder=\"0\"\n",
" allowfullscreen\n",
" ></iframe>\n",
" "
],
"text/plain": [
"<IPython.lib.display.IFrame at 0x1b59c46c6d8>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"(<gradio.networking.serve_files_in_background.<locals>.HTTPServer at 0x1b59a2a8d30>,\n",
" 'http://localhost:7862/',\n",
" 'https://52991.gradio.app')"
]
},
"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"io.launch()"
]
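
The execute_result above shows that io.launch() returns a 3-tuple: the background HTTPServer, the local URL, and the public share URL. A small sketch, assuming this beta API, of capturing those values instead of letting the notebook echo them:

    # io as constructed in the cells above
    server, local_url, share_url = io.launch()
    print(local_url)   # e.g. 'http://localhost:7862/'
    print(share_url)   # e.g. 'https://52991.gradio.app'
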
@@ -45,6 +45,7 @@ class Interface:
preprocessing_fns=None,
postprocessing_fns=None,
verbose=True,
saliency=None,
):
"""
:param inputs: a string or `AbstractInput` representing the input interface.
@@ -54,6 +55,8 @@
provided.
:param preprocessing_fns: an optional function that overrides the preprocessing function of the input interface.
:param postprocessing_fns: an optional function that overrides the postprocessing fn of the output interface.
:param saliency: an optional function that takes the model and the processed input and returns a 2-d array

"""
if isinstance(inputs, str):
    self.input_interface = gradio.inputs.registry[inputs.lower()](
@@ -94,6 +97,7 @@ class Interface:
self.validate_flag = False
self.simple_server = None
self.hash = random.getrandbits(32)
self.saliency = saliency

@staticmethod
def _infer_model_type(model):
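
The Interface hunk above adds a saliency keyword whose docstring describes the contract: a callable that takes the model and the processed input and returns a 2-d array. A minimal sketch of a callable satisfying that contract; the mean-absolute-value-over-channels heuristic is illustrative only, not something this commit ships:

    import numpy as np
    import tensorflow as tf
    import gradio

    model = tf.keras.applications.MobileNet()   # same model as the notebooks above

    def my_saliency(model, processed_input):
        # processed_input is assumed to be a (batch, height, width, channels)
        # array, as in the ImageUpload demos above; return a 2-d map for the
        # first image in the batch.
        return np.abs(processed_input[0]).mean(axis=-1)

    io = gradio.Interface(inputs='imageupload', outputs='label',
                          model=model, model_type='keras',
                          saliency=my_saliency)
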
@@ -171,8 +171,11 @@ def serve_files_in_background(interface, port, directory_to_serve=None):
processed_input = interface.input_interface.preprocess(msg["data"])
prediction = interface.predict(processed_input)
processed_output = interface.output_interface.postprocess(prediction)
output = {"action": "output", "data": processed_output}

if interface.saliency is not None:
    saliency = interface.saliency(interface.model_obj, processed_input)
    output = {"action": "output", "data": processed_output, "saliency": saliency.tolist()}
else:
    output = {"action": "output", "data": processed_output}
# Prepare return json dictionary.
self.wfile.write(json.dumps(output).encode())
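
The networking hunk above attaches the saliency map to the HTTP response only when a saliency function was supplied, and converts it with .tolist() because NumPy arrays are not JSON-serializable. A sketch of the resulting payload, with hypothetical values standing in for processed_output and the saliency array:

    import json
    import numpy as np

    processed_output = {"label": "cat"}   # hypothetical postprocessed prediction
    saliency_map = np.zeros((48, 48))     # hypothetical 2-d saliency array

    payload = {"action": "output",
               "data": processed_output,
               "saliency": saliency_map.tolist()}
    json.dumps(payload)   # serializes fine; passing the raw ndarray would raise TypeError
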