added ability to run multiple interfaces without restarting

Abubakar Abid 2019-02-19 00:11:46 -08:00
parent 8795282770
commit 4ef52d8717
8 changed files with 194 additions and 238 deletions
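In practical terms, this commit makes launch() scan for a free port (HTTP server from 7860 upward, websocket from 9200 upward) instead of hard-failing when the defaults are taken, which is what lets a second interface start while another is still running — one of the notebooks in this commit shows a launch landing on port 7868. A minimal usage sketch, assuming the Interface API shown in the notebooks below; the shout function and the 'textbox' output name are illustrative placeholders, not part of this commit:

import gradio

def shout(text):
    # Stand-in model: Interface.predict routes model_type='func' calls straight to this callable.
    return text.upper()

# With this commit, launch() picks the first free HTTP port from 7860 upward and the first
# free websocket port from 9200 upward, rather than erroring out if the defaults are in use.
iface = gradio.Interface(input='textbox', output='textbox', model=shout, model_type='func')
iface.launch()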

.idea/workspace.xml (generated)

@ -3,11 +3,13 @@
<component name="ChangeListManager">
<list default="true" id="fd73cd66-e80f-470e-a2ec-e220d3b6b864" name="Default Changelist" comment="">
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/Sentiment Analysis (textbox to class) .ipynb" beforeDir="false" afterPath="$PROJECT_DIR$/Sentiment Analysis (textbox to class) .ipynb" afterDir="false" />
<change beforePath="$PROJECT_DIR$/Test Notebook.ipynb" beforeDir="false" afterPath="$PROJECT_DIR$/Test Notebook.ipynb" afterDir="false" />
<change beforePath="$PROJECT_DIR$/Usage.ipynb" beforeDir="false" afterPath="$PROJECT_DIR$/Usage.ipynb" afterDir="false" />
<change beforePath="$PROJECT_DIR$/gradio.py" beforeDir="false" afterPath="$PROJECT_DIR$/gradio.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/js/all-io.js" beforeDir="false" afterPath="$PROJECT_DIR$/js/all-io.js" afterDir="false" />
<change beforePath="$PROJECT_DIR$/networking.py" beforeDir="false" afterPath="$PROJECT_DIR$/networking.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/outputs.py" beforeDir="false" afterPath="$PROJECT_DIR$/outputs.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/templates/tmp_html.html" beforeDir="false" afterPath="$PROJECT_DIR$/templates/tmp_html.html" afterDir="false" />
</list>
<option name="EXCLUDED_CONVERTED_TO_IGNORED" value="true" />
<option name="SHOW_DIALOG" value="false" />
@ -80,7 +82,7 @@
<entry key="html" value="3" />
<entry key="ipynb" value="1" />
<entry key="js" value="5" />
<entry key="py" value="9" />
<entry key="py" value="11" />
</counts>
</usages-collector>
<usages-collector id="statistics.file.types.open">
@ -90,7 +92,7 @@
<entry key="IPNB" value="1" />
<entry key="JavaScript" value="5" />
<entry key="PLAIN_TEXT" value="1" />
<entry key="Python" value="9" />
<entry key="Python" value="11" />
</counts>
</usages-collector>
<usages-collector id="statistics.file.extensions.edit">
@ -99,17 +101,17 @@
<entry key="dummy" value="10" />
<entry key="gitignore" value="2" />
<entry key="html" value="112" />
<entry key="js" value="240" />
<entry key="py" value="3205" />
<entry key="js" value="272" />
<entry key="py" value="4059" />
</counts>
</usages-collector>
<usages-collector id="statistics.file.types.edit">
<counts>
<entry key="CSS" value="11" />
<entry key="HTML" value="112" />
<entry key="JavaScript" value="240" />
<entry key="JavaScript" value="272" />
<entry key="PLAIN_TEXT" value="12" />
<entry key="Python" value="3205" />
<entry key="Python" value="4059" />
</counts>
</usages-collector>
</session>
@ -120,7 +122,7 @@
<entry file="file://$PROJECT_DIR$/css/draw-a-digit.css">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="1225">
<caret line="49" column="15" lean-forward="true" selection-start-line="49" selection-start-column="15" selection-end-line="49" selection-end-column="15" />
<caret line="75" column="15" selection-start-line="75" selection-start-column="15" selection-end-line="75" selection-end-column="15" />
</state>
</provider>
</entry>
@ -137,7 +139,7 @@
<file pinned="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/preprocessing_utils.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="-264">
<state relative-caret-position="336">
<caret line="16" column="31" selection-start-line="16" selection-start-column="31" selection-end-line="16" selection-end-column="31" />
</state>
</provider>
@ -146,11 +148,11 @@
<file pinned="false" current-in-tab="true">
<entry file="file://$PROJECT_DIR$/gradio.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="761">
<caret line="119" lean-forward="true" selection-start-line="119" selection-end-line="119" />
<state relative-caret-position="422">
<caret line="121" column="23" selection-start-line="121" selection-start-column="23" selection-end-line="121" selection-end-column="23" />
<folding>
<element signature="e#0#14#0" expanded="true" />
<marker date="1550552437371" expanded="true" signature="1885:2171" ph="..." />
<marker date="1550563754843" expanded="true" signature="1905:2502" ph="..." />
</folding>
</state>
</provider>
@ -158,9 +160,44 @@
</file>
<file pinned="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/js/all-io.js">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="100">
<caret line="4" column="34" selection-start-line="4" selection-start-column="34" selection-end-line="4" selection-end-column="34" />
</state>
</provider>
</entry>
</file>
<file pinned="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/networking.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="486">
<caret line="39" selection-start-line="32" selection-end-line="39" />
<folding>
<element signature="e#0#17#0" expanded="true" />
<marker date="1550562739002" expanded="true" signature="754:937" ph="..." />
<marker date="1550562739002" expanded="true" signature="2659:2661" ph="..." />
</folding>
</state>
</provider>
</entry>
</file>
<file pinned="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/inputs.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="50">
<caret line="2" selection-start-line="2" selection-end-line="2" />
<caret line="2" column="21" selection-start-line="2" selection-start-column="21" selection-end-line="2" selection-end-column="21" />
</state>
</provider>
</entry>
</file>
<file pinned="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/outputs.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="1350">
<caret line="60" selection-start-line="60" selection-end-line="60" />
<folding>
<element signature="e#0#35#0" expanded="true" />
</folding>
</state>
</provider>
</entry>
@ -174,18 +211,6 @@
</provider>
</entry>
</file>
<file pinned="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/networking.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="575">
<caret line="23" lean-forward="true" selection-start-line="23" selection-end-line="23" />
<folding>
<element signature="e#0#17#0" expanded="true" />
</folding>
</state>
</provider>
</entry>
</file>
</leaf>
</component>
<component name="FileTemplateManagerImpl">
@ -197,8 +222,6 @@
</component>
<component name="FindInProjectRecents">
<findStrings>
<find>width</find>
<find>submit</find>
<find>clear</find>
<find>ctx</find>
<find>clear-b</find>
@ -227,6 +250,8 @@
<find>ws</find>
<find>&lt;em</find>
<find>6002</find>
<find>INITIAL_WEBSOCKET_PORT</find>
<find>print(</find>
</findStrings>
<replaceStrings>
<replace>400</replace>
@ -264,9 +289,9 @@
<option value="$PROJECT_DIR$/preprocessing_utils.py" />
<option value="$PROJECT_DIR$/.gitignore" />
<option value="$PROJECT_DIR$/inputs.py" />
<option value="$PROJECT_DIR$/outputs.py" />
<option value="$PROJECT_DIR$/templates/all_io.html" />
<option value="$PROJECT_DIR$/css/draw-a-digit.css" />
<option value="$PROJECT_DIR$/outputs.py" />
<option value="$PROJECT_DIR$/networking.py" />
<option value="$PROJECT_DIR$/js/all-io.js" />
<option value="$PROJECT_DIR$/gradio.py" />
@ -461,20 +486,6 @@
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/outputs.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="250">
<caret line="10" column="10" selection-start-line="10" selection-start-column="10" selection-end-line="10" selection-end-column="10" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/inputs.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="50">
<caret line="2" column="21" lean-forward="true" selection-start-line="2" selection-start-column="21" selection-end-line="2" selection-end-column="21" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/css/index.css">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="700">
@ -524,24 +535,21 @@
<entry file="file://$PROJECT_DIR$/css/draw-a-digit.css">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="1225">
<caret line="49" column="15" lean-forward="true" selection-start-line="49" selection-start-column="15" selection-end-line="49" selection-end-column="15" />
<caret line="75" column="15" selection-start-line="75" selection-start-column="15" selection-end-line="75" selection-end-column="15" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/preprocessing_utils.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="-264">
<state relative-caret-position="336">
<caret line="16" column="31" selection-start-line="16" selection-start-column="31" selection-end-line="16" selection-end-column="31" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/networking.py">
<entry file="file://$PROJECT_DIR$/inputs.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="575">
<caret line="23" lean-forward="true" selection-start-line="23" selection-end-line="23" />
<folding>
<element signature="e#0#17#0" expanded="true" />
</folding>
<state relative-caret-position="50">
<caret line="2" column="21" selection-start-line="2" selection-start-column="21" selection-end-line="2" selection-end-column="21" />
</state>
</provider>
</entry>
@ -552,20 +560,42 @@
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/outputs.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="1350">
<caret line="60" selection-start-line="60" selection-end-line="60" />
<folding>
<element signature="e#0#35#0" expanded="true" />
</folding>
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/networking.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="486">
<caret line="39" selection-start-line="32" selection-end-line="39" />
<folding>
<element signature="e#0#17#0" expanded="true" />
<marker date="1550562739002" expanded="true" signature="754:937" ph="..." />
<marker date="1550562739002" expanded="true" signature="2659:2661" ph="..." />
</folding>
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/js/all-io.js">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="50">
<caret line="2" selection-start-line="2" selection-end-line="2" />
<state relative-caret-position="100">
<caret line="4" column="34" selection-start-line="4" selection-start-column="34" selection-end-line="4" selection-end-column="34" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/gradio.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="761">
<caret line="119" lean-forward="true" selection-start-line="119" selection-end-line="119" />
<state relative-caret-position="422">
<caret line="121" column="23" selection-start-line="121" selection-start-column="23" selection-end-line="121" selection-end-column="23" />
<folding>
<element signature="e#0#14#0" expanded="true" />
<marker date="1550552437371" expanded="true" signature="1885:2171" ph="..." />
<marker date="1550563754843" expanded="true" signature="1905:2502" ph="..." />
</folding>
</state>
</provider>

Sentiment Analysis (textbox to class) .ipynb

@ -30,62 +30,15 @@
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:From C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\framework\\op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Colocations handled automatically by placer.\n",
"WARNING:tensorflow:From C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\ops\\math_ops.py:3066: to_int32 (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Use tf.cast instead.\n"
]
}
],
"outputs": [],
"source": [
"model = load_model('models/sentiment.h5')"
"model = load_model('.models/sentiment.h5')"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[11, 6, 324]\n"
]
}
],
"source": [
"word2id = imdb.get_word_index()\n",
"text = \"this is nice\"\n",
"text = [word2id.get(i, ' ') for i in text.split(\" \")]\n",
"print(text)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"def preprocessing(text): \n",
" word2id = imdb.get_word_index()\n",
" text = [word2id.get(i, ' ') for i in text.split(\" \")] \n",
" max_words = 500\n",
" text = sequence.pad_sequences(text, maxlen=max_words)\n",
" return text"
]
},
{
"cell_type": "code",
"execution_count": 27,
"metadata": {},
"outputs": [],
"source": [
"from keras.preprocessing.text import one_hot\n",
@ -93,55 +46,28 @@
"from keras.preprocessing import sequence\n",
"\n",
"def preprocessing(text): \n",
" words = set(text_to_word_sequence(text))\n",
"# word2id = imdb.get_word_index()\n",
"# text = [word2id.get(i,' ') for i in text.split(\" \")] \n",
"\n",
"# print(text)\n",
" print(words)\n",
" vocab_size = len(words)\n",
" result = one_hot(words, round(vocab_size*1.3))\n",
" print(result)\n",
" max_words = 500\n",
" text = sequence.pad_sequences(text, maxlen=max_words)\n",
" return text"
" word2id = imdb.get_word_index()\n",
" integers = [word2id.get(i,None) for i in text.split(\" \") if word2id.get(i,None)] \n",
" padded = np.array(integers + [0]*(500-len(integers))).reshape(1,-1)\n",
" return padded"
]
},
{
"cell_type": "code",
"execution_count": 28,
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'much', 'beautiful', 'impressed', 'i', 'really', 'so', 'wow', 'is', 'this', \"i'm\", 'love'}\n"
]
},
{
"ename": "AttributeError",
"evalue": "'set' object has no attribute 'lower'",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)",
"\u001b[1;32m<ipython-input-28-c1c7236137f3>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[0mpreprocessing\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"Wow, this is really beautiful! I love this so much. I'm impressed.\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[1;32m<ipython-input-27-0d15108689eb>\u001b[0m in \u001b[0;36mpreprocessing\u001b[1;34m(text)\u001b[0m\n\u001b[0;32m 11\u001b[0m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mwords\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 12\u001b[0m \u001b[0mvocab_size\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mlen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mwords\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 13\u001b[1;33m \u001b[0mresult\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mone_hot\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mwords\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mround\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mvocab_size\u001b[0m\u001b[1;33m*\u001b[0m\u001b[1;36m1.3\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 14\u001b[0m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mresult\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 15\u001b[0m \u001b[0mmax_words\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;36m500\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras_preprocessing\\text.py\u001b[0m in \u001b[0;36mone_hot\u001b[1;34m(text, n, filters, lower, split)\u001b[0m\n\u001b[0;32m 88\u001b[0m \u001b[0mfilters\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mfilters\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 89\u001b[0m \u001b[0mlower\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mlower\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 90\u001b[1;33m split=split)\n\u001b[0m\u001b[0;32m 91\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 92\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras_preprocessing\\text.py\u001b[0m in \u001b[0;36mhashing_trick\u001b[1;34m(text, n, hash_function, filters, lower, split)\u001b[0m\n\u001b[0;32m 133\u001b[0m \u001b[0mfilters\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mfilters\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 134\u001b[0m \u001b[0mlower\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mlower\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 135\u001b[1;33m split=split)\n\u001b[0m\u001b[0;32m 136\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[1;33m[\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mhash_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mw\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;33m%\u001b[0m \u001b[1;33m(\u001b[0m\u001b[0mn\u001b[0m \u001b[1;33m-\u001b[0m \u001b[1;36m1\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;33m+\u001b[0m \u001b[1;36m1\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mw\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mseq\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 137\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras_preprocessing\\text.py\u001b[0m in \u001b[0;36mtext_to_word_sequence\u001b[1;34m(text, filters, lower, split)\u001b[0m\n\u001b[0;32m 41\u001b[0m \"\"\"\n\u001b[0;32m 42\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mlower\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 43\u001b[1;33m \u001b[0mtext\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtext\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mlower\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 44\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 45\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0msys\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mversion_info\u001b[0m \u001b[1;33m<\u001b[0m \u001b[1;33m(\u001b[0m\u001b[1;36m3\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;31mAttributeError\u001b[0m: 'set' object has no attribute 'lower'"
]
}
],
"outputs": [],
"source": [
"preprocessing(\"Wow, this is really beautiful! I love this so much. I'm impressed.\")"
"def postprocessing(prediction):\n",
" if prediction[0] > 0.5:\n",
" return \"Happy\"\n",
" else:\n",
" return \"Sad\""
]
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 5,
"metadata": {},
"outputs": [
{
@ -149,42 +75,24 @@
"output_type": "stream",
"text": [
"Model available locally at: http://localhost:7860/templates/tmp_html.html\n",
"Model available publicly for 8 hours at: http://b424b6a9.ngrok.io/templates/tmp_html.html\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"Error in connection handler\n",
"Traceback (most recent call last):\n",
" File \"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\websockets\\server.py\", line 169, in handler\n",
" yield from self.ws_handler(self, path)\n",
" File \"C:\\Users\\ALI\\Desktop\\gradiome\\gradio.py\", line 73, in communicate\n",
" prediction = self.predict(processed_input)\n",
" File \"C:\\Users\\ALI\\Desktop\\gradiome\\gradio.py\", line 57, in predict\n",
" return self.model_obj.predict(array)[0].argmax()\n",
" File \"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\keras\\engine\\training.py\", line 1149, in predict\n",
" x, _, _ = self._standardize_user_data(x)\n",
" File \"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\keras\\engine\\training.py\", line 751, in _standardize_user_data\n",
" exception_prefix='input')\n",
" File \"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\keras\\engine\\training_utils.py\", line 138, in standardize_input_data\n",
" str(data_shape))\n",
"ValueError: Error when checking input: expected embedding_1_input to have shape (500,) but got array with shape (1,)\n"
"http://30379407.ngrok.io\n",
"Model available publicly for 8 hours at: http://f3cf0e10.ngrok.io/templates/tmp_html.html\n",
"-- Gradio is in beta stage --\n",
"Please report all bugs to: a12d@stanford.edu\n",
"If you'd like to launch another gradio instance, please restart your notebook/python kernel.\n"
]
}
],
"source": [
"iface = gradio.Interface(input='textbox', output='class', model=model, model_type='keras',preprocessing_fn=preprocessing)\n",
"iface.launch()"
"gradio.Interface(input='textbox', output='class', model=model, model_type='keras', preprocessing_fn=preprocessing, postprocessing_fn=postprocessing).launch()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"display_name": "Python 3.6 (tensorflow)",
"language": "python",
"name": "python3"
"name": "tensorflow"
},
"language_info": {
"codemirror_mode": {
@ -196,7 +104,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.1"
"version": "3.6.7"
}
},
"nbformat": 4,


@ -18,6 +18,11 @@
"import sklearn\n",
"import gradio\n",
"from keras.models import load_model\n",
"import sys\n",
"import warnings\n",
"\n",
"if not sys.warnoptions:\n",
" warnings.simplefilter(\"ignore\")\n",
"\n",
"%load_ext autoreload\n",
"%autoreload 2"
@ -30,47 +35,22 @@
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:From C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\framework\\op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Colocations handled automatically by placer.\n",
"WARNING:tensorflow:From C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\keras\\backend\\tensorflow_backend.py:3445: calling dropout (from tensorflow.python.ops.nn_ops) with keep_prob is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Please use `rate` instead of `keep_prob`. Rate should be set to `rate = 1 - keep_prob`.\n",
"WARNING:tensorflow:From C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\ops\\math_ops.py:3066: to_int32 (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"Use tf.cast instead.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\ALI\\Anaconda3\\lib\\site-packages\\keras\\engine\\saving.py:327: UserWarning: Error in loading the saved optimizer state. As a result, your model is starting with a freshly initialized optimizer.\n",
"C:\\Users\\islam\\Anaconda3\\envs\\tensorflow\\lib\\site-packages\\keras\\engine\\saving.py:327: UserWarning: Error in loading the saved optimizer state. As a result, your model is starting with a freshly initialized optimizer.\n",
" warnings.warn('Error in loading the saved optimizer '\n"
]
}
],
"source": [
"model = load_model('models/mnist-cnn.h5')"
"model = load_model('.models/mnist-cnn.h5')"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"def text_function(text): \n",
" return text.upper()"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"scrolled": true
},
@ -79,8 +59,9 @@
"name": "stdout",
"output_type": "stream",
"text": [
"Model available locally at: http://localhost:7860/templates/tmp_html.html\n",
"Model available publicly for 8 hours at: https://b424b6a9.ngrok.io/templates/tmp_html.html\n"
"Model available locally at: http://localhost:7868/templates/tmp_html.html\n",
"Model available publicly for 8 hours at: https://43b48cd0.ngrok.io/templates/tmp_html.html\n",
"Note: Gradio is in beta stage, please report all bugs to: a12d@stanford.edu\n"
]
}
],
@ -88,27 +69,13 @@
"iface = gradio.Interface(input='sketchpad', output='class', model=model, model_type='keras')\n",
"iface.launch()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"display_name": "Python 3.6 (tensorflow)",
"language": "python",
"name": "python3"
"name": "tensorflow"
},
"language_info": {
"codemirror_mode": {
@ -120,7 +87,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.1"
"version": "3.6.7"
}
},
"nbformat": 4,

gradio.py

@ -11,6 +11,7 @@ nest_asyncio.apply()
LOCALHOST_IP = '127.0.0.1'
INITIAL_WEBSOCKET_PORT = 9200
TRY_NUM_PORTS = 100
class Interface():
@ -59,11 +60,20 @@ class Interface():
for line in lines:
fout.write(line)
def _set_socket_port_in_js(self, socket_port):
with open('js/all-io.js') as fin:
lines = fin.readlines()
lines[1] = 'var SOCKET_PORT = {}\n'.format(socket_port)
with open('js/all-io.js', 'w') as fout:
for line in lines:
fout.write(line)
def predict(self, array):
if self.model_type=='sklearn':
return self.model_obj.predict(array)[0]
return self.model_obj.predict(array)
elif self.model_type=='keras':
return self.model_obj.predict(array)[0].argmax()
return self.model_obj.predict(array)
elif self.model_type=='func':
return self.model_obj(array)
else:
@ -89,27 +99,29 @@ class Interface():
"""
Standard method shared by interfaces that launches a websocket at a specified IP address.
"""
networking.kill_processes([4040, 4041])
server_port = networking.start_simple_server()
path_to_server = 'http://localhost:{}/'.format(server_port)
path_to_template = self._build_template()
try:
start_server = websockets.serve(self.communicate, LOCALHOST_IP, INITIAL_WEBSOCKET_PORT)
except OSError:
print("Error: port 9200 is already taken. Please close the process running on 9200 "
"and try running gradio again.") # TODO(abidlabs): increment port number until free port is found
ports_in_use = networking.get_ports_in_use()
for i in range(TRY_NUM_PORTS):
if not ((INITIAL_WEBSOCKET_PORT + i) in ports_in_use):
break
else:
raise OSError("All ports from {} to {} are in use. Please close a port.".format(
INITIAL_WEBSOCKET_PORT, INITIAL_WEBSOCKET_PORT + TRY_NUM_PORTS))
print("Model available locally at: {}".format(path_to_server + path_to_template))
start_server = websockets.serve(self.communicate, LOCALHOST_IP, INITIAL_WEBSOCKET_PORT + i)
self._set_socket_port_in_js(INITIAL_WEBSOCKET_PORT + i)
if share_link:
site_ngrok_url = networking.setup_ngrok(server_port)
socket_ngrok_url = networking.setup_ngrok(INITIAL_WEBSOCKET_PORT, api_url=networking.NGROK_TUNNELS_API_URL2)
print(socket_ngrok_url)
self._set_socket_url_in_js(socket_ngrok_url)
print("NOTE: Gradio is in beta stage, please report all bugs to: a12d@stanford.edu")
print("Model available locally at: {}".format(path_to_server + path_to_template))
print("Model available publicly for 8 hours at: {}".format(site_ngrok_url + '/' + path_to_template))
print("-- Gradio is in beta stage --")
print("Please report all bugs to: a12d@stanford.edu")
print("If you'd like to launch another gradio instance, please restart your notebook/python kernel.")
asyncio.get_event_loop().run_until_complete(start_server)
try:
asyncio.get_event_loop().run_forever()

js/all-io.js

@ -1,11 +1,12 @@
var NGROK_URL = "wss://40d65b0a.ngrok.io"
var NGROK_URL = "ws://b2208ab1.ngrok.io"
var SOCKET_PORT = 9201
try {
var origin = window.location.origin;
if (origin.includes("ngrok")){
var ws = new WebSocket(NGROK_URL)
} else {
var ws = new WebSocket("ws://127.0.0.1:9200/")
var ws = new WebSocket("ws://127.0.0.1:" + SOCKET_PORT + "/")
}
ws.onerror = function(evt) {
console.log(evt)

networking.py

@ -4,8 +4,11 @@ import zipfile
import io
import sys
import os
from psutil import process_iter, AccessDenied
from signal import SIGTERM # or SIGKILL
INITIAL_PORT_VALUE = 7860
TRY_NUM_PORTS = 100
LOCALHOST_PREFIX = 'localhost:'
NGROK_TUNNELS_API_URL = "http://localhost:4040/api/tunnels" # TODO(this should be captured from output)
NGROK_TUNNELS_API_URL2 = "http://localhost:4041/api/tunnels" # TODO(this should be captured from output)
@ -16,10 +19,26 @@ NGROK_ZIP_URLS = {
"win32": "https://bin.equinox.io/c/4VmDzA7iaHb/ngrok-stable-windows-amd64.zip",
}
def get_ports_in_use():
ports_in_use = []
for proc in process_iter():
for conns in proc.connections(kind='inet'):
ports_in_use.append(conns.laddr.port)
return ports_in_use
def start_simple_server():
# TODO(abidlabs): increment port number until free port is found
subprocess.Popen(['python', '-m', 'http.server', str(INITIAL_PORT_VALUE)])
return INITIAL_PORT_VALUE
ports_in_use = get_ports_in_use()
for i in range(TRY_NUM_PORTS):
if not((INITIAL_PORT_VALUE + i) in ports_in_use):
break
else:
raise OSError("All ports from {} to {} are in use. Please close a port.".format(
INITIAL_PORT_VALUE, INITIAL_PORT_VALUE + TRY_NUM_PORTS))
subprocess.Popen(['python', '-m', 'http.server', str(INITIAL_PORT_VALUE + i)])
return INITIAL_PORT_VALUE + i
def download_ngrok():
@ -45,3 +64,13 @@ def setup_ngrok(local_port, api_url=NGROK_TUNNELS_API_URL):
raise RuntimeError("Not able to retrieve ngrok public URL")
def kill_processes(process_ids):
for proc in process_iter():
for conns in proc.connections(kind='inet'):
if conns.laddr.port in process_ids:
try:
proc.send_signal(SIGTERM) # or SIGKILL
except AccessDenied:
print("Unable to kill process running on port {}, please kill manually.".format(conns.laddr.port))

outputs.py

@ -1,5 +1,5 @@
from abc import ABC, abstractmethod
import numpy as np
class AbstractOutput(ABC):
"""
@ -37,7 +37,16 @@ class Class(AbstractOutput):
def _post_process(self, prediction):
"""
"""
return prediction
if isinstance(prediction, np.ndarray):
prediction = prediction.squeeze()
if prediction.size == 1:
return prediction
else:
return prediction.argmax()
elif isinstance(prediction, str):
return prediction
else:
raise ValueError("Unable to post-process model prediction.")
class Textbox(AbstractOutput):
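A standalone rendering of the new Class post-processing rule above, with a few worked calls; post_process_class is just an illustrative name for the same branching as Class._post_process:

import numpy as np

def post_process_class(prediction):
    # Same logic as outputs.Class._post_process in the diff above.
    if isinstance(prediction, np.ndarray):
        prediction = prediction.squeeze()
        if prediction.size == 1:
            return prediction          # single score (e.g. sigmoid output) passes through
        return prediction.argmax()     # softmax vector collapses to a class index
    if isinstance(prediction, str):
        return prediction              # already a label, e.g. "Happy" from a postprocessing_fn
    raise ValueError("Unable to post-process model prediction.")

print(post_process_class(np.array([[0.1, 0.7, 0.2]])))  # 1
print(post_process_class(np.array([0.93])))             # 0.93
print(post_process_class("Happy"))                      # Happy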

templates/tmp_html.html

@ -42,14 +42,14 @@
<!-- INPUT
====================================================================================================================================================== -->
<div class="col-6">
<h5>Textbox Input:</h5>
<textarea id="textbox-input"></textarea>
<h5>Sketch Pad Input: (Use your cursor to draw)</h5>
<canvas height="400" id="canvas" width="400"></canvas><br/>
<div aria-label="Basic example" class="btn-group" role="group">
<button class="btn btn-primary" id="submit-button" type="button">Submit</button>
<button class="btn btn-primary" id="submit-button" type="button">Recognize</button>
<button class="btn btn-secondary" id="clear-button" type="button">Clear</button>
</div>
</div>
<script src="../js/textbox-input.js"></script>
<script src="../js/sketchpad-input.js"></script>
<!-- OUTPUT
====================================================================================================================================================== -->