Merge branch 'abidlabs/external' of https://github.com/gradio-app/gradio into abidlabs/external

dawoodkhan82 2021-05-17 14:57:31 -04:00
commit b2f8aa39cf
4 changed files with 67 additions and 38 deletions

@@ -5,6 +5,7 @@ gradio/__init__.py
gradio/component.py
gradio/embeddings.py
gradio/encryptor.py
gradio/external.py
gradio/inputs.py
gradio/interface.py
gradio/interpretation.py
@@ -14,6 +15,7 @@ gradio/outputs.py
gradio/processing_utils.py
gradio/strings.py
gradio/test_data.py
gradio/transforms.py
gradio/tunneling.py
gradio/utils.py
gradio.egg-info/PKG-INFO

@@ -128,31 +128,50 @@ def get_huggingface_interface(model_name, api_key, alias):
    return interface_info
def get_gradio_interface(model_name, api_key, alias):
    api_url = "http://4553.gradiohub.com/api/predict/" #TODO(dawood): fetch based on model name
    pipeline = { #TODO(dawood): load from the config file
        'inputs': inputs.Textbox(label="Input"),
        'outputs': outputs.Textbox(label="Question"),
        'preprocess': lambda x: {"data": [x]},
        'postprocess': lambda r: r["data"][0],
        'examples': [['Hi, how are you?']]
    model_info = requests.get("https://gradio.app/get_config/{}".format(model_name)).json()
    config_info = json.loads(model_info["config"])
    api_url = "{}/api/predict/".format(model_info["url"])
    headers = {
        'authority': model_info["url"],
        'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="90", "Microsoft Edge";v="90"',
        'accept': 'application/json, text/javascript, */*; q=0.01',
        'sec-ch-ua-mobile': '?1',
        'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Mobile Safari/537.36 Edg/90.0.818.56',
        'content-type': 'application/json; charset=UTF-8',
        'origin': 'https://gradio.app',
        'sec-fetch-site': 'cross-site',
        'sec-fetch-mode': 'cors',
        'sec-fetch-dest': 'empty',
        'referer': 'https://gradio.app/',
        'accept-language': 'en-US,en;q=0.9',
    }
    def query_gradio_api(*input):
        payload = pipeline['preprocess'](*input)
        data = json.dumps(payload)
        response = requests.request("POST", api_url, data=data)
        response = requests.post(api_url, headers=headers, data=data)
        result = json.loads(response.content.decode("utf-8"))
        output = pipeline['postprocess'](result)
        return output
    query_gradio_api.__name__ = model_name
    if alias is None:
        query_gradio_api.__name__ = model_name
    else:
        query_gradio_api.__name__ = alias
    pipeline = {
        'inputs': [inp[0] for inp in config_info["input_interfaces"]],
        'outputs': [out[0] for out in config_info["output_interfaces"]],
        'preprocess': lambda x: {"data": [x]},
        'postprocess': lambda r: r["data"][0],
    }
    interface_info = {
        'fn': query_gradio_api,
        'inputs': pipeline['inputs'],
        'outputs': pipeline['outputs'],
        'title': model_name,
        # 'examples': pipeline['examples'],
    }
    return interface_info
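
For orientation, a minimal usage sketch of the loader defined above. The example model name and the Interface(**interface_info) call are assumptions for illustration; the diff itself only builds and returns the interface_info dict.

import gradio as gr
from gradio.external import get_gradio_interface

# Hypothetical model name standing in for a real hosted Gradio app
interface_info = get_gradio_interface("user/model-name", api_key=None, alias=None)

# interface_info holds 'fn' (query_gradio_api), 'inputs', 'outputs', and 'title',
# so it can be splatted into the Interface constructor (assumed consumption pattern)
io = gr.Interface(**interface_info)
io.launch()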

@@ -287,6 +287,7 @@ class Interface:
            if len(self.output_interfaces) == len(self.predict):
                prediction = [prediction]
            durations.append(duration)
            predictions.extend(prediction)
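
The single line added here, predictions.extend(prediction), flattens each predict function's output list into the shared predictions list so the combined outputs line up with the combined output interfaces. A toy illustration of extend versus append (the sample values are made up):

# Illustrative only: collecting per-model outputs into one flat list
predictions = []
for prediction in [["cat"], ["dog", "0.93"]]:  # one output list per predict function
    predictions.extend(prediction)             # flatten into a single list
print(predictions)  # ['cat', 'dog', '0.93']
# append would instead nest the lists: [['cat'], ['dog', '0.93']]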

@@ -3,34 +3,41 @@ Ways to transform interfaces to produce new interfaces
"""
from gradio.interface import Interface
def parallel(*interfaces, **options):
    fns = []
    outputs = []
    for io in interfaces:
        fns.extend(io.predict)
        outputs.extend(io.output_interfaces)
    return Interface(fn=fns, inputs=interfaces[0].input_interfaces, outputs=outputs,
                     repeat_outputs_per_model=False, **options)
def series(*interfaces, **options):
    fns = [io.predict for io in interfaces]
    def connected_fn(data): # actually not used.
        for fn in fns:
            data = fn(data)
        return data
    connected_fn.__name__ = " => ".join([f[0].__name__ for f in fns])
    def connected_process_fn(data): # we have to include the pre/postprocessing of every interface
class Parallel(Interface):
    def __init__(self, *interfaces, **options):
        fns = []
        outputs = []
        for io in interfaces:
            data = io.process(data)
        return data
            fns.extend(io.predict)
            outputs.extend(io.output_interfaces)
        super().__init__(fn=fns, inputs=interfaces[0].input_interfaces, outputs=outputs,
                         repeat_outputs_per_model=False, **options)
    io = Interface(connected_fn, interfaces[0].input_interfaces, interfaces[-1].output_interfaces, **options)
    io.process = connected_process_fn
    return io
class Series(Interface):
    def __init__(self, *interfaces, **options):
        fns = [io.predict for io in interfaces]
        def connected_fn(data): # Run each function with the appropriate preprocessing and postprocessing
            data = [data] # put it in a list before it gets unraveled
            for idx, io in enumerate(interfaces):
                # skip preprocessing for first interface since the compound interface will include it
                if idx > 0:
                    data = [input_interface.preprocess(data[i]) for i, input_interface in enumerate(io.input_interfaces)]
                # run all of predictions sequentially
                predictions = []
                for predict_fn in io.predict:
                    prediction = predict_fn(*data)
                    predictions.append(prediction)
                data = predictions
                # skip postprocessing for final interface since the compound interface will include it
                if idx < len(interfaces) - 1:
                    data = [output_interface.postprocess(data[i]) for i, output_interface in enumerate(io.output_interfaces)]
            return data[0]
        connected_fn.__name__ = " => ".join([f[0].__name__ for f in fns])
        super().__init__(connected_fn, interfaces[0].input_interfaces, interfaces[-1].output_interfaces, **options)
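
A brief usage sketch of the new Parallel and Series classes. The toy functions and the "text" component shortcuts are illustrative assumptions, not part of the diff.

import gradio as gr
from gradio.transforms import Parallel, Series

# Two single-function interfaces to compose
upper_case = gr.Interface(lambda s: s.upper(), "text", "text")
reverse_text = gr.Interface(lambda s: s[::-1], "text", "text")

# Parallel: the same input is sent to both interfaces and their outputs are shown together
Parallel(upper_case, reverse_text).launch()

# Series: the output of upper_case is fed as the input to reverse_text
# Series(upper_case, reverse_text).launch()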