# BioGPT text-generation demo (Hugging Face Space, Gradio app)
import torch

import gradio as gr
from transformers import BioGptTokenizer, BioGptForCausalLM
# Human-readable labels shown in the model dropdown; load_model() maps
# each label to a Hugging Face Hub checkpoint id.
model_names = [
    "BioGPT",
    "BioGPT-Large",
    "BioGPT-QA-PubMedQA-BioGPT",
    "BioGPT-QA-PubMEDQA-BioGPT-Large",
    "BioGPT-RE-BC5CDR",
    "BioGPT-RE-DDI",
    "BioGPT-RE-DTI",
    "BioGPT-DC-HoC",
]
def load_model(model_name):
    """Load the tokenizer/model pair for a dropdown model label.

    Parameters
    ----------
    model_name : str
        One of the labels in ``model_names``.

    Returns
    -------
    (BioGptTokenizer, BioGptForCausalLM)
    """
    # Map UI labels to Hugging Face Hub checkpoint ids.  The original map
    # covered only two of the eight dropdown labels, so selecting any other
    # model raised KeyError at runtime.  Unknown labels now fall back to the
    # base checkpoint instead of crashing.
    model_name_map = {
        "BioGPT": "microsoft/biogpt",
        "BioGPT-Large": "microsoft/BioGPT-Large",
        "BioGPT-QA-PubMedQA-BioGPT": "microsoft/BioGPT-Large-PubMedQA",
        "BioGPT-QA-PubMEDQA-BioGPT-Large": "microsoft/BioGPT-Large-PubMedQA",
    }
    # NOTE(review): the RE-*/DC-* fine-tuned checkpoints have no public Hub
    # ids listed here — they currently fall back to the base model; confirm
    # the intended checkpoint ids and extend the map.
    hub_id = model_name_map.get(model_name, "microsoft/biogpt")
    tokenizer = BioGptTokenizer.from_pretrained(hub_id)
    model = BioGptForCausalLM.from_pretrained(hub_id)
    return tokenizer, model
def get_beam_output(sentence, selected_model, min_len, max_len, n_beams):
    """Generate a continuation of ``sentence`` with beam search.

    Parameters
    ----------
    sentence : str
        Prompt text.
    selected_model : str
        Dropdown label passed to :func:`load_model`.
    min_len, max_len : int
        Minimum / maximum generated sequence length in tokens.
    n_beams : int
        Number of beams for beam search.

    Returns
    -------
    str
        Decoded text of the best beam.

    Bug fix: ``min_len`` and ``max_len`` were accepted but ignored — the
    generate() call hard-coded ``min_length=100`` and ``max_length=1024``.
    They are now forwarded (cast to int, since Gradio sliders may deliver
    floats).
    """
    tokenizer, model = load_model(selected_model)
    inputs = tokenizer(sentence, return_tensors="pt")
    # Inference only — disable autograd bookkeeping.
    with torch.no_grad():
        beam_output = model.generate(
            **inputs,
            min_length=int(min_len),
            max_length=int(max_len),
            num_beams=int(n_beams),
            early_stopping=True,
        )
    # Decode the top-ranked beam, stripping BOS/EOS and other specials.
    return tokenizer.decode(beam_output[0], skip_special_tokens=True)
# Gradio input components, one per parameter of get_beam_output.
# Fixes: (1) the deprecated ``gr.inputs.*`` / ``default=`` API (removed in
# Gradio 3+) is replaced by top-level components with ``value=``;
# (2) the dropdown's initial value was "microsoft/biogpt", which is not one
# of its choices and is not a key load_model understands — it now defaults
# to the "BioGPT" label.
inputs = [
    gr.Textbox(label="prompt", lines=5, value="Bicalutamide"),
    gr.Dropdown(model_names, value="BioGPT", label="selected_model"),
    gr.Slider(1, 500, step=1, value=100, label="min_len"),
    gr.Slider(1, 2048, step=1, value=1024, label="max_len"),
    gr.Slider(1, 10, step=1, value=5, label="num_beams"),
]
# Output component — ``gr.outputs.Textbox`` is the removed legacy API.
outputs = gr.Textbox(label="output")

iface = gr.Interface(
    fn=get_beam_output,
    inputs=inputs,
    outputs=outputs,
    # Each example row must provide a value for every input component
    # (prompt, model, min_len, max_len, num_beams); the original rows
    # supplied only the prompt.
    examples=[
        ["Bicalutamide", "BioGPT", 100, 1024, 5],
        ["Janus kinase 3 (JAK-3)", "BioGPT", 100, 1024, 5],
        ["Apricitabine", "BioGPT", 100, 1024, 5],
        ["Xylazine", "BioGPT", 100, 1024, 5],
        ["Psoralen", "BioGPT", 100, 1024, 5],
        ["CP-673451", "BioGPT", 100, 1024, 5],
    ],
)

# launch(enable_queue=True) was removed in Gradio 4; queue() is the
# supported replacement and returns the interface for chaining.
iface.queue().launch(debug=True)