import gradio as gr
from transformers import BartForSequenceClassification, BartTokenizer

# NLI-based zero-shot classification, following
# https://joeddav.github.io/blog/2020/05/29/ZSL.html
tokenizer_bart = BartTokenizer.from_pretrained('facebook/bart-large-mnli')
model_bart_sq = BartForSequenceClassification.from_pretrained('facebook/bart-large-mnli')


def zs(premise, hypothesis):
    """Score a premise/hypothesis pair with BART-large-MNLI and return the
    contradiction, neutral, and entailment probabilities as percentages."""
    # Encode premise and hypothesis together as a single sequence pair.
    input_ids = tokenizer_bart.encode(premise, hypothesis, return_tensors='pt')
    logits = model_bart_sq(input_ids)[0]
    # Label order for facebook/bart-large-mnli: 0 = contradiction, 1 = neutral, 2 = entailment
    class_logits = logits[:, [0, 1, 2]]
    probs = class_logits.softmax(dim=1)
    contra_prob = round(probs[:, 0].item() * 100, 2)
    neut_prob = round(probs[:, 1].item() * 100, 2)
    entail_prob = round(probs[:, 2].item() * 100, 2)
    return contra_prob, neut_prob, entail_prob


# Simpler single-call UI, kept for reference:
# gr.Interface(fn=zs, inputs=["text", "text"], outputs=["text", "text", "text"]).launch()

with gr.Blocks() as demo:
    with gr.Row():
        premise = gr.Textbox(label="Premise")
        hypothesis = gr.Textbox(label="Hypothesis")
    with gr.Row():
        compute_btn = gr.Button("Compute")
    with gr.Row():
        entailment = gr.Textbox(label="Entailment Probability")
        contradiction = gr.Textbox(label="Contradiction Probability")
        neutral = gr.Textbox(label="Neutral Probability")
    # zs returns (contradiction, neutral, entailment), so map the outputs in that order.
    compute_btn.click(fn=zs, inputs=[premise, hypothesis], outputs=[contradiction, neutral, entailment])

demo.launch()
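
# Quick sanity check outside the UI (illustrative sketch only; the sentence pair is an
# example and the exact percentages depend on the model):
# print(zs("A soccer game with multiple males playing.", "Some men are playing a sport."))
# -> (contradiction %, neutral %, entailment %), with entailment highest for this pair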