run480 committed on
Commit f6f725c
1 Parent(s): 3b5fa4f

Create app.py

Use the Hugging Face Transformers library for different tasks, starting with a question-answering task.

Files changed (1)
  1. app.py +16 -0
app.py ADDED
@@ -0,0 +1,16 @@
+ from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
+ import gradio as grad
+
+ # First, the RoBERTa base model is used, fine-tuned on the SQuAD 2.0 dataset.
+ # It has been trained on question-answer pairs, including unanswerable questions, for question answering.
+ mdl_name = "deepset/roberta-base-squad2"
+ my_pipeline = pipeline("question-answering", model=mdl_name, tokenizer=mdl_name)
+
+ def answer_question(question, context):
+     # Pack the question and the context passage into the dict format the pipeline expects.
+     di = {"question": question, "context": context}
+     response = my_pipeline(di)
+     return response
+
+ grad.Interface(answer_question, inputs=["text", "text"], outputs="text").launch()
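
Usage note: a minimal standalone sketch of calling the same question-answering pipeline outside Gradio, assuming the same model name as in app.py (the question/context pair below is made up for illustration). The pipeline returns a dict with answer, score, start, and end fields.

from transformers import pipeline

# Same model and tokenizer as app.py.
qa = pipeline("question-answering", model="deepset/roberta-base-squad2", tokenizer="deepset/roberta-base-squad2")

# Hypothetical example input; any short passage and a question about it will do.
result = qa(question="Where is the Eiffel Tower located?",
            context="The Eiffel Tower is a wrought-iron lattice tower located in Paris, France.")

print(result["answer"])  # extracted answer span, e.g. "Paris, France"
print(result["score"])   # confidence score for that span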