Wootang01 committed on
Commit
d4b5af9
1 Parent(s): a6945bf

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +53 -0
app.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import transformers  # noqa: F401 -- imported to fail fast if the library is missing
import sentencepiece  # noqa: F401 -- tokenizer backend required by T5Tokenizer

# NOTE(review): the original `import ipython-autotime` was a SyntaxError — a
# hyphen is illegal in a module name. `ipython-autotime` is a Jupyter extension
# (`%load_ext autotime`), not an importable module, so it is dropped here.

from transformers import T5ForConditionalGeneration, T5Tokenizer

# Pretrained T5 checkpoint fine-tuned on SQuAD v1 for question generation.
# Loaded once at module import so every request reuses the same weights.
question_model = T5ForConditionalGeneration.from_pretrained('ramsrigouthamg/t5_squad_v1')
question_tokenizer = T5Tokenizer.from_pretrained('ramsrigouthamg/t5_squad_v1')
8
+
9
def get_question(sentence, answer, mdl, tknizer):
    """Generate a question whose answer is `answer`, given the context `sentence`.

    Args:
        sentence: Context paragraph the question should be answerable from.
        answer: The answer span the generated question must target.
        mdl: A T5ForConditionalGeneration-compatible model (has `.generate`).
        tknizer: The matching tokenizer (has `.encode_plus` / `.decode`).

    Returns:
        The generated question as a whitespace-stripped string.
    """
    # Prompt format the t5_squad_v1 checkpoint was fine-tuned on.
    text = "context: {} answer: {}".format(sentence, answer)
    print(text)  # debug trace of the exact prompt fed to the model

    max_len = 256
    encoding = tknizer.encode_plus(
        text,
        max_length=max_len,
        padding=False,  # replaces the deprecated `pad_to_max_length=False`
        truncation=True,
        return_tensors="pt",
    )

    input_ids, attention_mask = encoding["input_ids"], encoding["attention_mask"]

    # Beam search, single best sequence returned.
    outs = mdl.generate(
        input_ids=input_ids,
        attention_mask=attention_mask,
        early_stopping=True,
        num_beams=5,
        num_return_sequences=1,
        no_repeat_ngram_size=2,
        max_length=300,
    )

    dec = [tknizer.decode(ids, skip_special_tokens=True) for ids in outs]

    # The model emits "question: <text>". Strip only the leading tag: the
    # original `.replace("question:", "")` deleted EVERY occurrence, mangling
    # any generated question that itself contained the word "question:".
    question = dec[0].removeprefix("question:").strip()
    return question
32
+
33
+
34
# Smoke test at import time: generate one question from a hard-coded example
# so a broken model/tokenizer setup surfaces before the UI starts.
context = "Elon Musk said that Tesla will not accept payments in Bitcoin because of environmental concerns."
answer = "Elon Musk"

generated = get_question(context, answer, question_model, question_tokenizer)
print("question: ", generated)
39
+
40
import gradio as gr

# UI widgets (Gradio 2.x-style `gr.inputs`/`gr.outputs` API, matching the
# version this app was written against): a context paragraph, an answer
# keyword, and a read-only textbox for the generated question.
context = gr.inputs.Textbox(lines=5, placeholder="Enter paragraph/context here...")
answer = gr.inputs.Textbox(lines=3, placeholder="Enter answer/keyword here...")
question = gr.outputs.Textbox(type="auto", label="Question")
45
+
46
def generate_question(context, answer):
    """Gradio callback: delegate to get_question with the preloaded model and tokenizer."""
    return get_question(context, answer, question_model, question_tokenizer)
48
+
49
# Wire the widgets to the callback and start the web app.
iface = gr.Interface(
    fn=generate_question,
    inputs=[context, answer],
    outputs=question,
)
iface.launch(debug=False)