import streamlit as st
from transformers import pipeline

st.set_page_config(page_title="Automated Question Answering System")
st.title("Automated Question Answering System")
st.subheader("Try")
# """
# [![](https://img.shields.io/github/followers/OOlajide?label=OOlajide&style=social)](https://gitHub.com/OOlajide)
# [![](https://img.shields.io/twitter/follow/sageOlamide?label=@sageOlamide&style=social)](https://twitter.com/sageOlamide)
# """
# expander = st.sidebar.expander("About")
# expander.write("This web app allows you to perform common Natural Language Processing tasks, select a task below to get started.")

# st.sidebar.header("What will you like to do?")
# option = st.sidebar.radio("", ["Text summarization", "Extractive question answering", "Text generation"])

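# Load the question-answering pipeline once and cache it between reruns so the
# model is not re-downloaded on every interaction.
# Note: st.cache is Streamlit's legacy caching API; on recent Streamlit releases
# st.cache_resource is the recommended replacement for caching models.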
@st.cache(show_spinner=False, allow_output_mutation=True)
def question_model():
    model_name = "kxx-kkk/FYP_deberta-v3-base-squad2_mrqa"
    question_answerer = pipeline(model=model_name, tokenizer=model_name, task="question-answering")
    return question_answerer

st.markdown("<h2 style='text-align: center; color:grey;'>Question Answering on Academic Essays</h2>", unsafe_allow_html=True)
st.markdown("<h3 style='text-align: left; color:#F63366; font-size:18px;'><b>What is extractive question answering about?<b></h3>", unsafe_allow_html=True)
st.write("Extractive question answering is a Natural Language Processing task where text is provided for a model so that the model can refer to it and make predictions about where the answer to a question is.")
st.markdown('___')

source = st.radio("How would you like to provide the essay? Choose an option below", ["I want to input some text", "I want to upload a file"])

sample_question = "What is NLP?"

if source == "I want to input some text":
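    # Pre-fill the text area with the bundled example essay; sample.txt is expected
    # to sit in the app's working directory.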
    with open("sample.txt", "r") as text_file:
        sample_text = text_file.read()
    context = st.text_area("Use the example below or input your own text in English (10,000 characters max)", value=sample_text, max_chars=10000, height=330)
    question = st.text_input(label="Use the question below or enter your own question", value=sample_question)
    button = st.button("Get answer")
    if button:
        with st.spinner(text="Loading question model..."):
            question_answerer = question_model()
        with st.spinner(text="Getting answer..."):
            answer = question_answerer(context=context, question=question)
            answer = answer["answer"]

            st.text(answer)
elif source == "I want to upload a file":
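    # File-upload path: read the uploaded .txt file, decode it, and let the user
    # review or edit the text before asking a question.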
    uploaded_file = st.file_uploader("Choose a .txt file to upload", type=["txt"])
    if uploaded_file is not None:
        raw_text = uploaded_file.read().decode("utf-8")
        context = st.text_area("Review or edit the uploaded text", value=raw_text, height=330)
        question = st.text_input(label="Enter your question", value=sample_question)
        button = st.button("Get answer")
        if button:
            with st.spinner(text="Loading summarization model..."):
                question_answerer = question_model()
            with st.spinner(text="Getting answer..."):
                answer = question_answerer(context=context, question=question)
                answer = answer["answer"]
                st.text(answer)