kxx-kkk committed on
Commit
6583354
1 Parent(s): 0f5dc43

Upload app.py

Files changed (1)
app.py  +14 -12
app.py CHANGED
@@ -1,13 +1,13 @@
 import streamlit as st
 from transformers import pipeline
 
-st.set_page_config(page_title="Common NLP Tasks")
-st.title("Common NLP Tasks")
-st.subheader(":point_left: Use the menu on the left to select a NLP task (click on > if closed).")
-"""
-[![](https://img.shields.io/github/followers/OOlajide?label=OOlajide&style=social)](https://gitHub.com/OOlajide)
-[![](https://img.shields.io/twitter/follow/sageOlamide?label=@sageOlamide&style=social)](https://twitter.com/sageOlamide)
-"""
+st.set_page_config(page_title="Automated Question Answering System")
+st.title("Automated Question Answering System")
+st.subheader("Try")
+# """
+# [![](https://img.shields.io/github/followers/OOlajide?label=OOlajide&style=social)](https://gitHub.com/OOlajide)
+# [![](https://img.shields.io/twitter/follow/sageOlamide?label=@sageOlamide&style=social)](https://twitter.com/sageOlamide)
+# """
 # expander = st.sidebar.expander("About")
 # expander.write("This web app allows you to perform common Natural Language Processing tasks, select a task below to get started.")
 
@@ -20,14 +20,15 @@ def question_model():
     question_answerer = pipeline(model=model_name, tokenizer=model_name, task="question-answering")
     return question_answerer
 
-
-# if option == "Extractive question answering":
-st.markdown("<h2 style='text-align: center; color:grey;'>Extractive Question Answering</h2>", unsafe_allow_html=True)
+st.markdown("<h2 style='text-align: center; color:grey;'>Question Answering on Academic Essays</h2>", unsafe_allow_html=True)
 st.markdown("<h3 style='text-align: left; color:#F63366; font-size:18px;'><b>What is extractive question answering about?<b></h3>", unsafe_allow_html=True)
 st.write("Extractive question answering is a Natural Language Processing task where text is provided for a model so that the model can refer to it and make predictions about where the answer to a question is.")
 st.markdown('___')
-source = st.radio("How would you like to start? Choose an option below", ["I want to input some text", "I want to upload a file"])
+
+source = st.radio("How would you upload the essay? Choose an option below", ["I want to input some text", "I want to upload a file"])
+
 sample_question = "What did the shepherd boy do to amuse himself?"
+
 if source == "I want to input some text":
     with open("sample.txt", "r") as text_file:
         sample_text = text_file.read()
@@ -39,7 +40,8 @@ if source == "I want to input some text":
             question_answerer = question_model()
         with st.spinner(text="Getting answer..."):
             answer = question_answerer(context=context, question=question)
-            answer = answer["answer"]
+            # answer = answer["answer"]
+
         st.text(answer)
 elif source == "I want to upload a file":
     uploaded_file = st.file_uploader("Choose a .txt file to upload", type=["txt"])
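
Note on the last hunk: the transformers question-answering pipeline returns a dict with the keys score, start, end and answer, so with answer = answer["answer"] commented out, st.text(answer) now prints the whole dict rather than only the answer string. A minimal sketch of that behaviour follows; the checkpoint name is a placeholder, since the actual model_name set inside question_model() is not shown in this diff.

from transformers import pipeline

# Placeholder checkpoint for illustration; the real model_name in question_model()
# is not visible in this commit.
model_name = "distilbert-base-cased-distilled-squad"
question_answerer = pipeline(model=model_name, tokenizer=model_name, task="question-answering")

context = "A shepherd boy amused himself by crying 'Wolf! Wolf!' although there was no wolf."
question = "What did the shepherd boy do to amuse himself?"

answer = question_answerer(context=context, question=question)
print(answer)            # full dict: {'score': ..., 'start': ..., 'end': ..., 'answer': ...}
print(answer["answer"])  # just the answer span, which the now-commented-out line used to extract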