# LLM_local/app.py
from LLM import LLM
import streamlit as st


def format_chat_history(chat_history):
    """Render the (speaker, message) pairs as plain text for display."""
    formatted_history = ""
    for chat in chat_history:
        formatted_history += f"{chat[0]}: {chat[1]}\n"
    return formatted_history

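# The local LLM module is not included in this file. Based on how it is used in
# main() below, it is assumed to expose roughly this interface (a sketch of the
# assumed API, not the actual implementation):
#
#     class LLM:
#         def __init__(self, model_name: str): ...        # e.g. LLM("gpt2")
#         def get_chat(self, context: str) -> "Chat": ...  # start a chat session
#
#     class Chat:
#         def answerStoreHistory(self, qn: str) -> str: ...  # answer and keep history
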
def main():
    st.title("LLM Chat")

    model = "gpt2"
    context = (
        "You are a helpful assistant in a school. "
        "You are helping a student with their homework."
    )

    # Streamlit reruns this script on every interaction, so the chat session and
    # history are kept in session_state instead of a blocking while-True loop.
    if "chat" not in st.session_state:
        llm = LLM(model)
        st.session_state.chat = llm.get_chat(context=context)
        st.session_state.chat_history = []

    user_input = st.text_input("User:")
    send = st.button("Send")
    chat_area = st.empty()

    if send and user_input:
        st.session_state.chat_history.append(("User", user_input))
        bot_response = st.session_state.chat.answerStoreHistory(qn=user_input)
        st.session_state.chat_history.append(("Bot", bot_response))

    chat_area.text(format_chat_history(st.session_state.chat_history))


if __name__ == "__main__":
    main()
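
# Launch the UI with Streamlit (run from the directory containing this file):
#     streamlit run app.py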
# model = st.text_input("model name: ")
#
# while model == "":
#     time.sleep(0.1)
#
# # model = "mosaicml/mpt-7b-chat"
#
#
# st.write("Model name: ", model)
# st.write("Loading model...")
#
# llm = LLM(model)
# chat = llm.get_chat(context=context)
# while True:
#     qn = input("Question: ")
#     if qn == "exit":
#         break
#     chat.answerStoreHistory(qn=qn)