File size: 1,245 Bytes
7da8368 519b826 50ccb93 7da8368 41c82ad 7da8368 3979dfb 7da8368 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 |
from LLM import LLM
import streamlit as st
def format_chat_history(chat_history):
    """Render a chat transcript as plain text.

    Args:
        chat_history: Iterable of ``(speaker, message)`` pairs, e.g.
            ``[("User", "hi"), ("Bot", "hello")]``.

    Returns:
        str: One ``"speaker: message"`` line per entry, each terminated
        by a newline. Empty string for an empty history.
    """
    # str.join builds the result in a single pass; the original `+=` loop
    # is quadratic in the worst case.
    return "".join(f"{speaker}: {message}\n" for speaker, message in chat_history)
def main():
    """Streamlit chat UI backed by an LLM.

    Streamlit re-runs this function from the top on every user interaction,
    so everything that must survive a rerun (the chat session and the
    history) is kept in ``st.session_state`` rather than in locals.
    """
    st.title("LLM Chat")

    # Fixed for now; see the commented-out experiment below for a
    # user-selectable model.
    model = "gpt2"
    context = (
        "You are a helpful assistant in a school. "
        "You are helping a student with his homework."
    )

    # Create the model and chat session once; later reruns reuse them.
    # The original code rebuilt LLM() and reset chat_history on every rerun,
    # so the history could never accumulate.
    if "chat" not in st.session_state:
        llm = LLM(model)
        st.session_state.chat = llm.get_chat(context=context)
        st.session_state.chat_history = []

    # No `while True` here: Streamlit's rerun model replaces the loop, and a
    # loop would re-create the same widgets and raise DuplicateWidgetID.
    user_input = st.text_input("User:")
    if st.button("Send") and user_input:
        st.session_state.chat_history.append(("User", user_input))
        bot_response = st.session_state.chat.answerStoreHistory(qn=user_input)
        st.session_state.chat_history.append(("Bot", bot_response))

    # st.text_area has no `readonly` kwarg; `disabled=True` is the supported
    # way to show a non-editable text box.
    st.text_area(
        "Chat History:",
        value=format_chat_history(st.session_state.chat_history),
        disabled=True,
    )
# model = st.text_input("model name: ")
#
# while model == "":
# time.sleep(0.1)
#
# # model = "mosaicml/mpt-7b-chat"
#
#
# st.write("Model name: ", model)
# st.write("Loading model...")
#
# llm = LLM(model)
# chat = llm.get_chat(context=context)
# while True:
# qn = input("Question: ")
# if qn == "exit":
# break
# chat.answerStoreHistory(qn=qn) |