# CRETA / app.py
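"""Streamlit chat app for "CRETA", a Gemini-backed assistant that can answer
questions about uploaded PDF documents through a LangChain LLMChain."""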
import os
import streamlit as st
from PyPDF2 import PdfReader
from langchain.chains import LLMChain
from streamlit_chat import message
import google.generativeai as genai
from langchain.prompts import PromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI
# The Gemini API key is expected in the GOOGLE_API_KEY environment variable.
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])
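# Gemini Pro chat model; temperature 0.8 keeps the answers conversational.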
llm = ChatGoogleGenerativeAI(model="gemini-pro",
temperature=0.8)
template = """You are a friendly chat assistant called "CRETA" having a conversation with a human. You were created by Pachaiappan, an AI Specialist.
provided document:
{provided_docs}
previous_chat:
{chat_history}
Human: {human_input}
Chatbot:"""
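# The template variables are filled in on every turn: the uploaded document
# text, the running chat history, and the user's latest message.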
prompt = PromptTemplate(
input_variables=["chat_history", "human_input", "provided_docs"], template=template
)
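# Chain that renders the prompt and sends it to the Gemini model;
# verbose=True prints the fully rendered prompt to the logs for debugging.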
llm_chain = LLMChain(
llm=llm,
prompt=prompt,
verbose=True,
)
def conversational_chat(query):
    # Rebuild the chat history and document context from session state on every
    # call instead of accumulating into module-level globals, which would
    # otherwise duplicate earlier turns in the prompt.
    previous_response = ""
    for turn in st.session_state['history']:
        if turn is not None:
            previous_response += f"Human: {turn[0]}\nChatbot: {turn[1]}\n"
    provided_docs = ""
    for doc in st.session_state["docs"]:
        if doc is not None:
            provided_docs += doc
    result = llm_chain.predict(
        chat_history=previous_response,
        human_input=query,
        provided_docs=provided_docs,
    )
    st.session_state['history'].append((query, result))
    return result
st.title("Chat Bot:")
st.text("I am CRETA, your friendly assistant")
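# Session state keeps the chat history, rendered messages, and uploaded
# document text alive across Streamlit reruns.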
if 'history' not in st.session_state:
    st.session_state['history'] = []
# Initialize messages
if 'generated' not in st.session_state:
    st.session_state['generated'] = ["Hello! Ask me anything"]
if 'past' not in st.session_state:
    st.session_state['past'] = [" "]
if 'docs' not in st.session_state:
    st.session_state['docs'] = []
def get_pdf_text(pdf_docs):
    # Concatenate the extracted text of every page of every uploaded PDF.
    text = ""
    for pdf in pdf_docs:
        pdf_reader = PdfReader(pdf)
        for page in pdf_reader.pages:
            # extract_text() returns None for pages without a text layer.
            text += page.extract_text() or ""
    return text
with st.sidebar:
    st.title("Add a file to CRETA's memory:")
    uploaded_files = st.file_uploader(
        "Upload your PDF files and click the Submit & Process button",
        accept_multiple_files=True,
    )
    if st.button("Submit & Process"):
        with st.spinner("Processing..."):
            # append() stores the combined text as a single entry; `+=` on a
            # list would splice the string in character by character.
            st.session_state["docs"].append(get_pdf_text(uploaded_files))
            st.success("Done")
# Create containers for chat history and user input
response_container = st.container()
container = st.container()
# User input form
user_input = st.chat_input("Ask Your Questions 👉..")
with container:
    if user_input:
        output = conversational_chat(user_input)
        # answer = response_generator(output)
        st.session_state['past'].append(user_input)
        st.session_state['generated'].append(output)
# Display chat history
if st.session_state['generated']:
    with response_container:
        for i in range(len(st.session_state['generated'])):
            # Index 0 is the placeholder user entry, so only the greeting is rendered for it.
            if i != 0:
                message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="adventurer")
            message(st.session_state["generated"][i], key=str(i), avatar_style="bottts")