# GenXai / app.py
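#
# Streamlit chat app for "CRETA", a Gemini-powered assistant built with LangChain.
# Users can upload PDFs or paste a URL in the sidebar; the extracted text is fed
# into the prompt as extra context for every answer.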
import streamlit as st
import os
from streamlit_chat import message
from PyPDF2 import PdfReader
import google.generativeai as genai
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain_google_genai import ChatGoogleGenerativeAI
import nest_asyncio
from langchain.document_loaders import WebBaseLoader
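# Allow nested asyncio event loops so WebBaseLoader.aload() can run inside Streamlit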
nest_asyncio.apply()
os.environ["GOOGLE_API_KEY"] = os.getenv("GOOGLE_API_KEY")
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])
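# Gemini chat model used for all responses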
llm = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.4)
template = """You are a friendly chatbot called "CRETA" who gives clear and helpful answers while having a conversation with a human. You were created by Suriya, an AI enthusiast.
provided_url_extracted_text:
{extracted_text}
provided document:
{provided_docs}
previous_chat:
{chat_history}
Human: {human_input}
Chatbot:"""
prompt = PromptTemplate(
    input_variables=["chat_history", "human_input", "provided_docs", "extracted_text"],
    template=template,
)
llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
    verbose=True,
)
previous_response = ""
provided_docs = ""
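# Build the prompt context (chat history, uploaded docs, URL text) and run the
# LLM chain for a single user query.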
def conversational_chat(query):
    global previous_response, provided_docs
    # Rebuild the conversation history string from session state on every call
    previous_response = ""
    for i in st.session_state['history']:
        if i is not None:
            previous_response += f"Human: {i[0]}\nChatbot: {i[1]}\n"
    # Concatenate all uploaded document text
    docs = ""
    for j in st.session_state["docs"]:
        if j is not None:
            docs += j
    ex_text = st.session_state["extracted_text"]
    provided_docs = docs
    result = llm_chain.predict(
        chat_history=previous_response,
        human_input=query,
        provided_docs=provided_docs,
        extracted_text=ex_text,
    )
    st.session_state['history'].append((query, result))
    return result
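# ---------------- Streamlit UI ----------------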
st.title("Chat Bot:")
st.text("I am CRETA, Your Friendly Assistant")
if 'history' not in st.session_state:
    st.session_state['history'] = []
# Initialize messages
if 'generated' not in st.session_state:
    st.session_state['generated'] = ["Hello! Ask me anything"]
if 'past' not in st.session_state:
    st.session_state['past'] = [" "]
if 'docs' not in st.session_state:
    st.session_state['docs'] = []
if "extracted_text" not in st.session_state:
    st.session_state["extracted_text"] = ""
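# Extract plain text from every page of the uploaded PDF files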
def get_pdf_text(pdf_docs):
    text = ""
    for pdf in pdf_docs:
        pdf_reader = PdfReader(pdf)
        for page in pdf_reader.pages:
            # extract_text() can return None for image-only pages
            text += page.extract_text() or ""
    return text
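# Fetch a web page with WebBaseLoader and return its concatenated text content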
def get_url_text(url_link):
    loader = WebBaseLoader(url_link)
    loader.requests_per_second = 1
    docs = loader.aload()
    extracted_text = ""
    for page in docs:
        extracted_text += page.page_content
    return extracted_text
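# Sidebar: upload PDFs or a URL and add their text to CRETA's context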
with st.sidebar:
    st.title("Add a file to CRETA's memory:")
    uploaded_file = st.file_uploader("Upload your PDF files and click on the Submit & Process button", accept_multiple_files=True)
    uploaded_url = st.text_area("Please enter a URL...")
    if st.button("Submit & Process"):
        if uploaded_file or uploaded_url:
            with st.spinner("Processing..."):
                if uploaded_file:
                    st.session_state["docs"].append(get_pdf_text(uploaded_file))
                if uploaded_url:
                    st.session_state["extracted_text"] += get_url_text(uploaded_url)
                st.success("Done")
# Create containers for chat history and user input
response_container = st.container()
container = st.container()
# User input form
user_input = st.chat_input("Ask your questions 👉")
with container:
    if user_input:
        output = conversational_chat(user_input)
        st.session_state['past'].append(user_input)
        st.session_state['generated'].append(output)
# Display chat history
if st.session_state['generated']:
    with response_container:
        for i in range(len(st.session_state['generated'])):
            # Skip the blank user turn paired with the initial greeting
            if i != 0:
                message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="adventurer")
            message(st.session_state["generated"][i], key=str(i), avatar_style="bottts")