AilexGPT committed
Commit 41f82fa
1 Parent(s): 3de9c89

Create app.py

Files changed (1)
  1. app.py +73 -0
app.py ADDED
@@ -0,0 +1,73 @@
+ import gradio as gr
+ from langchain.document_loaders import OnlinePDFLoader
+ from langchain.text_splitter import CharacterTextSplitter
+ from langchain.llms import HuggingFaceHub
+ from langchain.embeddings import HuggingFaceHubEmbeddings
+ from langchain.vectorstores import Chroma
+ from langchain.chains import RetrievalQA
+
+ def loading_pdf(): return 'Loading...'
+
+ def pdf_changes(pdf_doc, repo_id):
+     # Load the uploaded PDF, split it into chunks, embed the chunks into Chroma,
+     # and build a RetrievalQA chain backed by the selected Hugging Face Hub model.
+     loader = OnlinePDFLoader(pdf_doc.name)
+     documents = loader.load()
+     text_splitter = CharacterTextSplitter(chunk_size=1024, chunk_overlap=0)
+     texts = text_splitter.split_documents(documents)
+     embeddings = HuggingFaceHubEmbeddings()
+     db = Chroma.from_documents(texts, embeddings)
+     retriever = db.as_retriever()
+     llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={'temperature': 0.5, 'max_new_tokens': 2096})
+     global qa  # expose the chain so bot()/infer() can use it
+     qa = RetrievalQA.from_chain_type(llm=llm, chain_type='stuff', retriever=retriever, return_source_documents=True)
+     return "Ready"
+
+ def add_text(history, text):
+     history = history + [(text, None)]
+     return history, ''
+
+ def bot(history):
+     response = infer(history[-1][0])
+     history[-1][1] = response['result']
+     return history
+
+ def infer(question):
+     # Query the RetrievalQA chain built in pdf_changes()
+     query = question
+     result = qa({'query': query})
+     return result
+
+ css = """
+ #col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
+ """
+
+ title = """
+ <h1>Chat with PDF</h1>
+ """
+
+ with gr.Blocks(css=css, theme='NoCrypt/miku@1.2.1') as demo:
+     with gr.Column(elem_id='col-container'):
+         gr.HTML(title)
+
+         with gr.Column():
+             pdf_doc = gr.File(label='Upload a PDF', file_types=['.pdf'])
+             repo_id = gr.Dropdown(label='LLM',
+                                   choices=[
+                                       'mistralai/Mistral-7B-Instruct-v0.1',
+                                       'HuggingFaceH4/zephyr-7b-beta',
+                                       'meta-llama/Llama-2-7b-chat-hf',
+                                       '01-ai/Yi-6B-200K'
+                                   ],
+                                   value='mistralai/Mistral-7B-Instruct-v0.1')
+         with gr.Row():
+             langchain_status = gr.Textbox(label='Status', placeholder='', interactive=False)
+             load_pdf = gr.Button('Load PDF to LangChain')
+
+         chatbot = gr.Chatbot([], elem_id='chatbot')  # .style(height=350)
+         question = gr.Textbox(label='Question', placeholder='Type your query')
+         submit_btn = gr.Button('Send')
+
+         repo_id.change(pdf_changes, inputs=[pdf_doc, repo_id], outputs=[langchain_status], queue=False)
+         load_pdf.click(pdf_changes, inputs=[pdf_doc, repo_id], outputs=[langchain_status], queue=False)
+         question.submit(add_text, [chatbot, question], [chatbot, question]).then(bot, chatbot, chatbot)
+         submit_btn.click(add_text, [chatbot, question], [chatbot, question]).then(bot, chatbot, chatbot)
+
+ demo.launch()
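For reference, the retrieval pipeline that pdf_changes builds can also be exercised without the Gradio UI. A minimal sketch, assuming the same LangChain packages are installed, that HUGGINGFACEHUB_API_TOKEN is set in the environment (HuggingFaceHub and HuggingFaceHubEmbeddings read it), and a hypothetical PDF URL:

import os
from langchain.document_loaders import OnlinePDFLoader
from langchain.text_splitter import CharacterTextSplitter
from langchain.llms import HuggingFaceHub
from langchain.embeddings import HuggingFaceHubEmbeddings
from langchain.vectorstores import Chroma
from langchain.chains import RetrievalQA

assert os.environ.get('HUGGINGFACEHUB_API_TOKEN'), 'Hub LLM and embeddings need this token'

# Same steps as pdf_changes(): load, split, embed, index, then build the QA chain.
docs = OnlinePDFLoader('https://example.com/sample.pdf').load()  # hypothetical URL
chunks = CharacterTextSplitter(chunk_size=1024, chunk_overlap=0).split_documents(docs)
db = Chroma.from_documents(chunks, HuggingFaceHubEmbeddings())
qa = RetrievalQA.from_chain_type(
    llm=HuggingFaceHub(repo_id='mistralai/Mistral-7B-Instruct-v0.1',
                       model_kwargs={'temperature': 0.5, 'max_new_tokens': 2096}),
    chain_type='stuff',
    retriever=db.as_retriever(),
    return_source_documents=True,
)
print(qa({'query': 'What is this document about?'})['result'])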