"""Gradio chat UI for the FUT FUT chatbot (a RAG pipeline over Korean futsal data)."""

import gradio as gr

# NOTE(review): `client` is used in respond() but never defined or imported in
# this file — presumably a huggingface_hub.InferenceClient created elsewhere.
# TODO: confirm and define it before launch, or the app will crash on first chat.


def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat completion for *message*, given the conversation *history*.

    Builds an OpenAI-style message list (system prompt, alternating
    user/assistant turns, then the new user message) and yields the
    accumulated response text after each streamed token so the
    ChatInterface renders it incrementally.

    Args:
        message: The new user message.
        history: Prior (user, assistant) turn pairs; empty strings are skipped.
        system_message: System prompt placed first in the message list.
        max_tokens: Generation length cap passed to the backend.
        temperature: Sampling temperature passed to the backend.
        top_p: Nucleus-sampling parameter passed to the backend.

    Yields:
        The full response text accumulated so far.
    """
    messages = [{"role": "system", "content": system_message}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    response = ""
    # Distinct loop name: the original reused `message`, shadowing the parameter.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        # Streamed deltas may carry None content (e.g. role-only chunks);
        # concatenating None would raise TypeError, so skip them.
        if token:
            response += token
        yield response


# For information on how to customize the ChatInterface, peruse the gradio
# docs: https://www.gradio.app/docs/chatinterface

TITLE = "FUT FUT Chatbot"

DESCRIPTION = """
'해요'체를 사용하며 친절하게 답하는 풋풋이 챗봇. A rag pipeline with a chatbot feature

Resources used to build this project :

* embedding model : https://huggingface.co/BM-K/KoSimCSE-roberta-multitask
* dataset : https://huggingface.co/datasets/Dongwookss/q_a_korean_futsal
* vector DB : PINECONE
* chatbot : https://huggingface.co/Dongwookss/small_fut_final
"""

EXAMPLES = [
    ["시흥 풋살 구장 추천해줘"],
    ["풋살 경기 규칙 설명해줘"],
    ["풋살 경기 시간 알려줘"],
]

demo = gr.ChatInterface(
    # BUG FIX: the original passed `fn=talk`, but `talk` is undefined —
    # the handler defined in this file is respond().
    fn=respond,
    chatbot=gr.Chatbot(
        show_label=True,
        show_share_button=True,
        show_copy_button=True,
        likeable=True,
        layout="bubble",
        bubble_full_width=False,
    ),
    theme="Soft",
    title=TITLE,
    description=DESCRIPTION,
    # BUG FIX: `examples` was passed twice (a SyntaxError: duplicate keyword
    # argument). Keep the project-specific Korean examples.
    examples=EXAMPLES,
)

demo.launch(debug=True)

# if __name__ == "__main__":
#     demo.launch()