import streamlit as st
from transformers import pipeline, AutoTokenizer, AutoModelForSeq2SeqLM

# Load the Flan-Alpaca model. It is a T5-based text-to-text model, so it needs a
# seq2seq head and the "text2text-generation" pipeline rather than an
# extractive question-answering head.
model_name = "declare-lab/flan-alpaca-base"
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
generator = pipeline("text2text-generation", model=model, tokenizer=tokenizer)


def main():
    # Set the app title
    st.title("Flan-Alpaca Model")

    # Text input for the user's question
    question = st.text_input("Enter your question here:")

    # Generate an answer when the user submits a non-empty question
    if st.button("Submit") and question:
        result = generator(question, max_length=256, do_sample=False)
        answer = result[0]["generated_text"]

        # Display the answer
        st.write("Answer:", answer)


if __name__ == "__main__":
    main()
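Because Streamlit reruns the entire script on every interaction, the pipeline above would be rebuilt each time the user presses Submit. A minimal sketch of one way to avoid that, assuming a Streamlit version that provides st.cache_resource:

import streamlit as st
from transformers import pipeline


@st.cache_resource
def load_generator(model_name: str = "declare-lab/flan-alpaca-base"):
    # Cached across reruns, so the model is downloaded and loaded only once per session.
    return pipeline("text2text-generation", model=model_name)


generator = load_generator()

With this, the rest of the app can call generator(question, ...) exactly as before, and only the Streamlit widgets are re-evaluated on each rerun.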