import gradio as gr
from transformers import AutoTokenizer, pipeline

# The tokenizer comes from the base Spanish GPT-2 checkpoint; generation uses
# the checkpoint fine-tuned on DISCO sonnet poetry.
sonnets_pretrained_model = "datificate/gpt2-small-spanish"
sonnets_tokenizer = AutoTokenizer.from_pretrained(sonnets_pretrained_model, use_fast=True)

sonnets_tuned_model = "hackathon-pln-es/gpt2-small-spanish-disco-poetry"

# GPT-2 is a causal (decoder-only) model, so the pipeline task is
# "text-generation" rather than "text2text-generation".
sonnets_pipe = pipeline(
    "text-generation",
    model=sonnets_tuned_model,
    tokenizer=sonnets_tokenizer,
)


def make_new_sonnet(prompt, max_length):
    """Generate a sonnet-style continuation of `prompt`, up to `max_length` tokens."""
    outputs = sonnets_pipe(
        prompt,
        max_length=max_length,
        num_beams=5,
        early_stopping=True,
        repetition_penalty=20.0,
        num_return_sequences=1,
    )
    return outputs[0]["generated_text"]


article = (
    "Don't miss this other cool Space based on a model of different styles "
    "of poetry in Spanish: poem-generation-es"
)
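
# A minimal smoke-test sketch: call make_new_sonnet once from the command line
# before the UI is built. It assumes the two checkpoints above are reachable,
# and it is gated behind the SMOKE_TEST environment variable so normal Space
# startup is unaffected.
import os

if os.environ.get("SMOKE_TEST") == "1":
    print(make_new_sonnet("vendrá la muerte y tendrá tus ojos", 140))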
" examples = [ ['vendrá la muerte y tendrá tus ojos','140'], ['buenas cosas pasan cuando anochece', '200'], ['al despertar el dinosaurio todavía estaba ahí', '140' ] ] iface = gr.Interface(fn=make_new_sonnet, title= "Generador de poesía basada en sonetos en español", inputs=[ gr.inputs.Textbox(lines=2, placeholder="Escrbe algo para comenzar", label='Escribe algo para comenzar'), gr.inputs.Slider(minimum = 60, maximum = 200, default = 140, step = 10, label='Salida de caracteres')], outputs=[ gr.outputs.Textbox(label="Tu poema"), ], article= article, examples = examples ) iface.launch(enable_queue=True)