import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load the tokenizer and model
model_checkpoint = "google/mt5-small"
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
model = AutoModelForSeq2SeqLM.from_pretrained(model_checkpoint)

def summarize(text):
    # Tokenize the input with the "summarize:" task prefix, truncating long inputs
    inputs = tokenizer.encode("summarize: " + text, return_tensors="pt", max_length=1024, truncation=True)
    # Generate a summary with beam search, bounded between 30 and 150 tokens
    outputs = model.generate(inputs, max_length=150, min_length=30, length_penalty=2.0, num_beams=4, early_stopping=True)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

# Build a simple Gradio interface around the summarize function
iface = gr.Interface(
    fn=summarize,
    inputs="text",
    outputs="text",
    title="Text Summarizer",
    description="Summarize any text using the mT5 model.",
)

iface.launch()
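Before launching the interface, it can help to sanity-check the summarize function directly. One caveat: the base google/mt5-small checkpoint was pretrained only on the mC4 corpus without any supervised summarization data, so it does not recognize the "summarize:" prefix the way the original T5 checkpoints do, and its raw outputs are often poor; in practice you would typically start from a checkpoint fine-tuned for summarization. The snippet below is a minimal sketch, and the sample passage is an illustrative placeholder rather than text from this article.

# Quick sanity check of the summarize function before launching the UI.
# The sample text is a made-up placeholder for illustration only.
sample_text = (
    "The James Webb Space Telescope is the largest optical telescope in space. "
    "Its high resolution and sensitivity allow it to view objects too old, "
    "distant, or faint for the Hubble Space Telescope."
)
print(summarize(sample_text))

# To share the demo publicly, Gradio can create a temporary public link:
# iface.launch(share=True)

If the summaries look reasonable, launching the interface as in the main script gives you an interactive demo; swapping in a summarization-fine-tuned checkpoint only requires changing model_checkpoint.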