# -*- coding: utf-8 -*-
"""app

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1ORnyeMQYmIQwXKecOr52Fr5YOzjrsxvn
"""

# Commented out IPython magic to ensure Python compatibility.
# %%capture
# !pip install gradio transformers==4.28.0 datasets

import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
from datasets import load_dataset
import numpy as np

# Single source of truth for the checkpoint name (was duplicated in both
# from_pretrained calls below).
MODEL_NAME = "mehnaazasad/bart-large-finetuned-arxiv-co-ga-latest"

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)

# NOTE(review): this dataset is downloaded at startup but never used anywhere
# in this script — kept to preserve module-level side effects; consider removing.
dataset = load_dataset("mehnaazasad/arxiv_astro_co_ga")


def summarize(text, temperature, max_length=50):
    """Generate a paper title from an abstract.

    Parameters
    ----------
    text : str
        Abstract (or any text) to condense into a title.
    temperature : float
        Sampling temperature; higher values produce more creative output.
    max_length : int, optional
        Maximum number of generated tokens (default 50, the original
        hard-coded value, so existing callers are unaffected).

    Returns
    -------
    str
        The decoded title with special tokens stripped.
    """
    inputs = tokenizer(text, return_tensors="pt").input_ids
    # Beam search combined with sampling: top-k/top-p narrow the candidate
    # pool, temperature controls how adventurous the sampling is.
    output = model.generate(
        inputs,
        max_length=max_length,
        num_beams=5,
        temperature=temperature,
        top_k=35,
        top_p=0.94,
        do_sample=True,
    )
    return tokenizer.decode(output[0], skip_special_tokens=True)


title = "Title Generator"
description = """This model was trained to generate a title given scientific paper abstracts. You can find more details about the fine-tuning of this BART model [here](https://huggingface.co/mehnaazasad/bart-large-finetuned-arxiv-co-ga-latest). While default parameter values are shown, feel free to experiment!
"""
article = "[Image credit](https://adapterhub.ml/blog/2021/04/adapters-for-generative-and-seq2seq-models-in-nlp/)"

gr.Interface(
    summarize,
    [
        gr.Textbox(type="text", label="Paste text here"),
        gr.Slider(
            minimum=0.4,
            maximum=2.0,
            step=0.2,
            value=0.7,
            label="Temperature: crank this up for more creativity (travel beyond 1 at your own risk!)",
        ),
    ],
    gr.Textbox(type="text", label="Your title is"),
    title=title,
    description=description,
    article=article,
    theme="finlaymacklon/boxy_violet",
).launch()