import gradio as gr
from transformers import pipeline

# Load the BioGPT text-generation pipeline
bio_gpt = pipeline("text-generation", model="microsoft/biogpt")
def medical_chatbot(query):
    try:
        result = bio_gpt(
            query,
            max_length=300,          # Allow room for a full response
            do_sample=True,          # Sampling must be enabled for temperature/top_p to take effect
            num_return_sequences=1,  # Only the first sequence is used below
            temperature=0.9,         # Moderately high temperature for more varied responses
            top_p=0.95               # Nucleus sampling instead of a fixed top_k
        )
        generated_text = result[0]["generated_text"]  # Full response without trimming
        return generated_text
    except Exception as e:
        print(f"Error: {e}")
        return "An error occurred."
# Create the Gradio interface
iface = gr.Interface(
    fn=medical_chatbot,
    inputs=gr.Textbox(placeholder="Ask me a medical question..."),
    outputs="text",
    title="Medical Chatbot"
)

iface.launch(share=True)
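For a quick sanity check before launching the interface, the generation function can also be called directly in a Python session where the code above has been run. This is only a minimal sketch: the example question is an illustrative placeholder, and because sampling is enabled the generated answer will differ between runs.

# Quick direct test of the generation function (run before iface.launch, which blocks).
# The example question is illustrative only; sampled output varies between runs.
sample_query = "What are the common symptoms of type 2 diabetes?"
print(medical_chatbot(sample_query))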