from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_core.output_parsers import StrOutputParser
from langchain_chroma import Chroma
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_community.document_loaders import WebBaseLoader
from langchain_openai import AzureChatOpenAI
import gradio as gr
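# Configure the Azure OpenAI chat model. The credential fields below are left
# blank; fill in your own API version, key, endpoint, and deployment name
# (or load them from environment variables) before running.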
llm = AzureChatOpenAI(
    openai_api_type='azure',
    openai_api_version='',
    openai_api_key='',
    azure_endpoint='',
    deployment_name='',
    temperature=0.5,
)
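# Load the source pages from the Vyomastra website; a local PDF directory
# could be indexed instead via the commented-out PyPDFDirectoryLoader line.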
# loader = PyPDFDirectoryLoader("data")
loader = WebBaseLoader(
    web_paths=(
        "https://vyomastra.in/index.html",
        "https://vyomastra.in/about_us.html",
        "https://vyomastra.in/solutions.html",
    )
)
text = loader.load()
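# Split the loaded pages into overlapping chunks so each embedding covers a
# manageable span of text.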
text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=200)
docs = text_splitter.split_documents(text)
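# Embed the chunks with a local sentence-transformers model, index them in a
# Chroma collection, and expose the store as a retriever.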
vectorstore = Chroma.from_documents(
    documents=docs,
    collection_name="embeds",
    embedding=HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2"),
)
retriever = vectorstore.as_retriever()
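# Prompt template that grounds the assistant's answers in the retrieved
# website context.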
rag_template = """You are a conversational question answering AI assistant named Astra. | |
You are created by AI developers from Vyomastra. | |
Your abilities: logical reasoning, complex mathematics computing, coding knowledge, common general knowledge from internet. | |
Use your abilities and knowledge from the context mentioned below to answer the questions truthfully: | |
{context} | |
Question: {question} | |
Answer: | |
""" | |
rag_prompt = ChatPromptTemplate.from_template(rag_template) | |
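# Assemble the RAG chain: the retriever fills {context}, the raw question is
# passed through to {question}, the prompt is sent to the LLM, and the model
# output is parsed into a plain string.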
rag_chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | rag_prompt
    | llm
    | StrOutputParser()
)
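# Optional refinement (not used here): the retriever yields a list of Document
# objects, which are stringified as-is into the prompt. Joining each document's
# page_content first gives the model cleaner context, for example:
#
# def format_docs(documents):
#     return "\n\n".join(doc.page_content for doc in documents)
#
# rag_chain = (
#     {"context": retriever | format_docs, "question": RunnablePassthrough()}
#     | rag_prompt
#     | llm
#     | StrOutputParser()
# )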
# Make the questions dynamic using a chat interface. Let's use Gradio for this.
def process_question(user_question):
    response = rag_chain.invoke(user_question)
    return response
# Set up the Gradio interface
iface = gr.Interface(
    fn=process_question,
    inputs=gr.Textbox(lines=2, placeholder="Type your question here..."),
    outputs=gr.Textbox(),
    title="Website Knowledge Chat App",
    description="Ask any question about the indexed website pages and get an answer grounded in them.",
)
# Launch the interface
iface.launch()