# NOTE(review): the original file began with Hugging Face Spaces UI text
# ("Spaces: / Sleeping / Sleeping") — a scrape artifact, not code. Removed so
# the module is importable.
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.chains import RetrievalQA
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.llms import CTransformers
from langchain.prompts import PromptTemplate
from langchain.vectorstores import FAISS
name = 'Fidva' | |
age = 15 | |
grade = 10 | |
context = '{context}' | |
question = '{question}' | |
DB_FAISS_PATH = 'refbooks-vectorstore/geo-10-1' | |
def get_llm_response(query, template_type): | |
if template_type == 'user': | |
user_template = """Use the following pieces of information to answer the user's question in a friendly way. | |
If you don't know the answer, just say that you don't know, don't try to make up an answer. Also refer to the user by their name, and keep in mind their age while answering the question. | |
Name of user: {name} | |
Age of user: {age} | |
Grade of user: {grade} | |
Context: {context} | |
Question: {question} | |
Return the Helpful Answer, and then also give the user a Knowledge Check Question related to what he just asked. | |
Returning the helpful answer is a must and takes higher priority. | |
Helpful answer: | |
""" | |
unformatted_prompt_template = PromptTemplate.from_template( | |
user_template) | |
elif template_type == 'lesson': | |
# lesson_template = """Teach the given topic in accordance with the content below to the user in a friendly way, while keeping in mind the user's age and his grade. | |
# Name of user: {name} | |
# Age of user: {age} | |
# Grade of user: {grade} | |
# Content: {context} | |
# Topic: {question} | |
# """ | |
lesson_template = """Hello {name}! Let's dive into the topic of {question} together. | |
As a {grade}th grader at {age} years old, it's great to explore this subject! | |
Let's start by understanding the context: | |
{context} | |
Now, to grasp this topic better, here are some key points to consider: | |
- Explain the fundamental concept or idea related to {question}. | |
- Provide examples or illustrations to make it easier to comprehend. | |
- Share any real-life applications or relevance of this topic. | |
Feel free to ask if you have any questions along the way. Let's learn together! | |
""" | |
unformatted_prompt_template = PromptTemplate.from_template( | |
lesson_template) | |
prompt_template = unformatted_prompt_template.format( | |
name=name, | |
age=age, | |
grade=grade, | |
context=context, | |
question=question | |
) | |
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2", | |
model_kwargs={'device': 'cpu'}) | |
db = FAISS.load_local(DB_FAISS_PATH, embeddings) | |
# Load the locally downloaded model here | |
llm = CTransformers( | |
model="TheBloke/Llama-2-7B-Chat-GGML", | |
model_type="llama", | |
callbacks=[StreamingStdOutCallbackHandler()], | |
config={ | |
'context_length': 4096, | |
'temperature': 0.1, | |
'max_new_tokens': 512, | |
}, | |
) | |
qa_prompt = PromptTemplate(template=prompt_template, | |
input_variables=['context', 'question']) | |
print(qa_prompt) | |
qa_result = RetrievalQA.from_chain_type(llm=llm, | |
chain_type='stuff', | |
retriever=db.as_retriever( | |
search_kwargs={'k': 1}), | |
return_source_documents=True, | |
chain_type_kwargs={ | |
'prompt': qa_prompt}, | |
) | |
response = qa_result({'query': query}) | |
return response['result'] | |