File size: 3,854 Bytes
888d109
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS
from langchain.llms import CTransformers
from langchain.chains import RetrievalQA
from langchain.prompts import PromptTemplate
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler

# Hard-coded user profile baked into the prompt templates.
# TODO(review): these presumably should come from the caller / a user record.
name = 'Fidva'
age = 15
grade = 10
# Literal placeholder strings, NOT values: the first .format() pass substitutes
# the profile fields above while re-emitting "{context}" and "{question}"
# verbatim, so the final PromptTemplate still exposes those two variables for
# the RetrievalQA chain to fill at query time.
context = '{context}'
question = '{question}'

# Path to the locally persisted FAISS vector store
# (presumably geography, grade 10, part 1 — confirm against the indexing script).
DB_FAISS_PATH = 'refbooks-vectorstore/geo-10-1'


def get_llm_response(query, template_type):
    """Answer *query* with a local Llama-2 model over the FAISS vector store.

    Retrieves the single most relevant chunk from the vector store at
    ``DB_FAISS_PATH`` and runs a "stuff"-type RetrievalQA chain with a prompt
    personalized from the module-level user profile (``name``/``age``/``grade``).

    Parameters
    ----------
    query : str
        The user's question ('user' mode) or the topic to teach ('lesson' mode).
    template_type : {'user', 'lesson'}
        'user' answers the question and appends a knowledge-check question;
        'lesson' produces a lesson-style walkthrough of the topic.

    Returns
    -------
    str
        The model's generated answer (tokens are also streamed to stdout).

    Raises
    ------
    ValueError
        If *template_type* is not 'user' or 'lesson'.  (Previously an unknown
        value fell through both branches and crashed later with
        ``UnboundLocalError``.)
    """
    if template_type == 'user':
        user_template = """Use the following pieces of information to answer the user's question in a friendly way.
        If you don't know the answer, just say that you don't know, don't try to make up an answer. Also refer to the user by their name, and keep in mind their age while answering the question.

        Name of user: {name}
        Age of user: {age}
        Grade of user: {grade}
        Context: {context}
        Question: {question}

        Return the Helpful Answer, and then also give the user a Knowledge Check Question related to what he just asked.
        Returning the helpful answer is a must and takes higher priority.

        Helpful answer:
        """
        unformatted_prompt_template = PromptTemplate.from_template(
            user_template)

    elif template_type == 'lesson':
        lesson_template = """Hello {name}! Let's dive into the topic of {question} together.
    
As a {grade}th grader at {age} years old, it's great to explore this subject!

Let's start by understanding the context:
    
    {context}

Now, to grasp this topic better, here are some key points to consider:
    
    - Explain the fundamental concept or idea related to {question}.
    
    - Provide examples or illustrations to make it easier to comprehend.
    
    - Share any real-life applications or relevance of this topic.

Feel free to ask if you have any questions along the way. Let's learn together!
"""
        unformatted_prompt_template = PromptTemplate.from_template(
            lesson_template)

    else:
        # Fail fast with a clear message instead of the UnboundLocalError the
        # original code produced for an unrecognized template_type.
        raise ValueError(
            f"Unknown template_type {template_type!r}; expected 'user' or 'lesson'")

    # First formatting pass: bake in the user's profile, but re-emit literal
    # "{context}"/"{question}" placeholders so the resulting string is itself a
    # valid template for the RetrievalQA chain to fill at query time.
    prompt_template = unformatted_prompt_template.format(
        name=name,
        age=age,
        grade=grade,
        context='{context}',
        question='{question}',
    )

    embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2",
                                       model_kwargs={'device': 'cpu'})
    db = FAISS.load_local(DB_FAISS_PATH, embeddings)

    # Local quantized Llama-2 chat model; streams tokens to stdout as they are
    # generated via the callback handler.
    llm = CTransformers(
        model="TheBloke/Llama-2-7B-Chat-GGML",
        model_type="llama",
        callbacks=[StreamingStdOutCallbackHandler()],
        config={
            'context_length': 4096,
            'temperature': 0.1,  # near-deterministic output
            'max_new_tokens': 512,
        },
    )

    qa_prompt = PromptTemplate(template=prompt_template,
                               input_variables=['context', 'question'])

    # 'stuff' chain: the single retrieved chunk (k=1) is stuffed into {context}.
    qa_result = RetrievalQA.from_chain_type(llm=llm,
                                            chain_type='stuff',
                                            retriever=db.as_retriever(
                                                search_kwargs={'k': 1}),
                                            return_source_documents=True,
                                            chain_type_kwargs={
                                                'prompt': qa_prompt},
                                            )

    response = qa_result({'query': query})
    return response['result']