Update app.py
model outdated, updated to llama 3.3
app.py CHANGED
@@ -96,7 +96,7 @@ print("rerank loaded")
 
 
 # Create LLM model
-llm = get_groq_chat(model_name="llama-3.
+llm = get_groq_chat(model_name="llama-3.3-70b-versatile")
 
 
 # Create conversation qa chain (Note: conversation is not supported yet)
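The helper get_groq_chat itself is not shown in this diff; only the model name passed to it changes. A minimal sketch of what such a helper might look like, assuming it wraps Groq's chat API through the langchain_groq package (an assumption, not the repo's actual implementation):

# Sketch only: assumes the app uses langchain_groq; the real get_groq_chat in this repo is not part of the diff.
from langchain_groq import ChatGroq

def get_groq_chat(model_name: str, temperature: float = 0.0) -> ChatGroq:
    # ChatGroq reads the GROQ_API_KEY environment variable by default.
    return ChatGroq(model=model_name, temperature=temperature)

# Usage matching the updated line in app.py:
llm = get_groq_chat(model_name="llama-3.3-70b-versatile")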