# LLMManuscript / app_groq.py
import os
from groq import Groq
# Retrieve the API key from the environment (stored under the name DEEPSEEK_API_KEY,
# although it is used to authenticate against the Groq API below)
DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
# Fail fast if the API key is not set
if not DEEPSEEK_API_KEY:
    raise ValueError("DEEPSEEK_API_KEY is missing. Please check your environment variables.")
# Initialize the Groq client with the retrieved API key
client = Groq(api_key=DEEPSEEK_API_KEY)
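# Note (assumption): when no api_key is passed, the Groq client normally falls
# back to a GROQ_API_KEY environment variable; passing the key explicitly keeps
# the dependency on DEEPSEEK_API_KEY visible.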
# Prepare the conversation: a single user message
messages = [{"role": "user", "content": "Hello, how can I improve my coding skills?"}]
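# Optional sketch: a "system" message could be prepended to steer the reply,
# e.g. (illustrative content, not part of the original script):
# messages = [
#     {"role": "system", "content": "You are a concise coding mentor."},
#     {"role": "user", "content": "Hello, how can I improve my coding skills?"},
# ]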
# Call the Groq API for a streaming chat completion
try:
    print("Calling API...")
    # Make the API call (Llama 4 Scout served by Groq)
    completion = client.chat.completions.create(
        model="meta-llama/llama-4-scout-17b-16e-instruct",  # Model to use
        messages=messages,  # Conversation to send
        temperature=1,  # Sampling temperature (higher = more creative)
        max_completion_tokens=1024,  # Upper bound on generated tokens
        top_p=1,  # Nucleus (top-p) sampling
        stream=True  # Enable streaming mode
    )
    print("Streaming response received...")
    # Handle the streaming response: print each chunk's text as it arrives
    for chunk in completion:
        print(chunk.choices[0].delta.content or "", end="")
except Exception as e:
    print(f"Error occurred: {str(e)}")