from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
import datetime
import requests
import pytz
import yaml
import random

from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI

@tool
def quantum_fortune_llama(question: str, asker_name: str) -> str:
    """A Quantum Fortune Llama that spits cryptic, hilarious, and oddly specific predictions from across the multiverse.

    Args:
        question: The question the user dares to ask this majestic beast (it might ignore it entirely).
        asker_name: The brave soul who summoned the Llama’s cosmic spit.
    """
    llama_spits = [
        f"{asker_name}, the Llama peers through the 7th dimension: ‘You’ll invent a sandwich that screams on Thursdays.’",
        "With a majestic bleat, it prophesies: ‘Your socks will unionize and demand better lint rights.’",
        f"Spit lands from a parallel timeline—‘{asker_name}, a disco-loving squid will propose next Tuesday.’",
        "The Llama’s third eye winks: ‘Your Wi-Fi will gain sentience and ghost you for fun.’",
        f"{asker_name}, the Quantum Llama snorts: ‘A rogue pickle will challenge you to a duel at dawn.’",
        "From the void, it bleats: ‘You’ll sneeze glitter for 3 days straight. Plan accordingly.’",
        "The Llama chews on your fate: ‘A sentient cloud will adopt you as its emotional support human.’",
        f"{asker_name}, it spits a vision: ‘You’ll star in a reality show judged by feral raccoons.’",
        "Quantum drool reveals: ‘Your shadow will elope with a streetlamp. Send a postcard.’",
        "The Llama tap-dances across time: ‘Your breakfast will stage a coup. Toast leads the charge.’",
        f"Behold, {asker_name}! ‘A penguin in flip-flops will deliver your next big idea.’",
        "The Llama’s cosmic burp decrees: ‘You’ll accidentally join a cult of overly polite pigeons.’",
        "Spit swirls in 4D: ‘Your left shoe will write a bestselling memoir. Spoiler: it hates laces.’",
        f"{asker_name}, the Llama grins: ‘A time-traveling toaster will sue you for emotional damages.’",
        "The prophecy drips: ‘You’ll wake up fluent in Dolphin but only for karaoke night.’",
        "With a cosmic headbutt, it declares: ‘Your next pet rock will demand a corner office.’",
        f"{asker_name}, the Llama spits glitter: ‘A rogue kazoo will haunt your dreams with polka.’",
        "The Quantum Llama howls: ‘Your fridge will start a podcast about existential cheese.’",
        "A multiversal hoof-stomp reveals: ‘You’ll befriend a cloud shaped like a judgmental cat.’",
        f"Spit lands with sass, {asker_name}: ‘Your future smells like burnt popcorn and ambition.’",
    ]
    # Add a random "Llama Action" for extra flair and engagement
    llama_actions = [
        "The Llama does a backflip and spits twice for emphasis.",
        "It waggles its eyebrows and teleports three seconds into the future.",
        "The Llama moonwalks across a rainbow, leaving hoofprints of chaos.",
        "It dons sunglasses and bleats your fortune in a funky rhythm.",
        "The Llama chews on a quantum carrot, muttering about your alternate lives.",
    ]
    # Randomly select a fortune and an action
    fortune = random.choice(llama_spits)
    action = random.choice(llama_actions)
    return f"🦙 *The Quantum Fortune Llama turns its majestic gaze to {asker_name}*...\n{fortune}\n*{action}*"


@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.

    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        # Create timezone object
        tz = pytz.timezone(timezone)
        # Get current time in that timezone
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        return f"Error fetching time for timezone '{timezone}': {str(e)}"


final_answer = FinalAnswerTool()

model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud',
    custom_role_conversions=None,
)
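# Note: model_id above points to a dedicated inference endpoint URL. If that endpoint is paused
# or unreachable, a Hub model id can be substituted instead, for example:
#   model_id='Qwen/Qwen2.5-Coder-32B-Instruct'
# Treat that fallback as an assumption about what is available, not a requirement of this app.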

# Import tool from Hub
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
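# Note: image_generation_tool is loaded but not passed to the agent below, so the agent cannot
# call it. To expose it, add it to the tools list, e.g.:
#   tools=[get_current_time_in_timezone, quantum_fortune_llama, DuckDuckGoSearchTool(), image_generation_tool, final_answer]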

with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    tools=[get_current_time_in_timezone, quantum_fortune_llama, DuckDuckGoSearchTool(), final_answer],
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates,
)

# Run one demo query before launching the UI; additional_args passes the asker's name through to the tool
response = agent.run(
    "Ask the Quantum Fortune Llama about my future",
    additional_args={"asker_name": "YourName"},
)
print(response)

GradioUI(agent).launch()