import importlib
import os
import sys

import gradio as gr

from algos.PWS import *
from utils.util import *
from nodes.Worker import *
from prompts import fewshots

# Load API keys
with open(os.path.join('./keys/', 'openai.key'), 'r') as f:
    os.environ["OPENAI_API_KEY"] = f.read().strip()
with open(os.path.join('./keys/', 'serpapi.key'), 'r') as f:
    os.environ["SERPAPI_API_KEY"] = f.read().strip()

def reload_modules():
    """Reload all relevant modules."""
    importlib.reload(sys.modules['nodes.Worker'])
    importlib.reload(sys.modules['algos.PWS'])
    importlib.reload(sys.modules['utils.util'])
    importlib.reload(sys.modules['prompts.fewshots'])
    importlib.reload(sys.modules['prompts.solver'])
    return "✅ Modules reloaded successfully!"

def process(tools, model, input_text):
    # Use the study-abroad fewshot for study-related questions
    # (the keyword list includes Vietnamese terms, e.g. "du học" = "study abroad",
    # "học bổng" = "scholarship")
    study_keywords = [
        "study", "student", "university", "college", "school", "abroad",
        "học", "trường", "du học", "học bổng", "gpa", "ielts", "tcf",
        "delf", "scholarship",
    ]
    if any(word in input_text.lower() for word in study_keywords):
        # Ensure both Google and LLM are included for study-abroad queries
        if "Google" not in tools:
            tools.append("Google")
        if "LLM" not in tools:
            tools.append("LLM")
        method = PWS_Base(planner_model=model, solver_model=model,
                          fewshot=fewshots.STUDY_ABROAD_PWS, available_tools=tools)
    else:
        method = PWS_Base(planner_model=model, solver_model=model,
                          fewshot=fewshots.TRIVIAQA_PWS, available_tools=tools)
    response = method.run(input_text)
    # Extract the planner log (the portion after the echoed input)
    plan = response["planner_log"].split(input_text)[1].strip('\n')
    # Extract the full solver log without truncating at "Now begin to solve the task"
    solve = response["solver_log"].split(input_text)[1].strip('\n')
    # Get the complete output
    output = response["output"]
    return plan, solve, output
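
# Example invocation (hypothetical inputs, outside the Gradio UI):
#   plan, solve, answer = process(["Wikipedia", "LLM"], "gpt-3.5-turbo",
#                                 "Who painted the Mona Lisa?")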

with gr.Blocks() as iface:
    gr.Markdown("# ReWOO Demo 🤗")
gr.Markdown(""" | |
Demonstrating our recent work -- ReWOO: Decoupling Reasoning from Observations for Efficient Augmented Language Models. | |
Note that this demo is only a conceptual impression of our work, we use a zero-shot set up and not optimizing the run time. | |
""") | |
    with gr.Row():
        with gr.Column():
            tools = gr.CheckboxGroup(['Wikipedia', 'Google', 'LLM', 'WolframAlpha', 'Calculator'], label="Tools")
            model = gr.Dropdown(["text-davinci-003", "gpt-3.5-turbo"], label="Model")
            input_text = gr.Textbox(lines=2, placeholder="Input Here...", label="Input")
            with gr.Row():
                submit_btn = gr.Button("Submit")
                refresh_btn = gr.Button("🔄 Refresh Modules")
        with gr.Column():
            planner = gr.Textbox(lines=4, label="Planner")
            solver = gr.Textbox(lines=4, label="Solver")
            output = gr.Textbox(label="Output")
            status = gr.Textbox(label="Status", value="Ready")

    # Set up event handlers
    submit_btn.click(fn=process, inputs=[tools, model, input_text], outputs=[planner, solver, output])
    refresh_btn.click(fn=reload_modules, outputs=status)
    input_text.submit(fn=process, inputs=[tools, model, input_text], outputs=[planner, solver, output])  # Keep Enter key functionality

    # Examples
    gr.Examples([
        [["Wikipedia", "LLM"], "gpt-3.5-turbo", "American Callan Pinckney's eponymously named system became a best-selling (1980s-2000s) book/video franchise in what genre?"],
        [['Google', 'LLM'], "gpt-3.5-turbo", "What is the recent paper ReWOO: Decoupling Reasoning from Observations for Efficient Augmented Language Models about?"],
        [["Calculator", "WolframAlpha"], "gpt-3.5-turbo", "The car can accelerate from 0 to 27.8 m/s in a time of 3.85 seconds. Determine the acceleration of this car in m/s/s."],
    ], inputs=[tools, model, input_text])
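
    # For reference, the third example is plain kinematics:
    # a = Δv / Δt = 27.8 m/s / 3.85 s ≈ 7.22 m/s².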

if __name__ == "__main__":
    iface.launch()