|
import os |
|
import streamlit as st |
|
import pandas as pd |
|
import numpy as np |
|
from groq import Groq |
|
from sentence_transformers import SentenceTransformer |
|
import faiss |
|
|
|
# SECURITY FIX: the Groq API key was previously hardcoded in this file — a
# leaked secret. Read it from the environment instead; set GROQ_API_KEY
# before launching the app. The previously committed key should be revoked.
GROQ_API_KEY = os.environ.get("GROQ_API_KEY", "")

# Groq client used by generate_report() for LLM-backed analysis.
client = Groq(api_key=GROQ_API_KEY)

# Sentence-embedding model used to vectorize dataset rows for FAISS search.
embedding_model = SentenceTransformer("all-MiniLM-L6-v2")

# Module-level state shared by the Streamlit script below.
uploaded_file = None  # last uploaded CSV file object (set by the UI)
faiss_index = None    # FAISS L2 index over row embeddings (set by load_dataset)
dataframe = None      # loaded pandas DataFrame (set by load_dataset)
|
|
|
|
|
def load_dataset(file):
    """Load a CSV of energy-usage data and build a FAISS index over it.

    Populates the module-level ``dataframe`` and ``faiss_index`` globals.
    Shows a Streamlit error (and leaves the globals unset) instead of
    crashing when the file is unreadable or the expected column is missing.

    Args:
        file: File-like object from st.file_uploader containing CSV data.
    """
    global dataframe, faiss_index

    try:
        dataframe = pd.read_csv(file)
    except Exception as e:  # pd.read_csv raises several parser/IO errors
        st.error(f"Failed to read CSV file: {e}")
        dataframe = None
        return

    # Fail fast with a clear message instead of a KeyError below.
    if "Energy Usage (kWh)" not in dataframe.columns:
        st.error("Dataset must contain an 'Energy Usage (kWh)' column.")
        dataframe = None
        return

    st.success("Dataset loaded successfully!")

    # Embed each row's usage value as text so queries can be matched to rows.
    embeddings = embedding_model.encode(
        dataframe["Energy Usage (kWh)"].astype(str).tolist()
    )
    # FAISS requires float32 contiguous arrays.
    embeddings = np.asarray(embeddings, dtype="float32")
    faiss_index = faiss.IndexFlatL2(embeddings.shape[1])
    faiss_index.add(embeddings)
|
|
|
|
|
def retrieve_relevant_data(query, top_k=5):
    """Return up to ``top_k`` dataset rows most similar to ``query``.

    Args:
        query: Free-text query to embed and search against the FAISS index.
        top_k: Maximum number of rows to return (default 5).

    Returns:
        list[dict]: Matching rows as record dicts; empty list if no dataset
        is loaded (an error is shown in the UI in that case).
    """
    if dataframe is None or faiss_index is None:
        st.error("Please upload a dataset first.")
        return []

    # BUG FIX: requesting more neighbors than indexed vectors makes FAISS
    # pad the result with -1, which DataFrame.iloc would silently interpret
    # as "last row" — clamp k and drop any remaining sentinel indices.
    k = min(top_k, faiss_index.ntotal)
    if k <= 0:
        return []

    query_embedding = np.asarray(
        embedding_model.encode([query]), dtype="float32"
    )
    _distances, indices = faiss_index.search(query_embedding, k)
    valid_indices = [i for i in indices[0] if i >= 0]
    return dataframe.iloc[valid_indices].to_dict(orient="records")
|
|
|
|
|
def generate_cost_saving_recommendations(data):
    """Summarize energy usage/cost for the given rows and list saving tips.

    Args:
        data: List of row dicts, expected to carry "Energy Usage (kWh)" and
            "Cost" entries. Values may be numeric or numeric strings (CSV
            parsing can yield either); missing or unparseable values are
            treated as 0 rather than raising.

    Returns:
        str: Multi-line summary plus fixed recommendations, or a notice
        string when ``data`` is empty.
    """
    if not data:
        return "No relevant data found for recommendations."

    def _num(row, key):
        # Coerce to float defensively: CSV values may be strings, and a
        # retrieved row may lack the key entirely.
        try:
            return float(row.get(key, 0) or 0)
        except (TypeError, ValueError):
            return 0.0

    total_energy_usage = sum(_num(row, "Energy Usage (kWh)") for row in data)
    avg_energy_usage = total_energy_usage / len(data)
    total_cost = sum(_num(row, "Cost") for row in data)

    recommendations = [
        f"Total energy usage: {total_energy_usage:.2f} kWh",
        f"Average energy usage per household: {avg_energy_usage:.2f} kWh",
        f"Total cost: ${total_cost:.2f}",
        "Recommendations:",
        "- Implement energy-efficient appliances.",
        "- Use renewable energy sources like solar or wind.",
        "- Schedule high-energy tasks during off-peak hours.",
        "- Conduct regular maintenance to reduce energy wastage."
    ]
    return "\n".join(recommendations)
|
|
|
|
|
def generate_report(query):
    """Build an LLM-generated analysis report plus rule-based cost tips.

    Retrieves the rows most relevant to ``query``, asks the Groq chat model
    for an analysis grounded in those rows, then derives cost-saving
    recommendations from the same rows.

    Returns:
        tuple: (detailed report text, cost-saving recommendations text).
    """
    rows = retrieve_relevant_data(query)
    context = "\n".join(str(row) for row in rows)

    prompt = (
        f"Based on the following query: '{query}' and context:\n"
        f"{context}\nProvide an energy usage analysis report."
    )
    response = client.chat.completions.create(
        messages=[{"role": "user", "content": prompt}],
        model="llama3-8b-8192",
        stream=False,
    )

    report_text = response.choices[0].message.content
    savings_text = generate_cost_saving_recommendations(rows)
    return report_text, savings_text
|
|
|
|
|
# --- Streamlit UI -----------------------------------------------------------
st.title("Energy Usage Analysis & Cost-Saving Report Generator")
st.sidebar.header("Upload Dataset")

# Sidebar: uploading a CSV also (re)builds the FAISS index via load_dataset.
uploaded_file = st.sidebar.file_uploader("Upload your energy usage dataset (CSV)", type=["csv"])
if uploaded_file is not None:
    load_dataset(uploaded_file)

# Main panel: query entry and report generation.
st.header("Generate Energy Usage Report")
query = st.text_input("Enter your query (e.g., 'Analyze peak usage times in urban areas')")

if st.button("Generate Report"):
    if uploaded_file is None:
        st.error("Please upload a dataset first.")
    elif not query.strip():
        st.error("Please enter a query.")
    else:
        with st.spinner("Generating report..."):
            try:
                report_text, savings_text = generate_report(query)
                st.subheader("Energy Usage Analysis Report")
                st.write(report_text)
                st.subheader("Cost-Saving Recommendations")
                st.write(savings_text)
            except Exception as exc:
                # Surface any failure (network, API, parsing) in the UI.
                st.error(f"An error occurred: {exc}")
|
|