import os
import re
from dataclasses import dataclass
from typing import List, Dict, Optional

import pandas as pd
import streamlit as st

# Optional: load variables from a local .env file before reading them
# from dotenv import load_dotenv
# load_dotenv()

api_key = os.getenv("OPENAI_API_KEY")
print("API key loaded?", bool(api_key))

# HuggingFace (optional)
try:
    from transformers import pipeline
    HF_AVAILABLE = True
except Exception:
    HF_AVAILABLE = False

# OpenAI (optional)
try:
    from openai import OpenAI
    OPENAI_AVAILABLE = True
except Exception:
    OPENAI_AVAILABLE = False

# Load environment variables
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
MODEL = os.getenv("MODEL", "gpt-3.5-turbo")

# Streamlit config
st.set_page_config(page_title="Personal Finance Chatbot", page_icon="💬", layout="wide")
@dataclass
class FinanceRecord:
    date: str
    description: str
    amount: float
    category: Optional[str] = None
class HuggingFaceProvider:
    def __init__(self):
        self.available = HF_AVAILABLE
        self.name = "huggingface"
        self.generator = None
        if self.available:
            try:
                self.generator = pipeline("text2text-generation", model="google/flan-t5-small")
            except Exception:
                self.available = False

    def ok(self):
        return self.available and self.generator is not None

    def generate(self, prompt: str, max_tokens: int = 256):
        if not self.ok():
            return "[HF provider unavailable]"
        try:
            result = self.generator(prompt, max_length=max_tokens, do_sample=True)
            return result[0]['generated_text']
        except Exception as e:
            return f"[HF error] {e}"
class GraniteWatsonProvider:
    def __init__(self):
        self.name = "granite_watson"

    def ok(self):
        return True

    def generate(self, prompt: str, max_tokens: int = 256):
        return "[Granite/Watson] This is a placeholder response. Connect IBM SDK here."
class OpenAIProvider:
    def __init__(self):
        self.api_key = OPENAI_API_KEY
        self.model = MODEL
        self.client = None
        if self.api_key and OPENAI_AVAILABLE:
            try:
                self.client = OpenAI(api_key=self.api_key)
            except Exception:
                self.client = None
        self.name = "openai"

    def ok(self):
        return self.client is not None

    def generate(self, prompt: str, max_tokens: int = 512):
        if not self.client:
            return "[OpenAI] API not configured. Please set OPENAI_API_KEY in your environment."
        try:
            resp = self.client.chat.completions.create(
                model=self.model,
                messages=[
                    {"role": "system", "content": "You are a financial assistant."},
                    {"role": "user", "content": prompt},
                ],
                max_tokens=max_tokens,
                temperature=0.7,
            )
            return resp.choices[0].message.content.strip()
        except Exception as e:
            return f"[OpenAI error] {e}"
def categorize_with_ai(provider, description: str):
    prompt = f"Categorize this financial transaction description into: Food, Rent, Utilities, Entertainment, Transport, Other.\nDescription: {description}\nCategory:"
    return provider.generate(prompt)
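# Optional helper (not wired into the UI below): the model reply from categorize_with_ai
# is free text, so it can come back as e.g. "Category: Food" or a short sentence. This
# sketch maps the raw reply onto one of the six labels used in the prompt and falls back
# to "Other"; the matching rule is an assumption, not part of the original app.
ALLOWED_CATEGORIES = ["Food", "Rent", "Utilities", "Entertainment", "Transport", "Other"]

def normalize_category(raw: str) -> str:
    text = (raw or "").lower()
    for cat in ALLOWED_CATEGORIES:
        if cat.lower() in text:
            return cat
    return "Other"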
def get_ai_suggestions(provider, records: List[FinanceRecord]):
    df = pd.DataFrame([r.__dict__ for r in records])
    prompt = (
        "You are a financial advisor. Here are the user's transactions:\n"
        f"{df.to_string(index=False)}\n\n"
        "Provide insights and suggestions to improve savings and manage money better."
    )
    return provider.generate(prompt, max_tokens=400)
# Streamlit UI
st.title("💬 Personal Finance Chatbot")
st.write("Manage savings, taxes, and investments with AI guidance.")

provider_choice = st.selectbox("AI Provider", ["HuggingFace", "Granite/Watson", "OpenAI"], index=0)

hf_provider = HuggingFaceProvider()
granite_provider = GraniteWatsonProvider()
openai_provider = OpenAIProvider()

if provider_choice == "HuggingFace":
    provider = hf_provider
elif provider_choice == "Granite/Watson":
    provider = granite_provider
else:
    provider = openai_provider

if "records" not in st.session_state:
    st.session_state.records = []

st.sidebar.header("Add Transaction")
date = st.sidebar.text_input("Date", "2025-08-30")
description = st.sidebar.text_input("Description", "")
amount = st.sidebar.number_input("Amount", 0.0, 1e9, step=100.0)

if st.sidebar.button("Add Record"):
    record = FinanceRecord(date=date, description=description, amount=amount)
    record.category = categorize_with_ai(provider, record.description)
    st.session_state.records.append(record)
    st.sidebar.success("Record added!")

if st.session_state.records:
    st.subheader("Transaction Records")
    df = pd.DataFrame([r.__dict__ for r in st.session_state.records])
    st.dataframe(df)

    st.subheader("AI Suggestions")
    # Note: this calls the selected provider on every Streamlit rerun.
    suggestions = get_ai_suggestions(provider, st.session_state.records)
    st.write(suggestions)
else:
    st.info("No records yet. Add transactions from the sidebar.")