Implement initial Streamlit UI for AI Life Coach
Browse files
app.py
CHANGED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import streamlit as st
|
| 2 |
+
from utils.config import config
|
| 3 |
+
import requests
|
| 4 |
+
|
| 5 |
+
# Basic page chrome: browser-tab title/icon and a centered layout.
st.set_page_config(
    page_title="AI Life Coach",
    page_icon="🧘",
    layout="centered",
)

# Sidebar: app title, a picker for the active user profile, and a divider.
st.sidebar.title("🧘 AI Life Coach")
user = st.sidebar.selectbox("Select User", ["Rob", "Sarah"])
st.sidebar.markdown("---")
|
| 12 |
+
|
| 13 |
+
# Fetch Ollama status from the local backend
def get_ollama_status():
    """Query the backend for the Ollama daemon's health.

    Returns:
        dict: the backend's JSON payload on HTTP 200 (presumably of the
        shape ``{"running": bool, "model_loaded": str | None}`` — matches
        how callers index it), otherwise a safe
        ``{"running": False, "model_loaded": None}`` fallback. Callers can
        therefore always index the result without a None-check.
    """
    try:
        # Short timeout so the Streamlit UI doesn't hang forever when the
        # backend is unreachable.
        response = requests.get(
            "http://localhost:8000/api/ollama-status", timeout=5
        )
        if response.status_code == 200:
            return response.json()
    except Exception:
        # Best-effort: any network/parse error is treated as "not running".
        pass
    # BUG FIX: the original returned None on a non-200 status because the
    # fallback dict lived only in the except branch; that None then crashed
    # ollama_status["running"] at the call site.
    return {"running": False, "model_loaded": None}
|
| 21 |
+
|
| 22 |
+
ollama_status = get_ollama_status()

# Show backend/model health in the sidebar.
# ROBUSTNESS FIX: get_ollama_status() can return None (non-200 path falls
# off its try block), so a bare ollama_status["running"] would raise
# TypeError. Treat None or a payload without "running" as "not running".
if ollama_status and ollama_status.get("running"):
    st.sidebar.success(f"🧠 Model Running: {ollama_status.get('model_loaded')}")
else:
    st.sidebar.error("🧠 Ollama is not running or no model loaded.")
|
| 29 |
+
|
| 30 |
+
# Main chat interface
st.title("🧘 AI Life Coach")
st.markdown("Talk to your personal development assistant.")

# ROBUSTNESS FIX: get_ollama_status() can return None on a non-200 reply,
# so guard with truthiness + .get() instead of a bare ["running"] lookup;
# a malformed payload degrades to the warning path rather than crashing.
if not (ollama_status and ollama_status.get("running")):
    st.warning("⚠️ Ollama is not running. Please start Ollama to use the AI Life Coach.")
else:
    # Chat input box; keyed so Streamlit preserves it across reruns.
    user_input = st.text_input("Your message...", key="input")
    if st.button("Send"):
        # Reject empty or whitespace-only submissions.
        if not user_input.strip():
            st.warning("Please enter a message.")
        else:
            # TODO: Send to backend and get response
            st.markdown(f"**You:** {user_input}")
            # Plain literal — the original f-prefix had no placeholders (F541).
            st.markdown("**AI Coach:** [Response will appear here after backend integration.]")
|