Fix session state initialization and improve Redis error handling
Browse files
- app.py +27 -14
- core/memory.py +63 -10
app.py
CHANGED
|
@@ -1,19 +1,24 @@
|
|
| 1 |
-
# Force redeploy trigger - version 1.
|
| 2 |
import streamlit as st
|
| 3 |
from utils.config import config
|
| 4 |
import requests
|
| 5 |
import json
|
| 6 |
import os
|
| 7 |
-
from core.memory import load_user_state
|
| 8 |
-
from core.llm import LLMClient
|
| 9 |
|
| 10 |
# Set page config
|
| 11 |
st.set_page_config(page_title="AI Life Coach", page_icon="🧘", layout="centered")
|
| 12 |
|
| 13 |
# Initialize session state for ngrok URL
|
| 14 |
-
if
|
| 15 |
st.session_state.ngrok_url = config.ollama_host
|
| 16 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 17 |
# Sidebar for user selection
|
| 18 |
st.sidebar.title("🧘 AI Life Coach")
|
| 19 |
user = st.sidebar.selectbox("Select User", ["Rob", "Sarah"])
|
|
@@ -24,6 +29,7 @@ st.sidebar.subheader("Ollama Connection")
|
|
| 24 |
ngrok_input = st.sidebar.text_input("Ngrok URL", value=st.session_state.ngrok_url)
|
| 25 |
if st.sidebar.button("Update Ngrok URL"):
|
| 26 |
st.session_state.ngrok_url = ngrok_input
|
|
|
|
| 27 |
st.sidebar.success("Ngrok URL updated!")
|
| 28 |
st.experimental_rerun()
|
| 29 |
|
|
@@ -39,13 +45,6 @@ NGROK_HEADERS = {
|
|
| 39 |
"User-Agent": "AI-Life-Coach-App"
|
| 40 |
}
|
| 41 |
|
| 42 |
-
# Session state for model status
|
| 43 |
-
if "model_status" not in st.session_state:
|
| 44 |
-
st.session_state.model_status = "checking"
|
| 45 |
-
|
| 46 |
-
if "available_models" not in st.session_state:
|
| 47 |
-
st.session_state.available_models = []
|
| 48 |
-
|
| 49 |
# Fetch Ollama status
|
| 50 |
def get_ollama_status(ngrok_url):
|
| 51 |
try:
|
|
@@ -110,9 +109,12 @@ def poll_model_status(ngrok_url):
|
|
| 110 |
|
| 111 |
# After user selects name, load conversation history
|
| 112 |
def get_conversation_history(user_id):
|
| 113 |
-
|
| 114 |
-
|
| 115 |
-
|
|
|
|
|
|
|
|
|
|
| 116 |
return []
|
| 117 |
|
| 118 |
# Check Ollama status with the current ngrok URL
|
|
@@ -145,6 +147,13 @@ elif st.session_state.model_status == "different_models":
|
|
| 145 |
else: # unreachable
|
| 146 |
model_status_container.error("❌ Ollama unreachable")
|
| 147 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 148 |
# Main chat interface
|
| 149 |
st.title("🧘 AI Life Coach")
|
| 150 |
st.markdown("Talk to your personal development assistant.")
|
|
@@ -160,6 +169,7 @@ with st.expander("🔍 Connection Status"):
|
|
| 160 |
st.write("- Configured Ollama Host:", config.ollama_host)
|
| 161 |
st.write("- Current Ngrok URL:", st.session_state.ngrok_url)
|
| 162 |
st.write("- Using Fallback:", use_fallback)
|
|
|
|
| 163 |
|
| 164 |
# Function to send message to Ollama
|
| 165 |
def send_to_ollama(user_input, conversation_history, ngrok_url):
|
|
@@ -191,6 +201,9 @@ def send_to_ollama(user_input, conversation_history, ngrok_url):
|
|
| 191 |
# Function to send message to Hugging Face (fallback)
|
| 192 |
def send_to_hf(user_input, conversation_history):
|
| 193 |
try:
|
|
|
|
|
|
|
|
|
|
| 194 |
# Initialize LLM client for Hugging Face
|
| 195 |
llm_client = LLMClient(provider="huggingface")
|
| 196 |
|
|
|
|
| 1 |
+
# Force redeploy trigger - version 1.6
|
| 2 |
import streamlit as st
|
| 3 |
from utils.config import config
|
| 4 |
import requests
|
| 5 |
import json
|
| 6 |
import os
|
| 7 |
+
from core.memory import load_user_state, check_redis_health
|
|
|
|
| 8 |
|
| 9 |
# Set page config
|
| 10 |
st.set_page_config(page_title="AI Life Coach", page_icon="🧘", layout="centered")
|
| 11 |
|
| 12 |
# Initialize session state for ngrok URL
|
| 13 |
+
if 'ngrok_url' not in st.session_state:
|
| 14 |
st.session_state.ngrok_url = config.ollama_host
|
| 15 |
|
| 16 |
+
if 'model_status' not in st.session_state:
|
| 17 |
+
st.session_state.model_status = "checking"
|
| 18 |
+
|
| 19 |
+
if 'available_models' not in st.session_state:
|
| 20 |
+
st.session_state.available_models = []
|
| 21 |
+
|
| 22 |
# Sidebar for user selection
|
| 23 |
st.sidebar.title("🧘 AI Life Coach")
|
| 24 |
user = st.sidebar.selectbox("Select User", ["Rob", "Sarah"])
|
|
|
|
| 29 |
ngrok_input = st.sidebar.text_input("Ngrok URL", value=st.session_state.ngrok_url)
|
| 30 |
if st.sidebar.button("Update Ngrok URL"):
|
| 31 |
st.session_state.ngrok_url = ngrok_input
|
| 32 |
+
st.session_state.model_status = "checking"
|
| 33 |
st.sidebar.success("Ngrok URL updated!")
|
| 34 |
st.experimental_rerun()
|
| 35 |
|
|
|
|
| 45 |
"User-Agent": "AI-Life-Coach-App"
|
| 46 |
}
|
| 47 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 48 |
# Fetch Ollama status
|
| 49 |
def get_ollama_status(ngrok_url):
|
| 50 |
try:
|
|
|
|
| 109 |
|
| 110 |
# After user selects name, load conversation history
|
| 111 |
def get_conversation_history(user_id):
    """Return the stored conversation list for *user_id*, or [] on any failure.

    Best-effort loader: the conversation is kept as a JSON string under the
    "conversation" field of the user's Redis state. Any Redis or JSON problem
    surfaces as a Streamlit warning instead of crashing the app.
    """
    history = []
    try:
        state = load_user_state(user_id)
        # Only attempt to decode when the field is actually present.
        if state and "conversation" in state:
            history = json.loads(state["conversation"])
    except Exception as e:
        st.warning(f"Could not load conversation history: {e}")
    return history
|
| 119 |
|
| 120 |
# Check Ollama status with the current ngrok URL
|
|
|
|
| 147 |
else: # unreachable
|
| 148 |
model_status_container.error("❌ Ollama unreachable")
|
| 149 |
|
| 150 |
+
# Redis status indicator
|
| 151 |
+
redis_status_container = st.sidebar.empty()
|
| 152 |
+
if check_redis_health():
|
| 153 |
+
redis_status_container.success("✅ Redis Connected")
|
| 154 |
+
else:
|
| 155 |
+
redis_status_container.warning("⚠️ Redis Not Available")
|
| 156 |
+
|
| 157 |
# Main chat interface
|
| 158 |
st.title("🧘 AI Life Coach")
|
| 159 |
st.markdown("Talk to your personal development assistant.")
|
|
|
|
| 169 |
st.write("- Configured Ollama Host:", config.ollama_host)
|
| 170 |
st.write("- Current Ngrok URL:", st.session_state.ngrok_url)
|
| 171 |
st.write("- Using Fallback:", use_fallback)
|
| 172 |
+
st.write("- Redis Health:", check_redis_health())
|
| 173 |
|
| 174 |
# Function to send message to Ollama
|
| 175 |
def send_to_ollama(user_input, conversation_history, ngrok_url):
|
|
|
|
| 201 |
# Function to send message to Hugging Face (fallback)
|
| 202 |
def send_to_hf(user_input, conversation_history):
|
| 203 |
try:
|
| 204 |
+
# Import here to avoid issues if not needed
|
| 205 |
+
from core.llm import LLMClient
|
| 206 |
+
|
| 207 |
# Initialize LLM client for Hugging Face
|
| 208 |
llm_client = LLMClient(provider="huggingface")
|
| 209 |
|
core/memory.py
CHANGED
|
@@ -1,18 +1,57 @@
|
|
| 1 |
import json
|
| 2 |
-
import
|
| 3 |
from utils.config import config
|
| 4 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
# Initialize Redis connection
|
| 6 |
-
redis_client =
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
decode_responses=True
|
| 12 |
-
)
|
| 13 |
|
| 14 |
def save_user_state(user_id: str, state: dict):
|
| 15 |
-
"""Save user state to Redis"""
|
|
|
|
|
|
|
|
|
|
|
|
|
| 16 |
try:
|
| 17 |
redis_client.hset(f"user:{user_id}", mapping=state)
|
| 18 |
return True
|
|
@@ -21,9 +60,23 @@ def save_user_state(user_id: str, state: dict):
|
|
| 21 |
return False
|
| 22 |
|
| 23 |
def load_user_state(user_id: str):
|
| 24 |
-
"""Load user state from Redis"""
|
|
|
|
|
|
|
|
|
|
|
|
|
| 25 |
try:
|
| 26 |
return redis_client.hgetall(f"user:{user_id}")
|
| 27 |
except Exception as e:
|
| 28 |
print(f"Error loading user state: {e}")
|
| 29 |
return {}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import json
|
| 2 |
+
import time
|
| 3 |
from utils.config import config
|
| 4 |
|
| 5 |
+
# Try to import redis, but handle if not available
|
| 6 |
+
try:
|
| 7 |
+
import redis
|
| 8 |
+
REDIS_AVAILABLE = True
|
| 9 |
+
except ImportError:
|
| 10 |
+
REDIS_AVAILABLE = False
|
| 11 |
+
redis = None
|
| 12 |
+
|
| 13 |
+
def get_redis_client():
    """Create and return a Redis client, retrying with exponential backoff.

    Returns:
        A connected ``redis.Redis`` client (connection verified via PING),
        or ``None`` when the redis package is not installed or every
        connection attempt fails.
    """
    if not REDIS_AVAILABLE:
        return None

    last_exception = None

    # config.redis_retries counts *re*tries, so attempt one more than that.
    for attempt in range(config.redis_retries + 1):
        try:
            redis_client = redis.Redis(
                host=config.redis_host,
                port=config.redis_port,
                username=config.redis_username,
                password=config.redis_password,
                decode_responses=True,
                socket_connect_timeout=5,
                socket_timeout=5,
            )
            # Fail fast here rather than on the first hset/hgetall later.
            redis_client.ping()
            return redis_client
        except Exception as e:
            last_exception = e
            if attempt < config.redis_retries:
                # Exponential backoff: delay, 2*delay, 4*delay, ...
                time.sleep(config.redis_retry_delay * (2 ** attempt))
                continue

    # Previously `last_exception` was captured but silently discarded, and
    # because this function returns None instead of raising, the caller's
    # try/except never fires either. Log it so failures are diagnosable.
    print(
        f"Could not connect to Redis after {config.redis_retries + 1} "
        f"attempts: {last_exception}"
    )
    return None
|
| 41 |
+
|
| 42 |
# Initialize Redis connection
|
| 43 |
+
redis_client = None
|
| 44 |
+
try:
|
| 45 |
+
redis_client = get_redis_client()
|
| 46 |
+
except Exception as e:
|
| 47 |
+
print(f"Warning: Could not connect to Redis: {e}")
|
|
|
|
|
|
|
| 48 |
|
| 49 |
def save_user_state(user_id: str, state: dict):
|
| 50 |
+
"""Save user state to Redis with retry logic"""
|
| 51 |
+
if not redis_client:
|
| 52 |
+
print("Redis not available, skipping save_user_state")
|
| 53 |
+
return False
|
| 54 |
+
|
| 55 |
try:
|
| 56 |
redis_client.hset(f"user:{user_id}", mapping=state)
|
| 57 |
return True
|
|
|
|
| 60 |
return False
|
| 61 |
|
| 62 |
def load_user_state(user_id: str) -> dict:
    """Load user state from Redis.

    NOTE: the previous docstring claimed "retry logic", but no retry happens
    here — retries occur only once, when the module-level client is created
    by get_redis_client().

    Args:
        user_id: Key suffix; state lives in the Redis hash ``user:{user_id}``.

    Returns:
        dict: The stored hash fields, or an empty dict when Redis is
        unavailable or the lookup fails.
    """
    # Degrade gracefully when no Redis connection was established at import.
    if not redis_client:
        print("Redis not available, returning empty state")
        return {}

    try:
        return redis_client.hgetall(f"user:{user_id}")
    except Exception as e:
        print(f"Error loading user state: {e}")
        return {}
|
| 73 |
+
|
| 74 |
+
def check_redis_health() -> bool:
    """Return True when a Redis client exists and responds to PING."""
    if not redis_client:
        return False
    try:
        redis_client.ping()
        return True
    except Exception:
        # Narrowed from a bare `except:`, which would also swallow
        # KeyboardInterrupt and SystemExit.
        return False
|