Commit 2f0030b
1 Parent(s): cbd94a4
fix(rag): address remaining CodeRabbit feedback
- Validate OPENAI_API_KEY before creating LLM/embedding instances
- Re-raise exceptions in retrieve() instead of returning empty list
- Re-raise exceptions in query() instead of returning error string
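
The API-key check turns a late failure inside the OpenAI client into an explicit error at construction time. A minimal caller-side sketch, assuming the service can be constructed with its defaults and that ConfigurationError is importable as in the diff below; build_rag_service and its fallback behaviour are illustrative, not part of this commit:

import structlog

from src.services.llamaindex_rag import LlamaIndexRAGService
from src.utils.exceptions import ConfigurationError

logger = structlog.get_logger()

def build_rag_service() -> LlamaIndexRAGService | None:
    """Construct the RAG service, surfacing a missing OPENAI_API_KEY early."""
    try:
        # __init__ now raises ConfigurationError immediately when OPENAI_API_KEY
        # is unset, before any LLM/embedding objects are created.
        return LlamaIndexRAGService()
    except ConfigurationError as exc:
        # Hypothetical handling: record the problem and let the app run without RAG.
        logger.warning("rag_service_unavailable", error=str(exc))
        return None

Raising in __init__ keeps configuration problems out of the retrieval path, where they would otherwise only appear on the first query.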
src/services/llamaindex_rag.py
CHANGED
@@ -8,6 +8,7 @@ from typing import Any
 import structlog
 
 from src.utils.config import settings
+from src.utils.exceptions import ConfigurationError
 from src.utils.models import Evidence
 
 logger = structlog.get_logger()
@@ -58,6 +59,10 @@ class LlamaIndexRAGService:
         self.persist_dir = persist_dir or settings.chroma_db_path
         self.similarity_top_k = similarity_top_k
 
+        # Validate API key before use
+        if not settings.openai_api_key:
+            raise ConfigurationError("OPENAI_API_KEY required for LlamaIndex RAG service")
+
         # Configure LlamaIndex settings (use centralized config)
         self._Settings.llm = OpenAI(
             model=settings.openai_model,
@@ -190,7 +195,7 @@ class LlamaIndexRAGService:
 
         except Exception as e:
             logger.error("failed_to_retrieve", error=str(e), query=query[:50])
-            return []
+            raise # Re-raise to allow callers to distinguish errors from empty results
 
     def query(self, query_str: str, top_k: int | None = None) -> str:
         """
@@ -217,7 +222,7 @@ class LlamaIndexRAGService:
 
         except Exception as e:
             logger.error("failed_to_query", error=str(e), query=query_str[:50])
-
+            raise # Re-raise to allow callers to handle errors explicitly
 
     def clear_collection(self) -> None:
         """Clear all documents from the collection."""
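
Because retrieve() and query() now re-raise after logging, callers can tell a backend failure apart from a legitimately empty result. A rough usage sketch, assuming retrieve() returns a list of Evidence objects and query() returns a string, as in the file above; answer_question and its messages are illustrative, not part of this commit:

from src.services.llamaindex_rag import LlamaIndexRAGService

def answer_question(rag: LlamaIndexRAGService, question: str) -> str:
    """Use the RAG service, treating failures and empty results differently."""
    try:
        evidence = rag.retrieve(question)
    except Exception:
        # Previously this path was invisible: retrieve() swallowed the error and
        # returned an empty list. Now the exception propagates after being logged.
        return "Retrieval backend is unavailable; please try again later."

    if not evidence:
        # An empty list now reliably means "no matching documents", not an error.
        return "No relevant documents were found for this question."

    return rag.query(question)

The bare raise preserves the original exception and traceback, so the structlog entry and the caller see the same error.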