|
|
"""FastAPI backend for LLM Council.""" |
|
|
|
|
|
from fastapi import FastAPI, HTTPException |
|
|
from fastapi.middleware.cors import CORSMiddleware |
|
|
from fastapi.responses import StreamingResponse |
|
|
from pydantic import BaseModel |
|
|
from typing import List, Dict, Any |
|
|
import uuid |
|
|
import json |
|
|
import asyncio |
|
|
|
|
|
from . import storage |
|
|
from .council import run_full_council, generate_conversation_title, stage1_collect_responses, stage2_collect_rankings, stage3_synthesize_final, calculate_aggregate_rankings |
|
|
|
|
|
# FastAPI application instance for the LLM Council service.
app = FastAPI(title="LLM Council API")

# Allow browser frontends on the common local dev ports (Vite: 5173,
# node dev servers: 3000) to call this API; credentials are permitted
# so cookies/sessions survive the cross-origin requests.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["http://localhost:5173", "http://localhost:3000"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
|
|
|
|
|
|
|
class CreateConversationRequest(BaseModel):
    """Payload for creating a new conversation; carries no fields today."""
|
|
|
|
|
|
|
|
class SendMessageRequest(BaseModel):
    """Request to send a message in a conversation."""

    # The user's message text; forwarded verbatim to the council pipeline.
    content: str
|
|
|
|
|
|
|
|
class ConversationMetadata(BaseModel):
    """Conversation metadata for list view."""

    # Unique conversation id (a UUID4 string — see create_conversation).
    id: str
    # Creation timestamp as a string; exact format is defined by storage.
    created_at: str
    # Human-readable title, generated from the first user message.
    title: str
    # Number of messages in the conversation (used by the list view).
    message_count: int
|
|
|
|
|
|
|
|
class Conversation(BaseModel):
    """Full conversation with all messages."""

    # Unique conversation id (a UUID4 string — see create_conversation).
    id: str
    # Creation timestamp as a string; exact format is defined by storage.
    created_at: str
    # Human-readable title, generated from the first user message.
    title: str
    # Ordered message dicts; shape is whatever storage persists
    # (user messages plus assistant messages with stage1/2/3 payloads).
    messages: List[Dict[str, Any]]
|
|
|
|
|
|
|
|
@app.get("/")
async def root():
    """Simple liveness probe for the API."""
    payload = {"status": "ok", "service": "LLM Council API"}
    return payload
|
|
|
|
|
|
|
|
@app.get("/api/conversations", response_model=List[ConversationMetadata])
async def list_conversations():
    """Return metadata for every stored conversation (no message bodies)."""
    all_metadata = storage.list_conversations()
    return all_metadata
|
|
|
|
|
|
|
|
@app.post("/api/conversations", response_model=Conversation)
async def create_conversation(request: CreateConversationRequest):
    """Create and persist a new, empty conversation."""
    # The request body currently carries no fields; a fresh UUID4 is
    # the conversation's identity.
    new_id = str(uuid.uuid4())
    return storage.create_conversation(new_id)
|
|
|
|
|
|
|
|
@app.get("/api/conversations/{conversation_id}", response_model=Conversation)
async def get_conversation(conversation_id: str):
    """Fetch a single conversation, including all of its messages.

    Raises:
        HTTPException: 404 if the conversation does not exist.
    """
    conversation = storage.get_conversation(conversation_id)
    if conversation is not None:
        return conversation
    raise HTTPException(status_code=404, detail="Conversation not found")
|
|
|
|
|
|
|
|
@app.post("/api/conversations/{conversation_id}/message")
async def send_message(conversation_id: str, request: SendMessageRequest):
    """
    Send a message and run the 3-stage council process.

    Returns the complete response with all stages (stage1/stage2/stage3)
    plus council metadata.

    Raises:
        HTTPException: 404 if the conversation does not exist.
    """
    conversation = storage.get_conversation(conversation_id)
    if conversation is None:
        raise HTTPException(status_code=404, detail="Conversation not found")

    # A title is only generated for the very first user message;
    # capture this before the new message is appended.
    is_first_message = len(conversation["messages"]) == 0

    storage.add_user_message(conversation_id, request.content)

    # Kick off title generation concurrently with the council run
    # (mirrors the streaming endpoint) instead of serializing the two
    # LLM round-trips, which added a full extra round-trip of latency
    # to every first message.
    title_task = None
    if is_first_message:
        title_task = asyncio.create_task(generate_conversation_title(request.content))

    stage1_results, stage2_results, stage3_result, metadata = await run_full_council(
        request.content
    )

    if title_task is not None:
        storage.update_conversation_title(conversation_id, await title_task)

    # Persist the full assistant turn only after all stages succeed.
    storage.add_assistant_message(
        conversation_id,
        stage1_results,
        stage2_results,
        stage3_result
    )

    return {
        "stage1": stage1_results,
        "stage2": stage2_results,
        "stage3": stage3_result,
        "metadata": metadata
    }
|
|
|
|
|
|
|
|
@app.post("/api/conversations/{conversation_id}/message/stream")
async def send_message_stream(conversation_id: str, request: SendMessageRequest):
    """
    Send a message and stream the 3-stage council process.

    Returns Server-Sent Events as each stage completes.
    """
    conversation = storage.get_conversation(conversation_id)
    if conversation is None:
        raise HTTPException(status_code=404, detail="Conversation not found")

    # A title is only generated for the very first user message;
    # capture this before the new message is appended below.
    is_first_message = len(conversation["messages"]) == 0

    async def event_generator():
        # Yields SSE-framed JSON events ("data: {...}\n\n"), one pair of
        # start/complete events per stage, then a terminal 'complete'.
        try:
            storage.add_user_message(conversation_id, request.content)

            # Title generation runs concurrently with the council stages;
            # it is awaited only after stage 3 finishes.
            title_task = None
            if is_first_message:
                title_task = asyncio.create_task(generate_conversation_title(request.content))

            # Stage 1: collect independent responses to the prompt.
            yield f"data: {json.dumps({'type': 'stage1_start'})}\n\n"
            stage1_results = await stage1_collect_responses(request.content)
            yield f"data: {json.dumps({'type': 'stage1_complete', 'data': stage1_results})}\n\n"

            # Stage 2: collect rankings of the stage-1 responses; labels
            # are mapped back to models and aggregated for the client.
            yield f"data: {json.dumps({'type': 'stage2_start'})}\n\n"
            stage2_results, label_to_model = await stage2_collect_rankings(request.content, stage1_results)
            aggregate_rankings = calculate_aggregate_rankings(stage2_results, label_to_model)
            yield f"data: {json.dumps({'type': 'stage2_complete', 'data': stage2_results, 'metadata': {'label_to_model': label_to_model, 'aggregate_rankings': aggregate_rankings}})}\n\n"

            # Stage 3: synthesize a final answer from stages 1 and 2.
            yield f"data: {json.dumps({'type': 'stage3_start'})}\n\n"
            stage3_result = await stage3_synthesize_final(request.content, stage1_results, stage2_results)
            yield f"data: {json.dumps({'type': 'stage3_complete', 'data': stage3_result})}\n\n"

            # Emit the (possibly still-pending) title once it resolves.
            if title_task:
                title = await title_task
                storage.update_conversation_title(conversation_id, title)
                yield f"data: {json.dumps({'type': 'title_complete', 'data': {'title': title}})}\n\n"

            # Persist the full assistant turn only after all stages succeed.
            storage.add_assistant_message(
                conversation_id,
                stage1_results,
                stage2_results,
                stage3_result
            )

            yield f"data: {json.dumps({'type': 'complete'})}\n\n"

        except Exception as e:
            # Boundary handler: surface any failure to the client as an
            # SSE 'error' event instead of silently dropping the stream.
            yield f"data: {json.dumps({'type': 'error', 'message': str(e)})}\n\n"

    return StreamingResponse(
        event_generator(),
        media_type="text/event-stream",
        headers={
            # Prevent intermediaries from buffering/caching the stream.
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
        }
    )
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Dev entry point: run the app directly with uvicorn on port 8001.
    # Production deployments would normally launch via an external ASGI
    # server / process manager instead.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8001)
|
|
|