# Text Prediction API — Cool Shot Systems (rev 0fc9402).
import os
from fastapi import FastAPI, Depends, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from auth import get_current_user
# Application instance; title/description/version surface in the OpenAPI docs.
app = FastAPI(
    title="Text Prediction API",
    description="AI-powered text prediction service",
    version="0.1.0"
)

# CORS: a comma-separated ALLOWED_ORIGINS env var restricts origins in
# production; with no/empty env var we fall back to allowing every origin.
_origins_env = os.getenv("ALLOWED_ORIGINS")
allowed_origins = _origins_env.split(",") if _origins_env else ["*"]

app.add_middleware(
    CORSMiddleware,
    allow_origins=allowed_origins,
    allow_credentials=False,   # no cookies/auth headers shared cross-origin
    allow_methods=["*"],
    allow_headers=["*"],
)
class TextPredictRequest(BaseModel):
    """Request body for /predict: the raw text to run prediction on."""
    text: str
class TextPredictResponse(BaseModel):
    """Response body for /predict."""
    prediction: str   # model output (currently a mocked "Processed: ..." string)
    confidence: float # model confidence score (currently a fixed placeholder)
    input_text: str   # echo of the text the caller submitted
@app.get("/")
async def root():
    """Liveness probe at the service root; reports the service name."""
    status_payload = {"status": "healthy", "service": "text-api"}
    return status_payload
@app.get("/health")
async def health():
    """Minimal liveness probe (no service metadata)."""
    payload = {"status": "healthy"}
    return payload
@app.post("/predict", response_model=TextPredictResponse)
async def predict(
    request: TextPredictRequest,
    current_user: dict = Depends(get_current_user)
):
    """
    Protected endpoint for text prediction.
    Requires valid Bearer token.
    """
    # Placeholder prediction logic
    # In a real application, this would call an ML model
    input_value = request.text

    # Mock prediction: echo the text, truncated to 50 chars with an ellipsis.
    if len(input_value) > 50:
        snippet = f"{input_value[:50]}..."
    else:
        snippet = input_value
    mock_prediction = f"Processed: {snippet}"

    # Fixed placeholder confidence until a real model is wired in.
    return TextPredictResponse(
        prediction=mock_prediction,
        confidence=0.95,
        input_text=input_value
    )
# Dev entry point: run the app directly with uvicorn on all interfaces, port 8001.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8001)