# Start from a standard Python image (3.10 is robust and common)
FROM python:3.10
# Set the working directory inside the container
# All subsequent commands will run from here
WORKDIR /app
# Copy the requirements file and install the dependencies
# The RUN command installs everything listed there (PyTorch, Transformers, and FastAPI)
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
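# A requirements.txt matching the comment above might look like this
# (an assumed sketch; the actual file in this repo may differ or pin versions):
#
#   torch
#   transformers
#   fastapi
#   uvicorn[standard]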
# Copy the main application code
COPY main.py .
# Expose the port the application will listen on
# Hugging Face Spaces expects Docker apps to serve on port 7860 by default
EXPOSE 7860
# Define the command to run the Uvicorn server (which hosts FastAPI)
# It tells the container to load the 'app' object from 'main.py'
# and serve it on port 7860
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]