# syntax=docker/dockerfile:1

# FastAPI app served by uvicorn, with a local Ollama server for LLM inference.
# Use the official Python slim image as a minimal base.
FROM python:3.10-slim

# All subsequent relative paths resolve under /app (created automatically).
WORKDIR /app

# Use bash with pipefail so a failed `curl` in the pipeline below fails the
# build instead of being masked by the right-hand `sh` (hadolint DL4006).
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# System dependencies: curl + ca-certificates to fetch the Ollama installer
# over HTTPS, libgomp1 for Ollama's OpenMP runtime. update+install+cleanup
# stay in one layer so the apt lists never persist in the image (DL3009/DL3015).
RUN apt-get update && apt-get install -y --no-install-recommends \
      ca-certificates \
      curl \
      libgomp1 \
    && rm -rf /var/lib/apt/lists/*

# Install Ollama via the vendor installer script.
# NOTE(review): `curl | sh` executes unpinned remote code; consider pinning a
# specific Ollama release and verifying its checksum for reproducible builds.
RUN curl -fsSL https://ollama.com/install.sh | sh

# Copy only the dependency manifest first so the pip layer is cached until
# requirements.txt itself changes, not on every source edit.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy the application code last (keep a .dockerignore so .git, caches, and
# local .env files never enter the build context).
COPY . .

# Documentation only (does not publish the port): the FastAPI app listens on 8000.
EXPOSE 8000

# NOTE(review): this image runs as root and hosts two processes (ollama +
# uvicorn) in one container. The Ollama installer and its ~/.ollama model
# store assume this layout; confirm before adding a USER directive or
# splitting into separate containers/adding an init such as tini.

# Start the Ollama server in the background, wait (bounded) until its API
# answers so `ollama list` cannot race a not-yet-ready server, pull the
# mistral model only if missing, then `exec` uvicorn so the API server
# replaces the shell and receives SIGTERM directly on `docker stop`.
# NOTE(review): the model is pulled at container start; mount a volume at
# /root/.ollama to avoid re-downloading it for every fresh container.
CMD ["bash", "-c", "ollama serve & for i in $(seq 1 30); do ollama list >/dev/null 2>&1 && break; sleep 1; done; ollama list | grep -q mistral || ollama pull mistral; exec uvicorn app.main:app --host 0.0.0.0 --port 8000"]