```dockerfile
# Use the official Python slim image
FROM python:3.10-slim

# Set the working directory
WORKDIR /app

# Install system dependencies needed by Ollama
RUN apt-get update && apt-get install -y curl libgomp1 && rm -rf /var/lib/apt/lists/*

# Install Ollama
RUN curl -fsSL https://ollama.com/install.sh | sh

# Copy requirements and install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy the application code
COPY . .

# Expose port 8000 for FastAPI
EXPOSE 8000

# Start the Ollama server, give it a moment to come up, pull the Mistral model if it is missing, then run FastAPI
CMD ["sh", "-c", "ollama serve & sleep 5 && (ollama list | grep -q mistral || ollama pull mistral) && uvicorn app.main:app --host 0.0.0.0 --port 8000 & wait"]
```
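
The Dockerfile only assumes that a requirements.txt and an app package exposing a FastAPI instance as app.main:app sit next to it; neither file is shown here. As a minimal sketch of what such an app/main.py could look like, assuming httpx is listed in requirements.txt and the app simply proxies prompts to the local Ollama server on its default port 11434:

```python
# app/main.py -- hypothetical sketch; only the "app.main:app" import path is implied by the Dockerfile
import httpx
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel

# Ollama serves its HTTP API on port 11434 inside the same container
OLLAMA_URL = "http://localhost:11434/api/generate"

app = FastAPI()


class PromptRequest(BaseModel):
    prompt: str


@app.post("/generate")
async def generate(req: PromptRequest) -> dict:
    """Forward the prompt to the local Ollama server and return Mistral's reply."""
    payload = {"model": "mistral", "prompt": req.prompt, "stream": False}
    async with httpx.AsyncClient(timeout=120.0) as client:
        try:
            resp = await client.post(OLLAMA_URL, json=payload)
            resp.raise_for_status()
        except httpx.HTTPError as exc:
            raise HTTPException(status_code=502, detail=f"Ollama request failed: {exc}") from exc
    return {"response": resp.json().get("response", "")}
```

With an app like this in place, the image can be built with `docker build -t fastapi-ollama .` (the image name is arbitrary) and started with `docker run -p 8000:8000 fastapi-ollama`, after which the endpoint is reachable at http://localhost:8000/generate.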