amitbhatt6075 committed
Commit d2020f6
1 Parent(s): 28f6f67

fix: Add build essentials and cmake to Dockerfile for llama-cpp-python

Files changed (1)
  1. Dockerfile +11 -4
Dockerfile CHANGED
@@ -1,12 +1,20 @@
-# Start with the official Python 3.11 image
-FROM python:3.11-slim
+# Start with the official Python 3.11 image (the full version, not slim)
+FROM python:3.11
 
 # Set the working directory inside the container
 WORKDIR /app
 
+# Install system dependencies needed to build wheels for libraries like llama-cpp-python
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    build-essential \
+    cmake \
+    && rm -rf /var/lib/apt/lists/*
+
 # Copy the requirements file first and install dependencies
-# This is a best practice for Docker caching
 COPY requirements.txt requirements.txt
+
+# Tell llama-cpp-python to build itself during installation
+ENV CMAKE_ARGS="-DLLAMA_CUBLAS=OFF -DLLAMA_HIPBLAS=OFF -DLLAMA_METAL=OFF"
 RUN pip install --no-cache-dir --upgrade -r requirements.txt
 
 # Copy the rest of your application code
@@ -16,5 +24,4 @@ COPY . .
 EXPOSE 7860
 
 # Command to run the application using uvicorn
-# We need to specify the host and port for Hugging Face Spaces
 CMD ["uvicorn", "api.main:app", "--host", "0.0.0.0", "--port", "7860"]
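A quick way to sanity-check a change like this is to build the image locally and confirm that llama-cpp-python compiles from source and the server starts. This is only a sketch: the image tag my-space and the import check are illustrative assumptions, not part of the repository.

# Build the image from the repository root (tag name is an arbitrary choice)
docker build -t my-space .

# Assumed check: confirm the llama_cpp module was built and is importable inside the image
docker run --rm my-space python -c "import llama_cpp; print(llama_cpp.__version__)"

# Run the app, publishing the same port the Dockerfile exposes
docker run --rm -p 7860:7860 my-space

If the apt packages are missing, the pip step typically fails while compiling the llama-cpp-python wheel, which is what this commit addresses by installing build-essential and cmake before requirements.txt is processed.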