FROM ubuntu:22.04
# Install system dependencies
RUN apt-get update && \
    apt-get install -y \
        build-essential \
        libssl-dev \
        zlib1g-dev \
        libboost-system-dev \
        libboost-filesystem-dev \
        cmake \
        git \
        python3-pip \
        curl \
        wget && \
    rm -rf /var/lib/apt/lists/*
# Install Python dependencies
RUN pip3 install huggingface-hub openai gradio
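# Note: these packages are presumably consumed by app.py -- gradio for the web UI
# on port 7860 and the openai client for llama-server's OpenAI-compatible API.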
# Build llama.cpp
RUN git clone https://github.com/ggerganov/llama.cpp && \
    cd llama.cpp && \
    mkdir build && \
    cd build && \
    cmake .. -DLLAMA_BUILD_SERVER=ON -DLLAMA_BUILD_EXAMPLES=ON -DCMAKE_BUILD_TYPE=Release && \
    cmake --build . --config Release --target llama-server -j $(nproc)
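# With no WORKDIR set, the clone lands in /llama.cpp and the server binary is
# produced at /llama.cpp/build/bin/llama-server.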
# Download model
RUN mkdir -p /models && \
    wget -O /models/model.q8_0.gguf https://huggingface.co/ValueFX9507/Tifa-DeepsexV2-7b-MGRPO-GGUF-Q4/resolve/main/Tifa-DeepsexV2-7b-0218-Q4_KM.gguf
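# Note: the file fetched above is a Q4_K_M quant despite the q8_0 local filename;
# /models/model.q8_0.gguf is presumably the path that start.sh/app.py reference.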
# Copy app and startup script
COPY app.py /app.py
COPY start.sh /start.sh
RUN chmod +x /start.sh
# Expose ports
EXPOSE 7860 8080
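# 7860 is Gradio's default port (and the port a Docker Space is expected to serve);
# 8080 is llama-server's default API port.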
# Start services
CMD ["/start.sh"]