Update Dockerfile
Browse files- Dockerfile +17 -31
Dockerfile
CHANGED
@@ -7,49 +7,37 @@ WORKDIR /app
|
|
7 |
# 安装系统依赖
|
8 |
RUN apt-get update && apt-get install -y curl sudo fontconfig
|
9 |
|
10 |
-
#
|
11 |
-
# ENV OLLAMA_HOME=/app/.ollama
|
12 |
-
# RUN mkdir -p /app/.ollama && chmod -R 777 /app/.ollama
|
13 |
-
|
14 |
-
# 修改 OLLAMA_HOME 路径并确保正确设置权限
|
15 |
ENV OLLAMA_HOME=/root/.ollama
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
ENV
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
|
30 |
# 确保/app目录可写
|
31 |
RUN chmod -R 777 /app
|
32 |
|
33 |
-
# 检查环境变量
|
34 |
-
RUN echo "OLLAMA_HOME: $OLLAMA_HOME"
|
35 |
-
RUN echo "GRADIO_TEMP_DIR: $GRADIO_TEMP_DIR"
|
36 |
-
RUN echo "MPLCONFIGDIR: $MPLCONFIGDIR"
|
37 |
-
RUN echo "FONTCONFIG_PATH: $FONTCONFIG_PATH"
|
38 |
-
|
39 |
# 安装Ollama
|
40 |
RUN curl -L https://ollama.ai/install.sh | sh
|
41 |
|
42 |
-
|
43 |
# 安装Gradio及其依赖
|
44 |
-
RUN pip install gradio
|
45 |
|
|
|
46 |
RUN curl -L https://huggingface.co/shenzhi-wang/Llama3.1-8B-Chinese-Chat/resolve/main/gguf/llama3.1_8b_chinese_chat_q8_0.gguf?download=true -o /app/llama3.1_8b_chinese_chat_q8_0.gguf
|
47 |
|
48 |
# 将当前目录下的所有文件复制到容器内的/app目录
|
49 |
COPY . /app
|
50 |
|
51 |
-
RUN pip install requests
|
52 |
-
|
53 |
# 启动Ollama服务并放到后台运行
|
54 |
RUN nohup ollama serve > /dev/null 2>&1 &
|
55 |
|
@@ -60,6 +48,4 @@ RUN sleep 10
|
|
60 |
RUN ollama create llama3-zh -f /app/Modelfile
|
61 |
|
62 |
# 启动Ollama服务和Gradio应用
|
63 |
-
|
64 |
-
|
65 |
-
CMD ollama serve --path ${OLLAMA_HOME} & python app.py
|
|
|
7 |
# Install system dependencies: curl for downloads, fontconfig for font
# rendering (matplotlib). --no-install-recommends and cleaning the apt lists
# in the same layer keep the image small (hadolint DL3009/DL3015).
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        curl \
        fontconfig \
        sudo \
    && rm -rf /var/lib/apt/lists/*

# Root all cache/config locations under /root in one grouped ENV instruction.
ENV OLLAMA_HOME=/root/.ollama \
    GRADIO_TEMP_DIR=/root/.gradio \
    MPLCONFIGDIR=/root/.config/matplotlib \
    FONTCONFIG_PATH=/root/.cache/fontconfig \
    XDG_CACHE_HOME=/root/.cache

# Create the directories up front and make them writable.
# NOTE(review): 777 is broader than needed if the container always runs as
# root; kept for parity with runtimes (e.g. HF Spaces) that may start the
# image under a different UID — confirm before tightening.
RUN mkdir -p \
        "${OLLAMA_HOME}" \
        "${GRADIO_TEMP_DIR}" \
        "${MPLCONFIGDIR}" \
        "${FONTCONFIG_PATH}" \
        "${XDG_CACHE_HOME}" \
    && chmod -R 777 /root

# Ensure /app is writable (the model download and Gradio both write here).
RUN chmod -R 777 /app

# Install Ollama. -fsSL makes curl fail on HTTP errors instead of piping an
# error page into sh.
RUN curl -fsSL https://ollama.ai/install.sh | sh

# Install the Python dependencies for the app. --no-cache-dir keeps pip's
# wheel cache out of the image layer (hadolint DL3042).
RUN pip install --no-cache-dir gradio requests

# Download the GGUF model weights. -f makes curl fail the build on an HTTP
# error instead of saving an error page as the model file.
RUN curl -fL "https://huggingface.co/shenzhi-wang/Llama3.1-8B-Chinese-Chat/resolve/main/gguf/llama3.1_8b_chinese_chat_q8_0.gguf?download=true" \
        -o /app/llama3.1_8b_chinese_chat_q8_0.gguf

# Copy the application sources (app.py, Modelfile, ...) into /app.
COPY . /app

# Register the model with Ollama. A process backgrounded in one RUN layer does
# NOT survive into later layers, so the server must be started, waited for,
# and used by `ollama create` inside a single RUN. The created model persists
# in ${OLLAMA_HOME}, which lives in this layer.
RUN ollama serve & \
    sleep 10 \
    && ollama create llama3-zh -f /app/Modelfile

# Start the Ollama server in the background, give it a moment to come up,
# then exec the Gradio app so Python becomes PID 1 and receives SIGTERM from
# `docker stop`.
CMD ["/bin/sh", "-c", "ollama serve & sleep 5; exec python app.py"]