Update start.sh
Browse files
start.sh
CHANGED
@@ -4,12 +4,26 @@
|
|
4 |
# Launch the Ollama server in the background; its PID stays in this shell's job table.
ollama serve &

# Wait for the Ollama service to come up.
# NOTE(review): a fixed sleep is a race — the API is probed explicitly below.
sleep 10

# Verify the Ollama HTTP API is actually reachable before using it.
# --retry-connrefused keeps retrying while the server is still binding its port;
# -f (--fail) makes HTTP error statuses (4xx/5xx) fail the check too.
if ! curl -sf --retry 5 --retry-delay 2 --retry-connrefused \
    http://127.0.0.1:11434/api/version; then
  echo "Error: Ollama service failed to start" >&2
  exit 1
fi

# Create the llama3-zh model from the bundled Modelfile if it does not exist yet.
# Abort on failure instead of starting the app against a missing model.
if ! ollama list | grep -q "llama3-zh"; then
  if ! ollama create llama3-zh -f /app/Modelfile; then
    echo "Error: Failed to create llama3-zh model" >&2
    exit 1
  fi
fi

# Start the Python application in the foreground (container's main process).
python app.py
# Launch the Ollama server in the background; its PID stays in this shell's job table.
ollama serve &

# Give the server a moment to start before probing the API.
echo "Waiting for Ollama service to start..."
sleep 10

# Verify the Ollama HTTP API is reachable.
# Fixes: original curl lacked --fail, so an HTTP 4xx/5xx still exited 0 and the
# check passed vacuously; and `if [ $? -ne 0 ]` is replaced by testing the
# command directly (any statement inserted in between would clobber $?).
# --retry-connrefused keeps retrying while the server is still binding its port.
if ! curl -sf --retry 5 --retry-delay 2 --retry-connrefused \
    http://127.0.0.1:11434/api/version; then
  echo "Error: Ollama service failed to start" >&2
  exit 1
fi

# Create the llama3-zh model from the bundled Modelfile if it does not exist yet.
if ! ollama list | grep -q "llama3-zh"; then
  echo "Creating llama3-zh model..."
  if ! ollama create llama3-zh -f /app/Modelfile; then
    echo "Error: Failed to create llama3-zh model" >&2
    exit 1
  fi
fi

echo "Starting Python application..."
# Run the app in the foreground so the container stays alive on it.
python app.py