Spaces:
Sleeping
Sleeping
Create entrypoint.sh
Browse files- entrypoint.sh +31 -0
entrypoint.sh
ADDED
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/bin/bash
# Container entrypoint: activate the conda environment, start the Ray head
# node plus the ServerlessLLM store/serve daemons, deploy one model, then
# block on `wait` so the container stays alive.
#
# Env:
#   PORT      - optional; port for the serve API (defaults to 8080)
#   APP_PORT  - exported here for child processes

# -u: fail on unset vars; pipefail: propagate pipeline failures.
# (-e deliberately omitted: background jobs + polling loop below.)
set -uo pipefail

# Activate the Conda environment. Sourcing the hook is required before
# `conda activate` works in a non-interactive shell; abort if activation
# fails, otherwise every command below runs against the wrong environment.
source /opt/conda/etc/profile.d/conda.sh
conda activate sllm || { echo "ERROR: could not activate conda env 'sllm'" >&2; exit 1; }

# Application port: honor the platform-provided $PORT, default 8080.
export APP_PORT="${PORT:-8080}"

echo "Iniciando servicios en el puerto $APP_PORT"

# Start the Ray head node (CPU-only control node) in the background.
ray start --head --port=6379 --num-cpus=4 --num-gpus=0 \
  --resources='{"control_node": 1}' &
sleep 5

# Start the Serverless LLM store daemon.
python -m serverless_llm_store start &
sleep 5

# Start the SLLM serve API. The original hardcoded 8080 even though
# APP_PORT was exported above; use the variable so $PORT is honored
# (default is unchanged, so behavior is identical when PORT is unset).
sllm-serve start --port "$APP_PORT" &
sleep 5

# Wait until port 8073 accepts connections before deploying the model.
# NOTE(review): nothing started above visibly listens on 8073 — presumably
# an internal ServerlessLLM port; confirm, otherwise this loop spins forever.
while ! nc -z localhost 8073; do sleep 2; done

# Deploy the model through the serve API.
sllm-cli deploy --model lilmeaty/my_xdd

# Keep the container running while the background services live.
wait