# HuggingFace Spaces page header (extraction residue) — Space status: Sleeping
# Startup script for the TEN Turn Detector HuggingFace Space:
# 1) download the model, 2) serve it with vLLM (background),
# 3) start the demo frontend (background), 4) run the backend in the foreground.
set -euo pipefail   # stop on the first failed step (e.g. a failed model download)

mkdir -p data

# Alternative models kept for reference:
#HF_MODEL_PATH=Qwen/Qwen2.5-1.5B-Instruct
#HF_MODEL_PATH=deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B
#HF_MODEL_PATH=Qwen/Qwen2.5-VL-3B-Instruct
HF_MODEL_PATH=TEN-framework/TEN_Turn_Detector
HF_MODEL_NAME=$(basename "${HF_MODEL_PATH}")
LOCAL_MODEL_PATH=./data/${HF_MODEL_NAME}
# TODO: use your own key and put into secret
VLLM_SERVE_API_KEY=TEN_Turn_Detector

# download model (HF_HUB_ENABLE_HF_TRANSFER=1 enables the faster hf_transfer backend)
HF_HUB_ENABLE_HF_TRANSFER=1 huggingface-cli download "${HF_MODEL_PATH}" --local-dir "${LOCAL_MODEL_PATH}"

# start vllm server (background; backend is expected to reach it via the OpenAI-compatible API)
vllm serve "${LOCAL_MODEL_PATH}" --served-model-name "${HF_MODEL_NAME}" --api-key "${VLLM_SERVE_API_KEY}" &

# start frontend (background)
export NEXT_PUBLIC_EDIT_GRAPH_MODE=false
#cd /app/playground && npm run dev &
cd /app/demo && npm run dev &

# start backend (foreground — keeps the container alive)
#export OPENAI_API_BASE=http://127.0.0.1:8000/v1
#export OPENAI_PROXY_URL=
#export OPENAI_MODEL=${HF_MODEL_NAME}
#export OPENAI_API_KEY=${VLLM_SERVE_API_KEY}
cd /app && task run