# quantitative-analysis-platform / docker-compose.yml
services:
  # --- Application Services ---
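
  # Redis; presumably the Celery broker / result store, configured via .env.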
  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"
    restart: always
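
  # ASGI backend served by Uvicorn, with --reload for development.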
  backend:
    build:
      context: .
      dockerfile: ./backend/Dockerfile
    ports:
      - "8000:8000"
    volumes:
      - ./backend:/code/app
    env_file:
      - .env
    command: python -m uvicorn main:app --host 0.0.0.0 --port 8000 --reload
    restart: always
    depends_on:
      - redis
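
  # Celery worker; shares the backend image and the bind-mounted code.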
  worker:
    build:
      context: .
      dockerfile: ./backend/Dockerfile
    volumes:
      - ./backend:/code/app
    env_file:
      - .env
    command: python -m celery -A celery_worker.celery worker --loglevel=info
    restart: always
    depends_on:
      - redis
      - backend
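
  # Frontend dev server (port 5173 suggests Vite); the anonymous volume
  # keeps the container's node_modules from being masked by the host mount.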
  frontend:
    build:
      context: .
      dockerfile: ./frontend/Dockerfile
    ports:
      - "5173:5173"
    volumes:
      - ./frontend:/app
      - /app/node_modules
    restart: always
    depends_on:
      - backend
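
# Bring the stack up locally with the Docker Compose v2 CLI:
#   docker compose up --build
#
# The backend and worker read their settings from .env. A minimal sketch,
# assuming the application reaches Redis by its Compose service name (the
# variable names below are illustrative, not confirmed by this repo):
#   CELERY_BROKER_URL=redis://redis:6379/0
#   CELERY_RESULT_BACKEND=redis://redis:6379/0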