#!/bin/bash
#SBATCH --job-name=lavt_easyhard                   # Job name shown in the queue
#SBATCH --mail-user=vip.maildummy@gmail.com
#SBATCH --mail-type=BEGIN,END,FAIL
#SBATCH --partition=a4000                          # a6000 or a100
#SBATCH --gres=gpu:1
#SBATCH --time=7-00:00:00                          # d-hh:mm:ss, max time limit
#SBATCH --mem=48000                                # CPU memory size (MB)
#SBATCH --cpus-per-task=4                          # number of CPU cores
#SBATCH --output=log_refcocog_umd_ckpt_testAB.txt  # stdout filename

ml cuda/11.0                     # Load the required CUDA version
eval "$(conda shell.bash hook)"  # Initialize Conda
conda activate lavt              # Activate the conda environment

# test lavt_one
srun python test.py --model lavt_one --swin_type base --dataset refcocog --splitBy umd --split testA --resume ./checkpoints/ckpt_lavt_one/gref_umd.pth --workers 4 --ddp_trained_weights --window12 --img_size 480
srun python test.py --model lavt_one --swin_type base --dataset refcocog --splitBy umd --split testB --resume ./checkpoints/ckpt_lavt_one/gref_umd.pth --workers 4 --ddp_trained_weights --window12 --img_size 480

# srun python test.py --model lavt_one --swin_type base --dataset refcocog --splitBy umd --split testA --resume ./checkpoints/repro_lavt_one/model_best_gref_umd_lavt_one.pth --workers 4 --ddp_trained_weights --window12 --img_size 480
# srun python test.py --model lavt_one --swin_type base --dataset refcocog --splitBy umd --split testB --resume ./checkpoints/repro_lavt_one/model_best_gref_umd_lavt_one.pth --workers 4 --ddp_trained_weights --window12 --img_size 480

# srun python test_mosaic.py --model lavt_one --swin_type base --dataset refcocog --splitBy umd --split testA --resume ./checkpoints/random_550_lavt_one/model_best_mosaic_gref_umd_lavt_one.pth --workers 4 --ddp_trained_weights --window12 --img_size 480 --config config/random_550.yaml
# srun python test_mosaic.py --model lavt_one --swin_type base --dataset refcocog --splitBy umd --split testB --resume ./checkpoints/random_550_lavt_one/model_best_mosaic_gref_umd_lavt_one.pth --workers 4 --ddp_trained_weights --window12 --img_size 480 --config config/random_550.yaml

# srun python test_mosaic.py --model lavt_one --swin_type base --dataset refcocog --splitBy umd --split testA --resume experiments/refcocog_umd/random_gref_umd_460_40epoch_2/model_best_random_gref_umd_460_40epoch_2.pth --workers 4 --ddp_trained_weights --window12 --img_size 480 --config config/random_460.yaml
# srun python test_mosaic.py --model lavt_one --swin_type base --dataset refcocog --splitBy umd --split testB --resume experiments/refcocog_umd/random_gref_umd_460_40epoch_2/model_best_random_gref_umd_460_40epoch_2.pth --workers 4 --ddp_trained_weights --window12 --img_size 480 --config config/random_460.yaml

# retrieval
# srun python test_mosaic_retrieval.py --model lavt_one --swin_type base --dataset refcocog \
#     --splitBy umd --split testA --resume experiments/refcocog_umd/retrieval_filter_gref_umd_433_10up_top200/model_best_retrieval_filter_gref_umd_433_10up_top200.pth \
#     --workers 4 --ddp_trained_weights --window12 --img_size 480 --config config/retrieval_433_10up.yaml
# srun python test_mosaic_retrieval.py --model lavt_one --swin_type base --dataset refcocog \
#     --splitBy umd --split testB --resume experiments/refcocog_umd/retrieval_filter_gref_umd_433_10up_top200/model_best_retrieval_filter_gref_umd_433_10up_top200.pth \
#     --workers 4 --ddp_trained_weights --window12 --img_size 480 --config config/retrieval_433_10up.yaml
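
# Optional: a minimal sketch of how the two active srun calls above could be
# collapsed into a single loop over splits. The checkpoint path and flags are
# copied from the active commands; the CKPT variable and the loop itself are
# assumptions, not part of the original workflow, so the sketch stays commented out.
# CKPT=./checkpoints/ckpt_lavt_one/gref_umd.pth
# for SPLIT in testA testB; do
#     srun python test.py --model lavt_one --swin_type base --dataset refcocog \
#         --splitBy umd --split "$SPLIT" --resume "$CKPT" \
#         --workers 4 --ddp_trained_weights --window12 --img_size 480
# done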