#!/bin/bash
#SBATCH -A csc605
#SBATCH -J ETTh1_initial
#SBATCH -o logs/etth1/%x-%j.o
#SBATCH -e logs/etth1/%x-%j.e
#SBATCH -t 01:45:00
#SBATCH -p batch
#SBATCH -N 1
# Only needed when submitting with: sbatch --export=NONE ... (recommended)
# Do NOT include this line when submitting without --export=NONE
unset SLURM_EXPORT_ENV
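
# Example submission (script filename is illustrative). Create the log
# directory first; Slurm will not create the -o/-e directories itself:
#   mkdir -p logs/etth1
#   sbatch --export=NONE submit_etth1.sbatch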
# Load modules
module load PrgEnv-gnu/8.5.0
module load rocm/5.7.1
module load craype-accel-amd-gfx90a
module load miniforge3/23.11.0-0

# Activate your environment
ENV_NAME=time-llm-env
export PATH="/lustre/orion/csc605/scratch/rolandriachi/$ENV_NAME/bin:$PATH"
source /autofs/nccs-svm1_sw/frontier/miniforge3/23.11.0/etc/profile.d/conda.sh
conda activate "$ENV_NAME"
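
# Optional sanity check (assumes a ROCm build of PyTorch in this env; ROCm
# GPUs surface through the torch.cuda API):
#   srun -N1 -n1 python -c "import torch; print(torch.cuda.is_available())"

# MIOpen caches compiled kernels in a per-user database; on a shared Lustre
# filesystem a stale or contended cache can break runs, so point it at
# scratch and start clean each run.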
export MIOPEN_USER_DB_PATH="$SCRATCH/my-miopen-cache"
export MIOPEN_CUSTOM_CACHE_DIR="${MIOPEN_USER_DB_PATH}"
rm -rf "${MIOPEN_USER_DB_PATH}"
mkdir -p "${MIOPEN_USER_DB_PATH}"
# --- Time-LLM Config Args ---
model_name=TimeLLM # Or, DLinear
train_epochs=50
learning_rate=0.001
llama_layers=32
batch_size=16
d_model=32
d_ff=128
comment='TimeLLM-ETTh1' # Or, 'DLinear-ETTh1'
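
# With the stock Time-LLM LLaMA-7B backbone (assumption: this run_main.py
# keeps the upstream default --llm_model), llama_layers=32 uses every decoder
# layer; smaller values trade accuracy for memory and speed.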
export LAUNCHER="accelerate launch \
--num_processes 1 \
--num_machines 1 \
--mixed_precision bf16 \
--dynamo_backend no \
"
# To resume training, include a --resume flag
$LAUNCHER run_main.py \
--task_name long_term_forecast \
--is_training 1 \
--root_path ./dataset/ETT-small/ \
--data_path ETTh1.csv \
--model_id ETTh1_512_96 \
--model $model_name \
--data ETTh1 \
--features M \
--seq_len 96 \
--label_len 48 \
--pred_len 96 \
--factor 3 \
--enc_in 7 \
--dec_in 7 \
--c_out 7 \
--des 'Exp' \
--itr 1 \
--d_model $d_model \
--d_ff $d_ff \
--batch_size $batch_size \
--learning_rate $learning_rate \
--llm_layers $llama_layers \
--train_epochs $train_epochs \
--model_comment $comment
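
# For the DLinear baseline, set model_name=DLinear and comment='DLinear-ETTh1'
# above; the launch command itself is unchanged.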