Upload example-script.sh with huggingface_hub
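For reference, one way to push a single file like this from the shell is the CLI bundled with huggingface_hub; a minimal sketch, where the repo id username/time-llm-frontier is a hypothetical placeholder and a prior `huggingface-cli login` (or an HF_TOKEN in the environment) is assumed:

    # Hypothetical target repo; adjust repo id and paths as needed.
    huggingface-cli upload username/time-llm-frontier example-script.sh example-script.sh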
example-script.sh +76 -0
example-script.sh
ADDED
@@ -0,0 +1,76 @@
+#!/bin/bash
+#SBATCH -A csc605
+#SBATCH -J ETTh1_inital
+#SBATCH -o logs/etth1/%x-%j.o
+#SBATCH -e logs/etth1/%x-%j.e
+#SBATCH -t 01:45:00
+#SBATCH -p batch
+#SBATCH -N 1
+
+# Only necessary if submitting like: sbatch --export=NONE ... (recommended)
+# Do NOT include this line when submitting without --export=NONE
+unset SLURM_EXPORT_ENV
+
+# Load modules
+module load PrgEnv-gnu/8.5.0
+module load rocm/5.7.1
+module load craype-accel-amd-gfx90a
+module load miniforge3/23.11.0-0
+
+# Activate your environment
+ENV_NAME=time-llm-env
+export PATH="/lustre/orion/csc605/scratch/rolandriachi/$ENV_NAME/bin:$PATH"
+source /autofs/nccs-svm1_sw/frontier/miniforge3/23.11.0/etc/profile.d/conda.sh
+conda activate time-llm-env
+
+export MIOPEN_USER_DB_PATH="$SCRATCH/my-miopen-cache"
+export MIOPEN_CUSTOM_CACHE_DIR=${MIOPEN_USER_DB_PATH}
+rm -rf ${MIOPEN_USER_DB_PATH}
+mkdir -p ${MIOPEN_USER_DB_PATH}
+
+# V --- Time-LLM Config Args --- V
+
+model_name=TimeLLM # Or, DLinear
+train_epochs=50
+learning_rate=0.001
+llama_layers=32
+
+batch_size=16
+d_model=32
+d_ff=128
+
+comment='TimeLLM-ETTh1' # Or, 'DLinear-ETTh1'
+
+export LAUNCHER="accelerate launch \
+  --num_processes 1 \
+  --num_machines 1 \
+  --mixed_precision bf16 \
+  --dynamo_backend no \
+  "
+
+# To resume training, include a --resume flag
+$LAUNCHER run_main.py \
+  --task_name long_term_forecast \
+  --is_training 1 \
+  --root_path ./dataset/ETT-small/ \
+  --data_path ETTh1.csv \
+  --model_id ETTh1_512_96 \
+  --model $model_name \
+  --data ETTh1 \
+  --features M \
+  --seq_len 96 \
+  --label_len 48 \
+  --pred_len 96 \
+  --factor 3 \
+  --enc_in 7 \
+  --dec_in 7 \
+  --c_out 7 \
+  --des 'Exp' \
+  --itr 1 \
+  --d_model $d_model \
+  --d_ff $d_ff \
+  --batch_size $batch_size \
+  --learning_rate $learning_rate \
+  --llm_layers $llama_layers \
+  --train_epochs $train_epochs \
+  --model_comment $comment \
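For reference, a minimal sketch of how this script would typically be submitted, following the script's own comment about --export=NONE; the mkdir is included because SLURM generally does not create the directory named in the -o/-e paths:

    mkdir -p logs/etth1                       # directory for the -o/-e log files
    sbatch --export=NONE example-script.sh    # or plain `sbatch example-script.sh` if the unset SLURM_EXPORT_ENV line is removed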