--model_name_or_path /data/public_models/Meta-Llama-3-8B \
  --output_dir /data/yujia_wang/fedmoe_llama/moe/checkpoints_moe/Meta-Llama-3-8B/vicgalle/alpaca-gpt4/1e-5_bnoniid_layer \
  --dataset_name vicgalle/alpaca-gpt4 \
  --dataset_config_name default \
  --per_device_train_batch_size 1 \
  --per_device_eval_batch_size 8 \
  --num_train_epochs 1 \
  --overwrite_output_dir \
  --do_train True \
  --do_eval \
  --seed 42 \
  --dataloader_num_workers 16 \
  --disable_tqdm False \
  --save_strategy no \
  --evaluation_strategy epoch \
  --load_best_model_at_end True \
  --learning_rate 1e-5 \
  --notes layer \
  --split_strategy bnoniid \
  --num_rounds 32 \
  --num_clients 10 \
  --sample_clients 10 \
  --max_gate_samples 50 \
  --max_train_samples 60000 \
  --gradient_accumulation_steps 4  # NOTE(review): removed stray trailing '| |' — empty pipeline stages are a bash syntax error; this looks like fragment residue (possibly a truncated '| tee …'). This is an argument fragment with no launcher command visible — presumably appended to a 'python train.py'-style invocation elsewhere; confirm against the caller.